code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
|---|---|---|
class FileStorage(object):
    """Serializes instances to a JSON file and deserializes them back.

    Class attributes:
        __file_path: path of the JSON file used for persistence.
        __objects: in-memory store of objects keyed by "<ClassName>.<id>".
    """

    __file_path = "file.json"
    __objects = {}

    def all(self):
        """Return the dictionary of all stored objects."""
        return self.__objects

    def new(self, obj):
        """Store obj under the key "<ClassName>.<obj.id>"."""
        self.__objects["{}.{}".format(obj.__class__.__name__, obj.id)] = obj

    def save(self):
        """Serialize __objects to the JSON file.

        If there are no objects, the JSON file is removed instead
        (a missing file is silently ignored).
        """
        ser_dict = {}
        if self.__objects:
            for key, obj in self.__objects.items():
                # each stored object is expected to provide to_dict()
                ser_dict[key] = obj.to_dict()
            with open(self.__file_path, 'w') as f:
                json.dump(ser_dict, f)
        else:
            try:
                os.remove(self.__file_path)
            except FileNotFoundError:
                pass

    def reload(self):
        """Deserialize the JSON file back into __objects, if it exists.

        NOTE(review): eval() on the stored "__class__" value executes an
        arbitrary name from the file -- confirm the file is trusted.
        """
        try:
            with open(self.__file_path, 'r') as f:
                deser_dict = {}
                deser_dict = json.load(f)
                for key, val in deser_dict.items():
                    self.__objects[key] = eval(val["__class__"])(**val)
        except FileNotFoundError:
            pass
|
serializes instances to a JSON file and deserializes JSON
file to instances
Attributes:
__file_path (str): path to the JSON file
__objects (dict): empty but will store all objects by <class name>.id
(ex: to store a BaseModel object with id=12121212,
the key will be BaseModel.12121212)
|
62599022507cdc57c63a5c78
|
class OUStrategy(ExplorationStrategy):
    """Ornstein-Uhlenbeck exploration: adds temporally correlated noise
    to the actions produced by a policy.

    The process is dx = theta * (mu - x) * dt + sigma * sqrt(dt) * N(0, 1).

    Args:
        env_spec: environment spec providing ``action_space``.
        mu: long-run mean the process reverts to.
        sigma: scale of the Wiener-noise term.
        theta: mean-reversion rate.
        dt: integration time step.
        x0: optional initial state; defaults to an array filled with mu.
    """

    def __init__(self, env_spec, mu=0, sigma=0.3, theta=0.15, dt=1e-2,
                 x0=None):
        self.env_spec = env_spec
        self.action_space = env_spec.action_space
        self.action_dim = self.action_space.flat_dim
        self.mu = mu
        self.sigma = sigma
        self.theta = theta
        self.dt = dt
        self.x0 = x0
        self.reset()

    def simulate(self):
        """Advance the OU process by one Euler step and return the state."""
        x = self.state
        dx = self.theta * (self.mu - x) * self.dt + self.sigma * np.sqrt(
            self.dt) * np.random.normal(size=len(x))
        self.state = x + dx
        return self.state

    @overrides
    def reset(self):
        """Reset the process state to x0, or to the mean mu.

        Bug fix: the original used ``self.mu * np.zeros(...)``, which is
        always the zero vector and silently ignores a non-zero mu; the
        process should start at its mean (cf. garage's OUStrategy, which
        uses ``np.ones``).
        """
        self.state = self.x0 if self.x0 is not None else self.mu * np.ones(
            self.action_dim)

    @overrides
    def get_action(self, t, observation, policy, **kwargs):
        """Return the policy action perturbed by one OU noise sample,
        clipped to the action-space bounds, plus the agent infos."""
        action, agent_infos = policy.get_action(observation)
        ou_state = self.simulate()
        return np.clip(action + ou_state, self.action_space.low,
                       self.action_space.high), agent_infos

    def get_actions(self, observations, policy):
        """Batch variant of get_action; a single shared OU sample is
        added to every action in the batch."""
        actions, agent_infos = policy.get_actions(observations)
        ou_state = self.simulate()
        return np.clip(actions + ou_state, self.action_space.low,
                       self.action_space.high), agent_infos
|
An OU exploration strategy to add noise to environment actions.
Example:
$ python garage/tf/exploration_strategies/ou_strategy.py
|
62599022be8e80087fbbff4a
|
class InputList(object):
    """Convenience wrapper around a list of named input tensors.

    Provides lookup by integer position or by name substring, plus a
    context-manager based selection via ``use()`` / ``with``.
    """

    input_list = None
    name = None

    def __init__(self, inputs):
        self.input_list = inputs

    def __getitem__(self, name):
        """Fetch an input by integer index or by a substring of its name."""
        if isinstance(name, int):
            return self.input_list[name]
        if isinstance(name, str):
            # return the first input whose name contains the query
            for candidate in self.input_list:
                if name in candidate.name:
                    return candidate
            raise IndexError("No input with name {} defined. ".format(name) +
                             "Options are {}".format(self.input_list))
        raise IndexError("Wrong type {} for indexing".format(type(name)))

    def __call__(self):
        """Return the underlying list of inputs."""
        return self.input_list

    def use(self, name):
        """Select *name* for the next ``with`` block; returns self."""
        self.name = name
        return self

    def __enter__(self):
        # hand back the input previously selected via use()
        return self[self.name]

    def __exit__(self, exctype, excvalue, traceback):
        pass
|
Convenience class for querying inputs.
This class holds a list of input-tensors
(e.g. as created by inputlayer) and provides
simple access methods to obtain a certain
layer by name.
|
625990228c3a8732951f742b
|
class CircularQueue:
    """FIFO queue backed by a circularly linked list.

    Only the tail node is kept; the head is always ``tail._next``, so
    enqueue, dequeue and rotate are all O(1).
    """

    class _Node:
        """Singly linked node; __slots__ keeps instances lightweight."""
        __slots__ = '_element', '_next'

        def __init__(self, element, next):
            self._element = element
            self._next = next

    def __init__(self):
        self._tail = None
        self._size = 0

    def __len__(self):
        return self._size

    def is_empty(self):
        return self._size == 0

    def first(self):
        """Return (without removing) the element at the front."""
        if self.is_empty():
            raise Empty('Queue is empty')
        return self._tail._next._element

    def dequeue(self):
        """Remove and return the front element."""
        if self.is_empty():
            raise Empty('Queue is empty')
        front = self._tail._next
        self._size -= 1
        if self._size == 0:
            # last node removed: the queue becomes empty
            self._tail = None
        else:
            self._tail._next = front._next
        return front._element

    def enqueue(self, e):
        """Append element e at the back of the queue."""
        node = self._Node(e, None)
        if self._tail is None:
            node._next = node
        else:
            node._next = self._tail._next
            self._tail._next = node
        self._tail = node
        self._size += 1

    def rotate(self):
        """Move the front element to the back in O(1)."""
        if self._size > 0:
            self._tail = self._tail._next
|
Queue implementation using circularly linked list for storage.
|
6259902230c21e258be996e9
|
class Alien(Sprite):
    """Represents a single alien in the fleet."""

    def __init__(self,ai_setting,screen):
        super().__init__()
        self.screen = screen
        self.ai_setting = ai_setting
        self.image = pygame.image.load('images/alien.bmp')
        self.rect = self.image.get_rect()
        # start near the top-left, offset by one alien width/height
        self.rect.x = self.rect.width
        self.rect.y = self.rect.height
        # track horizontal position as a float so fractional speeds work
        self.x = float(self.rect.x)

    def blitme(self):
        """Draw the alien at its current rect position."""
        self.screen.blit(self.image,self.rect)

    def check_edges(self):
        """Return True when the alien touches either horizontal screen
        edge (returns None otherwise)."""
        screen_rect = self.screen.get_rect()
        if self.rect.right >= screen_rect.right:
            return True
        elif self.rect.left <= 0:
            return True

    def update(self):
        """Move horizontally by speed factor times fleet direction."""
        self.x += (self.ai_setting.alien_speed_factor * self.ai_setting.fleet_direction)
        self.rect.x = self.x
|
表示单个外星人的类
|
6259902291af0d3eaad3acfa
|
class PeopleImage(models.Model):
    """Django model for a collected image and its source metadata."""

    url = models.URLField(unique=True, max_length=400)
    title = models.TextField(max_length=500)
    category = models.ForeignKey(
        Category, on_delete=models.SET_NULL, null=True, blank=True
    )
    page = models.CharField(max_length=60)
    selected = models.BooleanField(null=True, blank=True)
    user_id = models.CharField(max_length=20)
    meta = fields.JSONField(null=True, blank=True)
    content_parsed = models.BooleanField(null=True, blank=True, default=None)

    def get_user_id(self):
        # assumes the 6th slash-separated component of the URL is the
        # user id -- TODO confirm against the source site's URL layout
        return self.url.split("/")[5]

    def save(
        self, force_insert=False, force_update=False, using=None,
        update_fields=None
    ):
        """Refresh user_id from the URL on every save."""
        self.user_id = self.get_user_id()
        super().save(force_insert, force_update, using, update_fields)

    def __str__(self):
        return str(self.url)
|
이미지.
|
62599022287bf620b6272ac2
|
class DisplayNameFilter(object):
    """Logging filter that mirrors each record's logger name onto a
    ``display_name`` attribute so formatters can reference it."""

    def filter(self, record):
        # copy the logger name; returning True keeps the record
        setattr(record, "display_name", record.name)
        return True
|
A logging filter that sets display_name.
|
625990229b70327d1c57fc56
|
class VariableWindowIndexer(WindowIndexer):
    """Variable-length window indexer skeleton.

    NOTE(review): every method body here is ``pass`` -- this looks like
    an auto-generated API stub rather than a working implementation;
    confirm the real implementation lives elsewhere.
    """

    def build(self, *args, **kwargs):
        pass

    def __init__(self, *args, **kwargs):
        pass

    @staticmethod
    def __new__(*args, **kwargs):
        pass

    def __reduce__(self, *args, **kwargs):
        pass

    def __setstate__(self, *args, **kwargs):
        pass
|
create a variable length window indexer object
that has start & end, that point to offsets in
the index object; these are defined based on the win
arguments
Parameters
----------
input: ndarray
input data array
win: int64_t
window size
minp: int64_t
min number of obs in a window to consider non-NaN
index: ndarray
index of the input
left_closed: bint
left endpoint closedness
True if the left endpoint is closed, False if open
right_closed: bint
right endpoint closedness
True if the right endpoint is closed, False if open
floor: optional
unit for flooring the unit
|
625990221d351010ab8f49eb
|
class User(Resource):
    """REST resource for creating users."""

    schema = UserSchema()

    @staticmethod
    def post():
        """Create a user from the JSON request body.

        Raises:
            BadRequest: if the payload fails schema validation.
            ServerProblem: if persisting the user fails.

        Returns:
            The serialized user and HTTP status 201.
        """
        data = request.get_json()
        try:
            User.schema.load(data)
        except ValidationError as err:
            raise BadRequest(err.messages)
        user = UserModel(**data)
        try:
            user.save_to_db()
        except Exception:
            # hide the storage error behind a generic 500-style response
            raise ServerProblem()
        return User.schema.dump(user), 201
|
User Resource
|
62599022d18da76e235b78b8
|
class AnkIncorrectFileFormat(AutoNetkitException):
    """Raised when an input file has the wrong format."""
    pass
|
Wrong file format
|
62599022bf627c535bcb238b
|
class Batch:
    """Holds a batch of data with masks during training.

    Input is a batch produced by a torchtext iterator; ``src`` (and
    optionally ``trg``) arrive as ``(data, lengths)`` pairs.
    """

    def __init__(self, torch_batch, pad_index, use_cuda=False):
        self.src, self.src_lengths = torch_batch.src
        # mask out padding positions; unsqueeze adds a broadcast dim
        self.src_mask = (self.src != pad_index).unsqueeze(1)
        self.nseqs = self.src.size(0)
        self.trg_input = None
        self.trg = None
        self.trg_mask = None
        self.trg_lengths = None
        self.ntokens = None
        self.use_cuda = use_cuda
        if hasattr(torch_batch, "trg"):
            trg, trg_lengths = torch_batch.trg
            # decoder input drops the final token; the target drops the
            # first one (presumably BOS -- confirm with the field setup)
            self.trg_input = trg[:, :-1]
            self.trg_lengths = trg_lengths
            self.trg = trg[:, 1:]
            self.trg_mask = (self.trg_input != pad_index).unsqueeze(1)
            # count of non-pad target tokens (e.g. for loss normalization)
            self.ntokens = (self.trg != pad_index).data.sum().item()
        if use_cuda:
            self._make_cuda()

    def _make_cuda(self):
        """Move all tensors of this batch onto the GPU."""
        self.src = self.src.cuda()
        self.src_mask = self.src_mask.cuda()
        if self.trg_input is not None:
            self.trg_input = self.trg_input.cuda()
            self.trg = self.trg.cuda()
            self.trg_mask = self.trg_mask.cuda()

    def sort_by_src_lengths(self):
        """Sort the batch by descending source length, in place.

        Returns:
            rev_index: permutation that restores the original order.
        """
        _, perm_index = self.src_lengths.sort(0, descending=True)
        # build the inverse permutation so callers can undo the sort
        rev_index = [0]*perm_index.size(0)
        for new_pos, old_pos in enumerate(perm_index.cpu().numpy()):
            rev_index[old_pos] = new_pos
        sorted_src_lengths = self.src_lengths[perm_index]
        sorted_src = self.src[perm_index]
        sorted_src_mask = self.src_mask[perm_index]
        if self.trg_input is not None:
            sorted_trg_input = self.trg_input[perm_index]
            sorted_trg_lengths = self.trg_lengths[perm_index]
            sorted_trg_mask = self.trg_mask[perm_index]
            sorted_trg = self.trg[perm_index]
        self.src = sorted_src
        self.src_lengths = sorted_src_lengths
        self.src_mask = sorted_src_mask
        if self.trg_input is not None:
            self.trg_input = sorted_trg_input
            self.trg_mask = sorted_trg_mask
            self.trg_lengths = sorted_trg_lengths
            self.trg = sorted_trg
        if self.use_cuda:
            # re-move: indexing above may have produced new CPU tensors
            self._make_cuda()
        return rev_index
|
Object for holding a batch of data with mask during training.
Input is a batch from a torch text iterator.
|
625990225166f23b2e2442ab
|
class UpPolicy(AbstractFileSyncPolicy):
    """Sync policy for a file that is synced up (from local disk to the
    cloud)."""

    def _make_transfer_action(self):
        # Upload the source file's latest version to the same relative
        # path in the destination folder.
        return B2UploadAction(
            self._source_folder.make_full_path(self._source_file.name),
            self._source_file.name,
            self._dest_folder.make_full_path(self._source_file.name),
            self._get_source_mod_time(),
            self._source_file.latest_version().size
        )
|
file is synced up (from disk to the cloud)
|
62599022a8ecb033258720f6
|
class Url(BaseColumnsMixin, db.Model):
    """A URL belonging to a Site (SQLAlchemy model)."""

    url = db.Column(db.String(512), unique=True)
    site_id = db.Column(db.Integer, db.ForeignKey('site.id'))
    # many-to-one: each Url has one Site; Site gets a 'urls' backref
    site = db.relationship('Site', backref=db.backref('urls'))
|
A url.
|
62599022507cdc57c63a5c7c
|
class FunctionDefinition(RestrictedDefinition):
    """Describes a function definition: its handler, return types,
    call-validation constraints and forward-declaration flag."""

    def __init__(self, moduleName, defName, linkedType, returnTypes,
                 handler=None, constraints=None, forward=True):
        RestrictedDefinition.__init__(self, moduleName, defName,
                                      "deffunction", linkedType)
        self._handler = handler
        # normalize returnTypes to a tuple
        if isinstance(returnTypes, tuple):
            self._returnTypes = returnTypes
        else:
            self._returnTypes = (returnTypes,)
        # anything other than a list of constraints means "no constraints"
        if isinstance(constraints, list):
            self._constraints = constraints
        else:
            self._constraints = []
        self._forward = bool(forward)

    @property
    def handler(self):
        """Callable implementing the function, if any."""
        return self._handler

    @property
    def returnTypes(self):
        """Tuple of allowed return types."""
        return self._returnTypes

    @property
    def isForward(self):
        """Whether this is a forward declaration."""
        return self._forward

    @isForward.setter
    def isForward(self, value):
        self._forward = value

    def isValidCall(self, args):
        """Check args against every constraint, then run custom checks.

        Returns a (valid, reason) tuple; reason is None when valid.
        """
        for constraint in self._constraints:
            if not constraint.isValid(args):
                return (False, constraint.getReason())
        return self.customValidation(args)

    def customValidation(self, args):
        """Hook for subclasses; base implementation always accepts."""
        return (True, None)
|
Describe a Function Definition
|
625990228c3a8732951f742e
|
class CourseWikiSubviewPage(CoursePage):
    """Abstract base page for subviews within the course wiki.

    Attributes:
        article_name: wiki article id built as "<org>.<number>.<run>".
    """

    def __init__(self, browser, course_id, course_info):
        super(CourseWikiSubviewPage, self).__init__(browser, course_id)
        self.course_id = course_id
        self.course_info = course_info
        self.article_name = "{org}.{course_number}.{course_run}".format(
            org=self.course_info['org'],
            course_number=self.course_info['number'],
            course_run=self.course_info['run']
        )
|
Abstract base page for subviews within the wiki.
|
62599022be8e80087fbbff4e
|
class Test_switch:
    """Tests for switch-related client calls (C is the client fixture)."""

    def test_list_switches(self):
        """Listing returns the known switches in sorted order."""
        assert C.switch.list() == [
            u'brocade-01', u'dell-01', u'mock-01', u'nexus-01'
        ]

    def test_show_switch(self):
        """Show returns name, ports and capabilities for a switch."""
        assert C.switch.show('dell-01') == {
            u'name': u'dell-01',
            u'ports': [],
            u'capabilities': ['nativeless-trunk-mode']}

    def test_show_switch_reserved_chars(self):
        # names containing reserved characters must be rejected
        with pytest.raises(BadArgumentError):
            C.switch.show('dell-/%]-01')

    def test_delete_switch(self):
        assert C.switch.delete('nexus-01') is None

    def test_delete_switch_reserved_chars(self):
        with pytest.raises(BadArgumentError):
            C.switch.delete('nexus/%]-01')

    def test_switch_register(self):
        """Registering a mock switch with valid info succeeds."""
        switchinfo = {
            "type": "http://schema.massopencloud.org/haas/v0/switches/mock",
            "username": "name",
            "password": "asdasd",
            "hostname": "example.com"}
        subtype = "http://schema.massopencloud.org/haas/v0/switches/mock"
        assert C.switch.register('mytestswitch', subtype, switchinfo) is None

    def test_switch_register_fail(self):
        """Registering with an unknown keyword in the info must fail."""
        switchinfo = {
            "type": "http://schema.massopencloud.org/haas/v0/switches/mock",
            "username": "name",
            "password": "asdasd",
            "unknown_keyword": "example.com"}
        subtype = "http://schema.massopencloud.org/haas/v0/switches/mock"
        with pytest.raises(FailedAPICallException):
            C.switch.register('mytestswitch', subtype, switchinfo)
|
Tests switch related client calls.
|
625990228c3a8732951f742f
|
class BertEmbedding(TransformerEmbedding):
    """Thin wrapper around TransformerEmbedding for BERT checkpoints.

    For other transformer-based language models, use
    TransformerEmbedding directly.
    """

    def to_dict(self) -> Dict[str, Any]:
        """Serialize config, adding the BERT model folder."""
        info_dic = super(BertEmbedding, self).to_dict()
        info_dic['config']['model_folder'] = self.model_folder
        return info_dic

    def __init__(self, model_folder: str, **kwargs: Any):
        # derive the standard BERT checkpoint file layout from the folder
        self.model_folder = model_folder
        vocab_path = os.path.join(self.model_folder, 'vocab.txt')
        config_path = os.path.join(self.model_folder, 'bert_config.json')
        checkpoint_path = os.path.join(self.model_folder, 'bert_model.ckpt')
        kwargs['vocab_path'] = vocab_path
        kwargs['config_path'] = config_path
        kwargs['checkpoint_path'] = checkpoint_path
        kwargs['model_type'] = 'bert'
        super(BertEmbedding, self).__init__(**kwargs)
|
BertEmbedding is a simple wrapped class of TransformerEmbedding.
If you need load other kind of transformer based language model, please use the TransformerEmbedding.
|
625990226e29344779b01528
|
class AdminOrdersView(AdminBaseView):
    """Admin endpoint: list a shop's orders, filtered by order type,
    pay type, delivery method, status, or an exact order number."""

    pagination_class = StandardResultsSetPagination

    @AdminBaseView.permission_required(
        [AdminBaseView.staff_permissions.ADMIN_ORDER]
    )
    @use_args(
        {
            # order type filter (normal / groupon orders)
            "order_types": StrToList(
                required=False,
                missing=[OrderType.NORMAL, OrderType.GROUPON],
                validate=[validate.ContainsOnly([OrderType.NORMAL, OrderType.GROUPON])],
                comment="订单类型筛选 1: 普通订单, 5: 拼团订单",
            ),
            # payment method filter (WeChat JSAPI / cash on delivery)
            "order_pay_types": StrToList(
                required=False,
                missing=[OrderPayType.WEIXIN_JSAPI, OrderPayType.ON_DELIVERY],
                validate=[
                    validate.ContainsOnly(
                        [OrderPayType.WEIXIN_JSAPI, OrderPayType.ON_DELIVERY]
                    )
                ],
                comment="订单支付方式筛选 1: 微信支付, 2: 货到付款",
            ),
            # delivery method filter (home delivery / customer pickup)
            "order_delivery_methods": StrToList(
                required=False,
                missing=[
                    OrderDeliveryMethod.HOME_DELIVERY,
                    OrderDeliveryMethod.CUSTOMER_PICK,
                ],
                validate=[
                    validate.ContainsOnly(
                        [
                            OrderDeliveryMethod.HOME_DELIVERY,
                            OrderDeliveryMethod.CUSTOMER_PICK,
                        ]
                    )
                ],
                comment="订单配送方式筛选 1: 送货上门, 2: 自提",
            ),
            # status filter (paid / confirmed / finished / refunded)
            "order_status": StrToList(
                required=False,
                missing=[
                    OrderStatus.PAID,
                    OrderStatus.CONFIRMED,
                    OrderStatus.FINISHED,
                    OrderStatus.REFUNDED,
                ],
                validate=[
                    validate.ContainsOnly(
                        [
                            OrderStatus.PAID,
                            OrderStatus.CONFIRMED,
                            OrderStatus.FINISHED,
                            OrderStatus.REFUNDED,
                        ]
                    )
                ],
                comment="订单状态筛选 2: 未处理 3: 处理中 4: 已完成 5: 已退款",
            ),
            # order-number search, mutually exclusive with other filters
            "num": fields.String(
                required=False, data_key="order_num", comment="订单号搜索,与其他条件互斥"
            ),
        },
        location="query"
    )
    def get(self, request, args):
        """Return the paginated, serialized order list for the shop."""
        shop_id = self.current_shop.id
        order_list = list_shop_orders(shop_id, **args)
        order_list = self._get_paginated_data(order_list, AdminOrdersSerializer)
        return self.send_success(data_list=order_list)
|
后台-订单-获取订单列表
|
6259902226238365f5fada2a
|
class ViewAvailablityPlayer(tk.Frame,ViewAvailablity):
    """Tk frame showing availability from the player's point of view.

    Attributes:
        controller: the application controller that owns this view.
    """

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.controller = controller
        # the three section-marker strings below are placeholders for
        # widget code that was never filled in
        """ Widget Declearations """
        """ Widget Stylings """
        """ Widget Positions """
|
Methods:
__init__
Variables:
controller
|
62599023a8ecb033258720f8
|
class TableStats(GenericStruct):
    """Body of the reply to an OFPST_TABLE stats request."""

    table_id = UBInt8()
    pad = Pad(3)
    active_count = UBInt32()
    lookup_count = UBInt64()
    matched_count = UBInt64()

    def __init__(self, table_id=None, name=None, max_entries=None,
                 active_count=None, lookup_count=None, matched_count=None):
        # NOTE(review): ``name`` and ``max_entries`` are accepted but
        # never stored, and no matching struct fields are declared --
        # confirm whether they should be fields or be removed from the
        # signature.
        super().__init__()
        self.table_id = table_id
        self.active_count = active_count
        self.lookup_count = lookup_count
        self.matched_count = matched_count
|
Body of reply to OFPST_TABLE request.
|
625990238c3a8732951f7431
|
class Send:
    """Encoders for the outgoing binary command protocol.

    All fields are little-endian with no padding, so packing them in a
    single struct call yields the same bytes as field-by-field packing.
    """

    @staticmethod
    def move(motorID, speed, commandID=1):
        """Encode a single-motor move: three int16 fields."""
        return struct.pack('<hhh', commandID, motorID, speed)

    @staticmethod
    def telemetry(x, y, z, commandID=2):
        """Encode a telemetry sample: int16 id + three float32 values."""
        return struct.pack('<hfff', commandID, x, y, z)

    @staticmethod
    def move_all(speed, commandID=3):
        """Encode an all-motors move: two int16 fields."""
        return struct.pack('<hh', commandID, speed)
|
Commands to be used when sending.
|
62599023a4f1c619b294f4cd
|
class ShowObjects(object):
    """Generated message-passing message carrying a single signed byte
    field ``show`` (range -128..127)."""

    __slots__ = (
        '_show',
    )

    @property
    def show(self):
        return self._show

    @show.setter
    def show(self, value):
        # validated as a signed 8-bit integer
        self._show = msgbuffers.validate_integer(
            'ShowObjects.show', value, -128, 127)

    def __init__(self, show=0):
        self.show = show

    @classmethod
    def unpack(cls, buffer):
        """Decode an instance from *buffer*; the whole buffer must be
        consumed or a ReadError is raised."""
        reader = msgbuffers.BinaryReader(buffer)
        value = cls.unpack_from(reader)
        if reader.tell() != len(reader):
            raise msgbuffers.ReadError(
                ('ShowObjects.unpack received a buffer of length {length}, ' +
                'but only {position} bytes were read.').format(
                length=len(reader), position=reader.tell()))
        return value

    @classmethod
    def unpack_from(cls, reader):
        """Decode an instance from an existing BinaryReader."""
        _show = reader.read('b')
        return cls(_show)

    def pack(self):
        """Encode this message and return the bytes."""
        writer = msgbuffers.BinaryWriter()
        self.pack_to(writer)
        return writer.dumps()

    def pack_to(self, writer):
        # 'b' = signed 8-bit integer
        writer.write(self._show, 'b')

    def __eq__(self, other):
        if type(self) is type(other):
            return self._show == other._show
        else:
            return NotImplemented

    def __ne__(self, other):
        if type(self) is type(other):
            return not self.__eq__(other)
        else:
            return NotImplemented

    def __len__(self):
        # encoded size in bytes
        return (msgbuffers.size(self._show, 'b'))

    def __str__(self):
        return '{type}(show={show})'.format(
            type=type(self).__name__, show=self._show)

    def __repr__(self):
        return '{type}(show={show})'.format(
            type=type(self).__name__, show=repr(self._show))
|
Generated message-passing message.
|
62599023d164cc6175821e50
|
class CertificateSearch():
    """Wrapper for querying issued HTTPS certificates of domains.

    The backend site must be a key of SUPPORTED_SITES (currently only
    crt.sh).
    """

    def __init__(self, site='crt.sh'):
        if site not in SUPPORTED_SITES:
            msg = '{} is not supported. Valid sites are {}'.format(site, SUPPORTED_SITES)
            raise NotImplementedError(msg)
        self.site = site
        self._load_module()

    def _load_module(self):
        # SUPPORTED_SITES maps the site name to a backend class
        self.module = SUPPORTED_SITES[self.site]()

    def search(self, domain):
        """Query certificates issued for *domain* via the backend."""
        return self.module.Engine.search(domain)
|
This class is a wrapper that queries issued HTTPS certificates of domains
from various sources. It currently supports only crt.sh.
|
62599023d18da76e235b78ba
|
class VpnClientConfiguration(Model):
    """VpnClientConfiguration for a P2S client.

    :param vpn_client_address_pool: AddressSpace reference representing
        the address space for P2S VpnClient.
    :param vpn_client_root_certificates: list of VpnClientRootCertificate
        for the virtual network gateway.
    :param vpn_client_revoked_certificates: list of
        VpnClientRevokedCertificate for the virtual network gateway.
    """

    _attribute_map = {
        'vpn_client_address_pool': {'key': 'vpnClientAddressPool', 'type': 'AddressSpace'},
        'vpn_client_root_certificates': {'key': 'vpnClientRootCertificates', 'type': '[VpnClientRootCertificate]'},
        'vpn_client_revoked_certificates': {'key': 'vpnClientRevokedCertificates', 'type': '[VpnClientRevokedCertificate]'},
    }

    def __init__(self, vpn_client_address_pool=None,
                 vpn_client_root_certificates=None,
                 vpn_client_revoked_certificates=None):
        super(VpnClientConfiguration, self).__init__()
        self.vpn_client_address_pool = vpn_client_address_pool
        self.vpn_client_root_certificates = vpn_client_root_certificates
        self.vpn_client_revoked_certificates = vpn_client_revoked_certificates
|
VpnClientConfiguration for P2S client.
:param vpn_client_address_pool: Gets or sets the reference of the Address
space resource which represents Address space for P2S VpnClient.
:type vpn_client_address_pool:
~azure.mgmt.network.v2015_06_15.models.AddressSpace
:param vpn_client_root_certificates: VpnClientRootCertificate for Virtual
network gateway.
:type vpn_client_root_certificates:
list[~azure.mgmt.network.v2015_06_15.models.VpnClientRootCertificate]
:param vpn_client_revoked_certificates: VpnClientRevokedCertificate for
Virtual network gateway.
:type vpn_client_revoked_certificates:
list[~azure.mgmt.network.v2015_06_15.models.VpnClientRevokedCertificate]
|
62599023287bf620b6272ac8
|
class RCEInternalProtocol(Int32StringReceiver, _Protocol):
    """Protocol used to connect Endpoints so Interfaces in different
    Endpoints can communicate.

    Wire format (after the init handshake): a bool flag byte, an
    optional 16-byte destination UUID (when the flag is True), a
    16-byte remote UUID, a 1-byte message-id length, the message id,
    then the payload.

    NOTE(review): this is Python 2 code (``buffer``, str/bytes mixing);
    porting to Python 3 would need explicit bytes handling.
    """

    MAX_LENGTH = 1000000
    _MSG_ID_STRUCT = struct.Struct('!B')
    _TRUE = struct.pack('!?', True)
    _FALSE = struct.pack('!?', False)

    def __init__(self, endpoint):
        _Protocol.__init__(self)
        self._endpoint = endpoint
        endpoint.registerProtocol(self)
        self._initialized = False
        # before the handshake completes, incoming strings are routed to
        # the init handler instead of the normal message handler
        self.stringReceived = self._initReceived

    def _initReceived(self, msg):
        """Handle the 32-byte init message: 16-byte conn id + 16-byte key."""
        if len(msg) != 32:
            log.msg('Protocol Error: iInit message has invalid format.')
            self.transport.loseConnection()
            return
        d = self._endpoint.processInit(self, msg[:16], msg[16:])
        d.addCallbacks(self._initSuccessful, self._initFailed)

    def _initSuccessful(self, _):
        # switch to normal message handling once the endpoint accepts us
        self.stringReceived = self._messageReceived
        self._initialized = True

    def _initFailed(self, failure):
        log.msg('Protocol Error: {0}'.format(failure.getErrorMessage()))
        self.transport.loseConnection()

    def _messageReceived(self, msg):
        """Parse a framed message and dispatch it via messageReceived."""
        if len(msg) < 17:
            self.transport.loseConnection()
        flag = msg[:1]
        if flag == self._TRUE:
            # flag True: a 16-byte destination UUID follows
            destID = UUID(bytes=msg[1:17])
            offset = 17
        elif flag == self._FALSE:
            destID = None
            offset = 1
        else:
            log.msg('Protocol Error: Could not identify flag.')
            self.transport.loseConnection()
            return
        remoteID = UUID(bytes=msg[offset:offset+16])
        offset += 16
        idLen, = self._MSG_ID_STRUCT.unpack(msg[offset:offset+1])
        offset += 1
        msgID = msg[offset:offset+idLen]
        offset += idLen
        # buffer() avoids copying the (possibly large) payload
        self.messageReceived(remoteID, buffer(msg, offset), msgID, destID)

    def sendInit(self, connID, key):
        """Send the 32-byte init handshake message."""
        assert len(connID) == 16
        assert len(key) == 16
        self.sendString(connID + key)

    def sendMessage(self, interface, msg, msgID, remoteID=None):
        assert self._initialized
        uid = interface.UID.bytes
        assert len(uid) == 16
        try:
            idLen = self._MSG_ID_STRUCT.pack(len(msgID))
        except struct.error:
            # message id longer than 255 bytes cannot be encoded
            raise InternalError('Message ID is too long.')
        if remoteID:
            flag = self._TRUE
            rmtID = remoteID.bytes
            assert len(rmtID) == 16
        else:
            flag = self._FALSE
            rmtID = ''
        self.sendString(''.join((flag, rmtID, uid, idLen, msgID, msg)))

    sendMessage.__doc__ = _Protocol.sendMessage.__doc__

    def connectionLost(self, reason):
        """Deregister from the endpoint when the connection drops."""
        _Protocol.remote_destroy(self)
        if self._endpoint:
            self._endpoint.unregisterProtocol(self)
            self._endpoint = None

    def remote_destroy(self):
        """Tear down the protocol by closing the transport."""
        self.transport.loseConnection()
|
Protocol which is used to connect Endpoints such that Interfaces in
different Endpoint are able to communicate.
|
625990238c3a8732951f7432
|
class SchulzeVote(object):
    """A single ballot for Schulze voting.

    Attributes:
        ranking (list of int): ranking position for each option; equal
            values mean the options are ranked equally.
        weight (int): how many votes this ballot counts for (default 1).

    Example:
        With options A, B, C, D and preference A > B = D > C:
        >>> vote = SchulzeVote([0, 1, 2, 1])
    """

    def __init__(self, ranking, weight=1):
        self.weight = weight
        self.ranking = ranking
|
Class for a Schulze voting.
It contains the weight of a voter (default 1) and the ranking. That is
if there are n options to vote for for each option it contains the ranking
position.
Attributes:
ranking (list of int): For each option the position in the ranking.
weight (int): Weight of the voter (how many votes a single voter has).
Example:
Suppose that there are 4 options to vote for (A, B, C and D). The voter
wants to rank A > B = D > C. This can be created with:
>>> vote = SchulzeVote([0, 1, 2, 1])
|
62599023a8ecb033258720fa
|
class ApiKeyTestCase(WorkoutManagerTestCase):
    """Tests the api-key page for the three user access states:
    no access, granted access, and pending request."""

    def test_api_key_page_shows_user_has_no_access(self):
        """A user without access sees the request-pending prompt."""
        self.user_login('test')
        response = self.client.get(reverse('core:user:api-key'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(
            response,
            'Pending request to gain access ' + '' +
            'to create users through api.')

    def test_api_key_page_shows_user_has_access(self):
        """A user with access sees the confirmation message."""
        self.user_login('demo')
        response = self.client.get(reverse('core:user:api-key'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(
            response, 'You have access to create users through the api.')

    def test_api_key_page_shows_user_pending_access(self):
        """A user with a pending request sees the request link."""
        self.user_login('trainer1')
        response = self.client.get(reverse('core:user:api-key'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(
            response, 'Request for access to add user through api.')
|
Tests if user has access
|
62599023a4f1c619b294f4cf
|
class StrandTest(ColorCycle): <NEW_LINE> <INDENT> def init_parameters(self): <NEW_LINE> <INDENT> super().init_parameters() <NEW_LINE> self.set_parameter('num_steps_per_cycle', self.strip.num_leds) <NEW_LINE> <DEDENT> def before_start(self): <NEW_LINE> <INDENT> self.color = 0x000000 <NEW_LINE> <DEDENT> def update(self, current_step: int, current_cycle: int): <NEW_LINE> <INDENT> if current_step == 0: <NEW_LINE> <INDENT> self.color >>= 8 <NEW_LINE> <DEDENT> if self.color == 0: <NEW_LINE> <INDENT> self.color = 0xFF0000 <NEW_LINE> <DEDENT> head = (current_step + 9) % self.p.value['num_steps_per_cycle'] <NEW_LINE> tail = current_step <NEW_LINE> self.strip.set_pixel_bytes(head, self.color) <NEW_LINE> self.strip.set_pixel_bytes(tail, 0) <NEW_LINE> return True
|
Displays a classical LED test
No parameters necessary
|
625990235e10d32532ce4072
|
class _MSRIDataSegment(_ExtractedDataSegment): <NEW_LINE> <INDENT> @deprecated_keywords({"loglevel": None}) <NEW_LINE> def __init__(self, msri, sample_rate, start_time, end_time, src_name, loglevel=None): <NEW_LINE> <INDENT> self.msri = msri <NEW_LINE> self.sample_rate = sample_rate <NEW_LINE> self.start_time = start_time <NEW_LINE> self.end_time = end_time <NEW_LINE> self.src_name = src_name <NEW_LINE> <DEDENT> def read_stream(self): <NEW_LINE> <INDENT> msrstart = self.msri.get_startepoch() <NEW_LINE> msrend = self.msri.get_endepoch() <NEW_LINE> reclen = self.msri.msr.contents.reclen <NEW_LINE> sepoch = self.start_time.timestamp <NEW_LINE> eepoch = self.end_time.timestamp <NEW_LINE> st = Stream() <NEW_LINE> if msrstart < eepoch and msrend > sepoch: <NEW_LINE> <INDENT> if self.sample_rate > 0 and (msrstart < self.start_time or msrend > self.end_time): <NEW_LINE> <INDENT> logger.debug("Trimming record %s @ %s" % (self.src_name, self.msri.get_starttime())) <NEW_LINE> tr = read(BytesIO(ctypes.string_at( self.msri.msr.contents.record, reclen)), format="MSEED")[0] <NEW_LINE> tr.trim(self.start_time, self.end_time) <NEW_LINE> st.traces.append(tr) <NEW_LINE> return st <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.debug("Writing full record %s @ %s" % (self.src_name, self.msri.get_starttime())) <NEW_LINE> out = (ctypes.c_char * reclen).from_address( ctypes.addressof(self.msri.msr.contents.record.contents)) <NEW_LINE> data = BytesIO(out.raw) <NEW_LINE> st = read(data, format="MSEED") <NEW_LINE> <DEDENT> <DEDENT> return st <NEW_LINE> <DEDENT> def get_num_bytes(self): <NEW_LINE> <INDENT> return self.msri.msr.contents.reclen <NEW_LINE> <DEDENT> def get_src_name(self): <NEW_LINE> <INDENT> return self.src_name
|
Segment of data from a _MSRIterator
|
6259902391af0d3eaad3ad02
|
class EncoderCNNSmall(nn.Module): <NEW_LINE> <INDENT> def __init__(self, input_dim, hidden_dim, num_objects, act_fn='sigmoid', act_fn_hid='relu'): <NEW_LINE> <INDENT> super(EncoderCNNSmall, self).__init__() <NEW_LINE> self.cnn1 = nn.Conv2d( input_dim, hidden_dim, (10, 10), stride=10) <NEW_LINE> self.cnn2 = nn.Conv2d(hidden_dim, num_objects, (1, 1), stride=1) <NEW_LINE> self.ln1 = nn.BatchNorm2d(hidden_dim) <NEW_LINE> self.act1 = util.get_act_fn(act_fn_hid) <NEW_LINE> self.act2 = util.get_act_fn(act_fn) <NEW_LINE> <DEDENT> def forward(self, obs): <NEW_LINE> <INDENT> h = self.act1(self.ln1(self.cnn1(obs))) <NEW_LINE> return self.act2(self.cnn2(h))
|
CNN encoder, maps observation to obj-specific feature maps.
|
6259902356b00c62f0fb379d
|
class PythonRunner: <NEW_LINE> <INDENT> def __init__(self, bodyLines, functionStartAndStop=None, parameters=[]): <NEW_LINE> <INDENT> self.bodyLines = bodyLines <NEW_LINE> self.functionCoordinates = functionStartAndStop <NEW_LINE> functionLines = bodyLines <NEW_LINE> if functionStartAndStop is not None: <NEW_LINE> <INDENT> functionLines = bodyLines[functionStartAndStop[0]:functionStartAndStop[1]] <NEW_LINE> <DEDENT> self.function = PythonFunction(functionLines) <NEW_LINE> self.previousState = None <NEW_LINE> self.functionStates = {} <NEW_LINE> self.lineNumber = 0 <NEW_LINE> self.parameters = parameters <NEW_LINE> <DEDENT> def processFunction(self): <NEW_LINE> <INDENT> lastLineNumber, returnValue, error = self.runFunction() <NEW_LINE> results = {} <NEW_LINE> for lineNumber in self.functionStates: <NEW_LINE> <INDENT> functionState = self.functionStates[lineNumber] <NEW_LINE> for varName in functionState: <NEW_LINE> <INDENT> variableStatement = ["{0} = {1}".format(varName, self.getValue(functionState[varName]))] <NEW_LINE> if lineNumber in results: <NEW_LINE> <INDENT> results[lineNumber] += variableStatement <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> results[lineNumber] = variableStatement <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if error is not None: <NEW_LINE> <INDENT> results[lastLineNumber] = ["{0}".format(error)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> results[lastLineNumber] = ["return {0}".format(self.getValue(returnValue))] <NEW_LINE> <DEDENT> return results <NEW_LINE> <DEDENT> def runFunction(self): <NEW_LINE> <INDENT> newFunctionLines = self.function.generateFunctionWithHouseKeeping(self.generateHousekeepingLines) <NEW_LINE> if self.functionCoordinates is None: <NEW_LINE> <INDENT> newLines = newFunctionLines <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> newLines = list(self.bodyLines) <NEW_LINE> newLines[self.functionCoordinates[0]:self.functionCoordinates[1]] = newFunctionLines <NEW_LINE> <DEDENT> callFunctionString = self.getFunctionCallString() 
<NEW_LINE> newLines.append(callFunctionString) <NEW_LINE> runnableBody = "\n".join(newLines) <NEW_LINE> return RunMethod(runnableBody, self) <NEW_LINE> <DEDENT> def getFunctionCallString(self): <NEW_LINE> <INDENT> if self.function.needsArguments(): <NEW_LINE> <INDENT> return "returnValue = {0}({1}, runner)".format(self.function.name, self.getFunctionParameterString()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "returnValue = {0}(runner)".format(self.function.name) <NEW_LINE> <DEDENT> <DEDENT> def getFunctionParameterString(self): <NEW_LINE> <INDENT> return ", ".join([str(self.getValue(parameter)) for parameter in self.parameters]) <NEW_LINE> <DEDENT> def generateHousekeepingLines(self, lineNumber): <NEW_LINE> <INDENT> return ["__variables__ = {}", "for __var_name__ in [__var_name__ for __var_name__ in dir() if __var_name__ not in ['__runner__', '__var_name__', '__variables__']]:", " __variables__[__var_name__]=eval(__var_name__)", "__runner__.storeState({0}, __variables__)".format(lineNumber)] <NEW_LINE> <DEDENT> def storeState(self, lineNumber, variables): <NEW_LINE> <INDENT> self.functionStates[self.lineNumber] = {} <NEW_LINE> for varName in variables: <NEW_LINE> <INDENT> if self.previousState is None or varName not in self.previousState or self.previousState[varName] != variables[varName]: <NEW_LINE> <INDENT> self.functionStates[self.lineNumber][varName] = variables[varName] <NEW_LINE> <DEDENT> <DEDENT> self.lineNumber = lineNumber <NEW_LINE> self.previousState = variables <NEW_LINE> <DEDENT> def getValue(self, value): <NEW_LINE> <INDENT> if type(value) == str: <NEW_LINE> <INDENT> value = "'{0}'".format(value) <NEW_LINE> <DEDENT> return value
|
Represents a runner of a Python class
|
62599023be8e80087fbbff54
|
class IPsecSiteConnection(model_base.BASEV2, model_base.HasId, model_base.HasProject): <NEW_LINE> <INDENT> __tablename__ = 'ipsec_site_connections' <NEW_LINE> name = sa.Column(sa.String(db_const.NAME_FIELD_SIZE)) <NEW_LINE> description = sa.Column(sa.String(db_const.DESCRIPTION_FIELD_SIZE)) <NEW_LINE> peer_address = sa.Column(sa.String(255), nullable=False) <NEW_LINE> peer_id = sa.Column(sa.String(255), nullable=False) <NEW_LINE> local_id = sa.Column(sa.String(255), nullable=True) <NEW_LINE> route_mode = sa.Column(sa.String(8), nullable=False) <NEW_LINE> mtu = sa.Column(sa.Integer, nullable=False) <NEW_LINE> initiator = sa.Column(sa.Enum("bi-directional", "response-only", name="vpn_initiators"), nullable=False) <NEW_LINE> auth_mode = sa.Column(sa.String(16), nullable=False) <NEW_LINE> psk = sa.Column(sa.String(255), nullable=False) <NEW_LINE> dpd_action = sa.Column(sa.Enum("hold", "clear", "restart", "disabled", "restart-by-peer", name="vpn_dpd_actions"), nullable=False) <NEW_LINE> dpd_interval = sa.Column(sa.Integer, nullable=False) <NEW_LINE> dpd_timeout = sa.Column(sa.Integer, nullable=False) <NEW_LINE> status = sa.Column(sa.String(16), nullable=False) <NEW_LINE> admin_state_up = sa.Column(sa.Boolean(), nullable=False) <NEW_LINE> vpnservice_id = sa.Column(sa.String(36), sa.ForeignKey('vpnservices.id'), nullable=False) <NEW_LINE> ipsecpolicy_id = sa.Column(sa.String(36), sa.ForeignKey('ipsecpolicies.id'), nullable=False) <NEW_LINE> ikepolicy_id = sa.Column(sa.String(36), sa.ForeignKey('ikepolicies.id'), nullable=False) <NEW_LINE> ipsecpolicy = orm.relationship( IPsecPolicy, backref='ipsec_site_connection') <NEW_LINE> ikepolicy = orm.relationship(IKEPolicy, backref='ipsec_site_connection') <NEW_LINE> peer_cidrs = orm.relationship(IPsecPeerCidr, backref='ipsec_site_connection', lazy='joined', cascade='all, delete, delete-orphan') <NEW_LINE> local_ep_group_id = sa.Column(sa.String(36), sa.ForeignKey('vpn_endpoint_groups.id')) <NEW_LINE> peer_ep_group_id = 
sa.Column(sa.String(36), sa.ForeignKey('vpn_endpoint_groups.id')) <NEW_LINE> local_ep_group = orm.relationship("VPNEndpointGroup", foreign_keys=local_ep_group_id) <NEW_LINE> peer_ep_group = orm.relationship("VPNEndpointGroup", foreign_keys=peer_ep_group_id)
|
Represents a IPsecSiteConnection Object.
|
62599023796e427e5384f65b
|
class GEEnvironmentModel(Model): <NEW_LINE> <INDENT> def __init__(self, N, width, height, grid=10, seed=None): <NEW_LINE> <INDENT> if seed is None: <NEW_LINE> <INDENT> super(GEEnvironmentModel, self).__init__(seed=None) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> super(GEEnvironmentModel, self).__init__(seed) <NEW_LINE> <DEDENT> self.num_agents = N <NEW_LINE> self.grid = Grid(width, height, grid) <NEW_LINE> self.schedule = SimultaneousActivation(self) <NEW_LINE> for i in range(self.num_agents): <NEW_LINE> <INDENT> a = GEBTAgent(i, self) <NEW_LINE> self.schedule.add(a) <NEW_LINE> x = 0 <NEW_LINE> y = 0 <NEW_LINE> a.location = (x, y) <NEW_LINE> self.grid.add_object_to_grid((x, y), a) <NEW_LINE> a.operation_threshold = 2 <NEW_LINE> <DEDENT> <DEDENT> def step(self): <NEW_LINE> <INDENT> self.schedule.step()
|
A environemnt to model swarms
|
6259902326238365f5fada2e
|
class GELU(torch.autograd.Function): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(GELU, self).__init__() <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> assert x.is_contiguous() <NEW_LINE> self.saved = x.float() <NEW_LINE> output = self.saved.new(*self.saved.shape) <NEW_LINE> n = x.numel() <NEW_LINE> cunnex('geluForward')( grid=((n + 1023) // 1024, 1, 1), block=(1024, 1, 1), args=[self.saved.data_ptr(), output.data_ptr(), n], stream=Stream ) <NEW_LINE> return output.type(x.type()) <NEW_LINE> <DEDENT> def backward(self, grad_output): <NEW_LINE> <INDENT> assert grad_output.is_contiguous() <NEW_LINE> grad_input = grad_output.new(*grad_output.shape).float() <NEW_LINE> n = grad_output.numel() <NEW_LINE> cunnex('geluBackward')( grid=((n + 1023) // 512, 1, 1), block=(1024, 1, 1), args=[grad_input.data_ptr(), grad_output.float().data_ptr(), self.saved.data_ptr(), n], stream=Stream ) <NEW_LINE> return grad_input.type(grad_output.type())
|
The Function is forced to work under fp32
|
62599023c432627299fa3ed0
|
class SluggedManager(Manager): <NEW_LINE> <INDENT> def get_by_natural_key(self, slug): <NEW_LINE> <INDENT> return self.get(slug=slug) <NEW_LINE> <DEDENT> def get_by_slug(self, slug, pk=None): <NEW_LINE> <INDENT> if 'slugs' in apps: <NEW_LINE> <INDENT> qs = self.get_queryset() <NEW_LINE> qs = qs.order_by('-slug_history__id') <NEW_LINE> qs = qs.filter(slug_history__slug=slug) <NEW_LINE> if pk is not None: <NEW_LINE> <INDENT> qs = qs.filter(slug_history__object_id=pk) <NEW_LINE> <DEDENT> return qs[:1].get() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.get(slug=slug)
|
Provides access to items using their natural key: the ``slug`` field.
|
62599023925a0f43d25e8f24
|
class Solution(object): <NEW_LINE> <INDENT> def groupAnagrams(self, strs: List[str]) -> List[List[str]]: <NEW_LINE> <INDENT> ma = {} <NEW_LINE> for s in strs: <NEW_LINE> <INDENT> ss = "".join(sorted(s)) <NEW_LINE> if ss in ma: <NEW_LINE> <INDENT> ma[ss].append(s) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ma[ss] = [s] <NEW_LINE> <DEDENT> <DEDENT> return list(ma.values())
|
word字符排序完当做key
time: O(klogk * n)
space: O(nk)
|
62599023287bf620b6272acc
|
class FileNameNormalizer(object): <NEW_LINE> <INDENT> implements(IFileNameNormalizer) <NEW_LINE> def normalize(self, text, locale=None, max_length=MAX_FILENAME_LENGTH): <NEW_LINE> <INDENT> if locale is not None: <NEW_LINE> <INDENT> util = queryUtility(IFileNameNormalizer, name=locale) <NEW_LINE> parts = locale.split('_') <NEW_LINE> if util is None and len(parts) > 1: <NEW_LINE> <INDENT> util = queryUtility(IFileNameNormalizer, name=parts[0]) <NEW_LINE> <DEDENT> if util is not None and util.__class__ is not self.__class__: <NEW_LINE> <INDENT> text = util.normalize(text, locale=locale) <NEW_LINE> <DEDENT> <DEDENT> text = baseNormalize(text) <NEW_LINE> m = UNDERSCORE_START_REGEX.match(text) <NEW_LINE> if m is not None: <NEW_LINE> <INDENT> text = m.groups()[1] <NEW_LINE> <DEDENT> base = text <NEW_LINE> ext = '' <NEW_LINE> m = FILENAME_REGEX.match(text) <NEW_LINE> if m is not None: <NEW_LINE> <INDENT> base = m.groups()[0] <NEW_LINE> ext = m.groups()[1] <NEW_LINE> <DEDENT> base = IGNORE_REGEX.sub('', base) <NEW_LINE> base = DANGEROUS_CHARS_REGEX.sub('-', base) <NEW_LINE> base = EXTRA_DASHES_REGEX.sub('', base) <NEW_LINE> base = MULTIPLE_DASHES_REGEX.sub('-', base) <NEW_LINE> base = cropName(base, maxLength=max_length) <NEW_LINE> if ext != '': <NEW_LINE> <INDENT> base = base + '.' + ext <NEW_LINE> <DEDENT> return base
|
This normalizer can normalize any unicode string and returns a version
that only contains of ASCII characters allowed in a file name.
Let's make sure that this implementation actually fulfills the API.
>>> from zope.interface.verify import verifyClass
>>> verifyClass(IFileNameNormalizer, FileNameNormalizer)
True
|
62599023be8e80087fbbff56
|
class DatastoreNameEnum(Enum): <NEW_LINE> <INDENT> running = 0 <NEW_LINE> startup = 1 <NEW_LINE> operational = 2 <NEW_LINE> @staticmethod <NEW_LINE> def _meta_info(): <NEW_LINE> <INDENT> from ydk.models.cisco_ios_xe._meta import _tailf_confd_monitoring as meta <NEW_LINE> return meta._meta_table['ConfdState.Internal.DatastoreNameEnum']
|
DatastoreNameEnum
Name of one of the datastores implemented by CDB.
.. data:: running = 0
.. data:: startup = 1
.. data:: operational = 2
|
6259902356b00c62f0fb379f
|
class Loss(object): <NEW_LINE> <INDENT> def __init__(self, cfg, tb_writer=None): <NEW_LINE> <INDENT> self.cfg = cfg <NEW_LINE> self.meter_dict = OrderedDict() <NEW_LINE> self.tb_writer = tb_writer <NEW_LINE> <DEDENT> def reset_meters(self): <NEW_LINE> <INDENT> for k, v in self.meter_dict.items(): <NEW_LINE> <INDENT> v.reset() <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, batch, pred, step=0, **kwargs): <NEW_LINE> <INDENT> pass
|
Base class for calculating loss and managing log.
|
625990238c3a8732951f7436
|
class Multimeter(Gpib): <NEW_LINE> <INDENT> def __init__(self, name='hp3478a', pad=23, sad=0, asksleep=0.02): <NEW_LINE> <INDENT> Gpib.__init__(self, name=name, pad=pad, sad=sad) <NEW_LINE> self.asksleep = asksleep <NEW_LINE> <DEDENT> def readuntil(self, term=''): <NEW_LINE> <INDENT> mystr = '' <NEW_LINE> numterms = 0 <NEW_LINE> try: <NEW_LINE> <INDENT> s = self.read(len=1) <NEW_LINE> <DEDENT> except GpibError: <NEW_LINE> <INDENT> return mystr <NEW_LINE> <DEDENT> if s == term: <NEW_LINE> <INDENT> numterms += 1 <NEW_LINE> <DEDENT> mystr += s <NEW_LINE> while numterms < 2: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> s = self.read(len=1) <NEW_LINE> <DEDENT> except GpibError: <NEW_LINE> <INDENT> return mystr <NEW_LINE> <DEDENT> if s == term: <NEW_LINE> <INDENT> numterms += 1 <NEW_LINE> <DEDENT> mystr += s <NEW_LINE> <DEDENT> return mystr <NEW_LINE> <DEDENT> def askuntil(self, text): <NEW_LINE> <INDENT> self.write(text) <NEW_LINE> time.sleep(self.asksleep) <NEW_LINE> return self.readuntil() <NEW_LINE> <DEDENT> def setup_dc(self, nplc=10, range=10.0, nrdgs=2): <NEW_LINE> <INDENT> for command in ('TRIG HOLD', 'FIXEDZ 1', 'DCV %s,AUTO' % range, 'NPLC %s' % nplc, 'NRDGS %d,SYN' % nrdgs): <NEW_LINE> <INDENT> self.write(command) <NEW_LINE> time.sleep(self.asksleep) <NEW_LINE> <DEDENT> <DEDENT> def setup_ac(self, nplc=10.0, range=10.0, nrdgs=2, resolution=0.001): <NEW_LINE> <INDENT> self.nrdgs = nrdgs <NEW_LINE> cmd = 'F2' <NEW_LINE> cmd += 'R1' <NEW_LINE> cmd += 'Z1' <NEW_LINE> cmd += 'N5' <NEW_LINE> self.write(cmd) <NEW_LINE> time.sleep(self.asksleep) <NEW_LINE> <DEDENT> def take_readings(self, nrdgs=2): <NEW_LINE> <INDENT> self.write('T3') <NEW_LINE> time.sleep(self.asksleep) <NEW_LINE> volt = numpy.zeros(nrdgs, dtype=float) <NEW_LINE> for i in range(nrdgs): <NEW_LINE> <INDENT> self.trigger() <NEW_LINE> time.sleep(self.asksleep) <NEW_LINE> rdg = float(self.read()) <NEW_LINE> volt[i] = rdg <NEW_LINE> logger.debug("%d, %s" % (i, rdg)) <NEW_LINE> <DEDENT> self.write('T1') 
<NEW_LINE> time.sleep(self.asksleep) <NEW_LINE> return volt.mean(), volt.std()
|
A Gpib helper class for interfacing with the HP3457A
digital multimeter
|
6259902326238365f5fada30
|
class PamService(Model): <NEW_LINE> <INDENT> topic = SystemInfoTopic <NEW_LINE> service = fields.String() <NEW_LINE> modules = fields.List(fields.String())
|
Pam service description
This model contains information about pam modules used by specific PAM
service/filename
|
62599023d18da76e235b78bd
|
class NEP6DiskWallet(Wallet): <NEW_LINE> <INDENT> _default_path = './wallet.json' <NEW_LINE> def __init__(self, path: str, name: Optional[str] = None, version: str = Wallet._wallet_version, scrypt: Optional[ScryptParameters] = None, accounts: List[Account] = None, default_account: Optional[Account] = None, extra: Optional[dict] = None): <NEW_LINE> <INDENT> filepath, extension = os.path.splitext(path) <NEW_LINE> if len(extension) == 0: <NEW_LINE> <INDENT> path += '.json' <NEW_LINE> <DEDENT> if name is None: <NEW_LINE> <INDENT> dir_path, name = os.path.split(path) <NEW_LINE> name, extension = os.path.splitext(name) <NEW_LINE> <DEDENT> self.path: str = path <NEW_LINE> super().__init__(name=name, version=version, scrypt=scrypt, accounts=accounts, default_account=default_account, extra=extra) <NEW_LINE> <DEDENT> def save(self) -> None: <NEW_LINE> <INDENT> with open(self.path, 'w') as json_file: <NEW_LINE> <INDENT> json.dump(self.to_json(), json_file) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def default(cls, path: str = _default_path, name: Optional[str] = 'wallet.json') -> NEP6DiskWallet: <NEW_LINE> <INDENT> return cls(path=path, name=name, version=cls._wallet_version, scrypt=ScryptParameters(), accounts=[], extra=None)
|
A specialised wallet for persisting wallets to media.
|
6259902391af0d3eaad3ad06
|
class Quit(flow.SWFlow): <NEW_LINE> <INDENT> def __init__(self, ui_spawner): <NEW_LINE> <INDENT> super().__init__(None, ui_spawner, None) <NEW_LINE> self.register_entry_point(ENTRY_POINT, self.quit) <NEW_LINE> <DEDENT> def quit(self): <NEW_LINE> <INDENT> self.ui_spawner.finish() <NEW_LINE> raise flow.EndFlow()
|
Game shutdown.
|
625990235166f23b2e2442b5
|
class WikiCandidatesSelector: <NEW_LINE> <INDENT> def __init__(self, logger=DEFAULT_LOGGER, separate: bool = True, n: int = 3, **kwargs): <NEW_LINE> <INDENT> self.profiler = kwargs.get('profiler', DEFAULT_MEASURER) <NEW_LINE> self.logger = logger <NEW_LINE> self.tagger = SequenceTagger.load('ner-fast') <NEW_LINE> self.wikipedia = MediaWiki() <NEW_LINE> self.separate = separate <NEW_LINE> self.n = n <NEW_LINE> self.logger.info("Candidate selector is loaded and ready to use.") <NEW_LINE> <DEDENT> def get_wiki_candidates_raw(self, query: str) -> List[str]: <NEW_LINE> <INDENT> search_results = self.wikipedia.search(query, results=self.n) <NEW_LINE> return [t.replace(' ', '_') for t in search_results] <NEW_LINE> <DEDENT> def get_entities(self, text: str) -> List[str]: <NEW_LINE> <INDENT> sentence = Sentence(text) <NEW_LINE> self.tagger.predict(sentence) <NEW_LINE> entities = [] <NEW_LINE> for entity in sentence.get_spans('ner'): <NEW_LINE> <INDENT> entities.append(entity.text) <NEW_LINE> <DEDENT> return entities <NEW_LINE> <DEDENT> def get_wiki_candidates_NER(self, query: str) -> Set[str]: <NEW_LINE> <INDENT> self.profiler.start_measure_local('NER_model') <NEW_LINE> entities = self.get_entities(query) <NEW_LINE> self.profiler.finish_measure_local() <NEW_LINE> self.profiler.start_measure_local('wiki_search') <NEW_LINE> search_results = self.get_wiki_candidates_raw(query) <NEW_LINE> if not self.separate: <NEW_LINE> <INDENT> search_results_en = self.get_wiki_candidates_raw(' '.join(entities)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> search_results_en = [] <NEW_LINE> for e in entities: <NEW_LINE> <INDENT> search_results_en += self.get_wiki_candidates_raw(e) <NEW_LINE> <DEDENT> <DEDENT> self.profiler.finish_measure_local() <NEW_LINE> return set([t for t in search_results + search_results_en]) <NEW_LINE> <DEDENT> def get_wiki_texts(self, articles_names: Set[str]) -> Dict: <NEW_LINE> <INDENT> result = {} <NEW_LINE> for name in articles_names: <NEW_LINE> <INDENT> try: 
<NEW_LINE> <INDENT> page = self.wikipedia.page(name) <NEW_LINE> result[name] = page.summary.replace('\n', ' ').split('. ') <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self.logger.warning(f"[Candidates picker] Page for id {name} is not found.") <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def get_candidates(self, claim: str) -> Dict: <NEW_LINE> <INDENT> candidates = self.get_wiki_candidates_NER(claim) <NEW_LINE> self.logger.info(f"[Candidates picker] Candidates found: {', '.join(candidates)}") <NEW_LINE> self.profiler.start_measure_local('wiki_texts') <NEW_LINE> texts_dict = self.get_wiki_texts(candidates) <NEW_LINE> self.profiler.finish_measure_local() <NEW_LINE> return texts_dict
|
Class responsible for model of candidates selection from Wikipedia (Model level one)
:param logger: logger to use in model
:param separate: if make separate queries for each found entity
:param n: number of results return after each query
|
6259902356b00c62f0fb37a1
|
class TestProjection(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.atol = 1e-12 <NEW_LINE> self.rtol = 1e-12 <NEW_LINE> <DEDENT> def test_inside(self): <NEW_LINE> <INDENT> assert_allclose(projgrad.project(np.array([1.0, 0.0])), np.array([1.0, 0.0]), atol=self.atol, rtol=self.rtol) <NEW_LINE> assert_allclose(projgrad.project(np.array([0.6, 0.4])), np.array([0.6, 0.4]), atol=self.atol, rtol=self.rtol) <NEW_LINE> <DEDENT> def test_nonnormalized(self): <NEW_LINE> <INDENT> assert_allclose(projgrad.project(np.array([0.5, 0.0])), np.array([0.75, 0.25]), atol=self.atol, rtol=self.rtol) <NEW_LINE> assert_allclose(projgrad.project(np.array([0.25, 0.25])), np.array([0.5, 0.5]), atol=self.atol, rtol=self.rtol) <NEW_LINE> assert_allclose(projgrad.project(np.array([0.3, 0.5])), np.array([0.4, 0.6]), atol=self.atol, rtol=self.rtol) <NEW_LINE> <DEDENT> def test_outside(self): <NEW_LINE> <INDENT> assert_allclose(projgrad.project(np.array([1.0, -1.0])), np.array([1.0, 0.0]), atol=self.atol, rtol=self.rtol) <NEW_LINE> assert_allclose(projgrad.project(np.array([-1.0, 2.0])), np.array([0.0, 1.0]), atol=self.atol, rtol=self.rtol) <NEW_LINE> assert_allclose(projgrad.project(np.array([1.0, 0.5, -1.0])), np.array([0.75, 0.25, 0.0]), atol=self.atol, rtol=self.rtol) <NEW_LINE> <DEDENT> def test_masked(self): <NEW_LINE> <INDENT> assert_allclose(projgrad.project(np.array([0.5, 0.0]), mask=[False, False]), np.array([0.75, 0.25]), atol=self.atol, rtol=self.rtol) <NEW_LINE> assert_allclose(projgrad.project(np.array([0.7, 0.7, 0.0]), mask=[False, False, True]), np.array([0.5, 0.5, 0.0]), atol=self.atol, rtol=self.rtol) <NEW_LINE> assert_allclose(projgrad.project(np.array([0.3, 0.3, 0.0]), mask=[False, False, True]), np.array([0.5, 0.5, 0.0]), atol=self.atol, rtol=self.rtol)
|
Test projection onto probability simplex for simple examples.
|
625990235166f23b2e2442b7
|
class AlarmDiginTest(BaseTest): <NEW_LINE> <INDENT> ID = "T2" <NEW_LINE> REQS = ["R1.1"]
|
Digin alarm temp test
|
62599023796e427e5384f661
|
class DummyFile: <NEW_LINE> <INDENT> def __init__(self,data): <NEW_LINE> <INDENT> self.data = data; <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def readlines(self): <NEW_LINE> <INDENT> return self.data.split(u"\n")
|
wrap a bunch of string data in a file interface
|
6259902391af0d3eaad3ad0a
|
class Resnet(BaseModel): <NEW_LINE> <INDENT> def __init__(self, pretrained, num_classes=10): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.resnet = models.resnet18(pretrained) <NEW_LINE> num_ftrs = self.resnet.fc.in_features <NEW_LINE> self.resnet.fc = nn.Linear(num_ftrs, num_classes) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> return F.log_softmax(self.resnet.forward(x), dim=1)
|
resnet18
|
62599023a4f1c619b294f4d8
|
class GraphVerticalLine(object): <NEW_LINE> <INDENT> pass
|
Draw a vertical line at time. Its color is composed from three
hexadecimal numbers specifying the rgb color components (00 is
off, FF is maximum) red, green and blue. Optionally, a legend
box and string is printed in the legend section. time may be a
number or a variable from a VDEF. It is an error to use vnames
from DEF or CDEF here.
|
625990239b70327d1c57fc66
|
class _ForwardHandler(socketserver.BaseRequestHandler): <NEW_LINE> <INDENT> remote_address = None <NEW_LINE> ssh_transport = None <NEW_LINE> logger = None <NEW_LINE> info = None <NEW_LINE> def _redirect(self, chan): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> rqst, _, _ = select([self.request, chan], [], [], 5) <NEW_LINE> if self.request in rqst: <NEW_LINE> <INDENT> data = self.request.recv(1024) <NEW_LINE> self.logger.log(TRACE_LEVEL, '<<< In {0} recv: {1} <<<'.format(self.info, repr(data))) <NEW_LINE> chan.send(data) <NEW_LINE> if len(data) == 0: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> if chan in rqst: <NEW_LINE> <INDENT> data = chan.recv(1024) <NEW_LINE> self.logger.log(TRACE_LEVEL, '>>> Out {0} send to {1}: {2} >>>'.format( self.info, self.remote_address, repr(data) )) <NEW_LINE> self.request.send(data) <NEW_LINE> if len(data) == 0: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def handle(self): <NEW_LINE> <INDENT> uid = get_connection_id() <NEW_LINE> self.info = '#{0} <-- {1}'.format(uid, self.client_address or self.server.local_address) <NEW_LINE> try: <NEW_LINE> <INDENT> src_address = self.request.getpeername() <NEW_LINE> if not isinstance(src_address, tuple): <NEW_LINE> <INDENT> src_address = ('dummy', 12345) <NEW_LINE> <DEDENT> chan = self.ssh_transport.open_channel('direct-tcpip', self.remote_address, src_address) <NEW_LINE> <DEDENT> except paramiko.SSHException as e: <NEW_LINE> <INDENT> msg = '{0} to {1} failed: {2}'.format(self.info, self.remote_address, repr(e)) <NEW_LINE> self.logger.error(msg) <NEW_LINE> raise HandlerSSHTunnelForwarderError(msg) <NEW_LINE> <DEDENT> if chan is None: <NEW_LINE> <INDENT> msg = '{0} to {1} was rejected by the SSH server'.format( self.info, self.remote_address ) <NEW_LINE> self.logger.error(msg) <NEW_LINE> raise HandlerSSHTunnelForwarderError(msg) <NEW_LINE> <DEDENT> self.logger.info('{0} connected'.format(self.info)) <NEW_LINE> try: <NEW_LINE> <INDENT> 
self._redirect(chan) <NEW_LINE> <DEDENT> except socket.error: <NEW_LINE> <INDENT> self.logger.warning('{0} sending RST'.format(self.info)) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self.logger.error('{0} error: {1}'.format(self.info, repr(e))) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> chan.close() <NEW_LINE> self.request.close() <NEW_LINE> self.logger.info('{0} connection closed.'.format(self.info))
|
Base handler for tunnel connections
|
625990238c3a8732951f743c
|
class IncrementalPushTests(AsyncTestCase): <NEW_LINE> <INDENT> def test_less_data(self): <NEW_LINE> <INDENT> pool = build_pool(self) <NEW_LINE> service = service_for_pool(self, pool) <NEW_LINE> volume = service.get(MY_VOLUME) <NEW_LINE> creating = pool.create(volume) <NEW_LINE> def created(filesystem): <NEW_LINE> <INDENT> self.filesystem = filesystem <NEW_LINE> path = filesystem.get_path() <NEW_LINE> path.child(b"some-data").setContent(b"hello world" * 1024) <NEW_LINE> with filesystem.reader() as reader: <NEW_LINE> <INDENT> self.complete_size = len(reader.read()) <NEW_LINE> <DEDENT> snapshots = filesystem.snapshots() <NEW_LINE> return snapshots <NEW_LINE> <DEDENT> loading = creating.addCallback(created) <NEW_LINE> def loaded(snapshots): <NEW_LINE> <INDENT> with self.filesystem.reader(snapshots) as reader: <NEW_LINE> <INDENT> incremental_size = len(reader.read()) <NEW_LINE> <DEDENT> self.assertTrue( incremental_size < self.complete_size, "Bytes of data for incremental send ({}) was not fewer than " "bytes of data for complete send ({}).".format( incremental_size, self.complete_size) ) <NEW_LINE> <DEDENT> loading.addCallback(loaded) <NEW_LINE> return loading
|
Tests for incremental push based on ZFS snapshots.
|
6259902356b00c62f0fb37a5
|
class ActivitySerializer(BaseSerializer): <NEW_LINE> <INDENT> def dumps(self, activity): <NEW_LINE> <INDENT> self.check_type(activity) <NEW_LINE> activity_time = datetime_to_epoch(activity.time) <NEW_LINE> parts = [activity.actor_id, activity.verb.id, activity.object_id, activity.target_id or 0] <NEW_LINE> extra_context = activity.extra_context.copy() <NEW_LINE> pickle_string = '' <NEW_LINE> if extra_context: <NEW_LINE> <INDENT> pickle_string = pickle.dumps(activity.extra_context) <NEW_LINE> <DEDENT> parts += [activity_time, pickle_string] <NEW_LINE> serialized_activity = '|'.join(map(str, parts)) <NEW_LINE> return serialized_activity <NEW_LINE> <DEDENT> def loads(self, serialized_activity): <NEW_LINE> <INDENT> parts = serialized_activity.split('|') <NEW_LINE> actor_id, verb_id, object_id, target_id = map( int, parts[:4]) <NEW_LINE> activity_datetime = epoch_to_datetime(float(parts[4])) <NEW_LINE> pickle_string = str(parts[5]) <NEW_LINE> if not target_id: <NEW_LINE> <INDENT> target_id = None <NEW_LINE> <DEDENT> verb = get_verb_by_id(verb_id) <NEW_LINE> extra_context = {} <NEW_LINE> if pickle_string: <NEW_LINE> <INDENT> extra_context = pickle.loads(pickle_string) <NEW_LINE> <DEDENT> activity = Activity(actor_id, verb, object_id, target_id, time=activity_datetime, extra_context=extra_context) <NEW_LINE> return activity
|
Serializer optimized for taking as little memory as possible to store an
Activity
Serialization consists of 5 parts
- actor_id
- verb_id
- object_id
- target_id
- extra_context (pickle)
None values are stored as 0
|
62599023be8e80087fbbff5c
|
class SchemaCacheBase(SchemaNonCache): <NEW_LINE> <INDENT> pass
|
Implements methods
This is a base class, use either SchemaStrongCache or SchemaWeakCache.
DONT USE THIS DIRECTLY.
|
62599023a8ecb03325872104
|
class AuctionList:
    """Processes the outcome of auctions' bids and records the results."""

    def __init__(self) -> None:
        self.auctions = []           # known Auction instances
        self.winning_bids_logs = []  # human-readable outcome lines

    def compile_winning_bids_info(self) -> None:
        """Append one outcome line per auction to ``winning_bids_logs``."""
        for auction in self.auctions:
            if auction.highest_bid is None:
                self.winning_bids_logs.append(
                    f'For Auction: {auction.auction_id} - no valid bid was received \n')
            else:
                self.winning_bids_logs.append(
                    f'For Auction: {auction.auction_id} - Account: '
                    f'{auction.highest_bid.account_id} won the bid of amount: '
                    f'{auction.highest_bid.amount} \n')

    def get_auction_by_id(self, id: int) -> Auction:
        """Return the auction with *id*, creating and registering a new
        Auction if none exists yet."""
        # next() with a default avoids materializing a filtered list.
        auction = next(
            (a for a in self.auctions if a.auction_id == id), None)
        if auction is None:
            auction = Auction(id)
            self.auctions.append(auction)
        return auction

    def process_bid(self, bid: Bid) -> str:
        """Route *bid* to its auction and return the resulting log message."""
        # (fixes the `bid_messsage_log` typo by dropping the intermediate)
        return self.get_auction_by_id(bid.auction_id).process_bid(bid)
|
This class is responsible for processing the outcome auctions' bids.
|
62599023ac7a0e7691f733d0
|
class BinQuery(abc_resource_queries.BinQuery, osid_queries.OsidCatalogQuery):
    """This is the query for searching bins.

    Each method specifies an ``AND`` term while multiple invocations of
    the same method produce a nested ``OR``.

    Only the ``clear_*_terms`` methods are functional here; all matchers
    and sub-query accessors raise ``errors.Unimplemented``.
    """

    def __init__(self, runtime):
        self._runtime = runtime  # OSID runtime environment

    @utilities.arguments_not_none
    def match_resource_id(self, resource_id, match):
        """Match bins by resource ``Id``. (unimplemented)"""
        raise errors.Unimplemented()

    def clear_resource_id_terms(self):
        """Clear all resource-``Id`` query terms."""
        self._clear_terms('resourceId')

    resource_id_terms = property(fdel=clear_resource_id_terms)

    def supports_resource_query(self):
        """Whether a resource sub-query is supported. (unimplemented)"""
        raise errors.Unimplemented()

    def get_resource_query(self):
        """Get the resource sub-query. (unimplemented)"""
        raise errors.Unimplemented()

    resource_query = property(fget=get_resource_query)

    @utilities.arguments_not_none
    def match_any_resource(self, match):
        """Match bins with any resource. (unimplemented)"""
        raise errors.Unimplemented()

    def clear_resource_terms(self):
        """Clear all resource query terms."""
        self._clear_terms('resource')

    resource_terms = property(fdel=clear_resource_terms)

    @utilities.arguments_not_none
    def match_ancestor_bin_id(self, binid, match):
        """Match bins by ancestor bin ``Id``. (unimplemented)"""
        raise errors.Unimplemented()

    def clear_ancestor_bin_id_terms(self):
        """Clear all ancestor bin ``Id`` terms."""
        self._clear_terms('ancestorBinId')

    ancestor_bin_id_terms = property(fdel=clear_ancestor_bin_id_terms)

    def supports_ancestor_bin_query(self):
        """Whether an ancestor bin sub-query is supported. (unimplemented)"""
        raise errors.Unimplemented()

    def get_ancestor_bin_query(self):
        """Get the ancestor bin sub-query. (unimplemented)"""
        raise errors.Unimplemented()

    ancestor_bin_query = property(fget=get_ancestor_bin_query)

    @utilities.arguments_not_none
    def match_any_ancestor_bin(self, match):
        """Match bins with any ancestor bin. (unimplemented)"""
        raise errors.Unimplemented()

    def clear_ancestor_bin_terms(self):
        """Clear all ancestor bin terms."""
        self._clear_terms('ancestorBin')

    ancestor_bin_terms = property(fdel=clear_ancestor_bin_terms)

    @utilities.arguments_not_none
    def match_descendant_bin_id(self, binid, match):
        """Match bins by descendant bin ``Id``. (unimplemented)"""
        raise errors.Unimplemented()

    def clear_descendant_bin_id_terms(self):
        """Clear all descendant bin ``Id`` terms."""
        self._clear_terms('descendantBinId')

    descendant_bin_id_terms = property(fdel=clear_descendant_bin_id_terms)

    def supports_descendant_bin_query(self):
        """Whether a descendant bin sub-query is supported. (unimplemented)"""
        raise errors.Unimplemented()

    def get_descendant_bin_query(self):
        """Get the descendant bin sub-query. (unimplemented)"""
        raise errors.Unimplemented()

    descendant_bin_query = property(fget=get_descendant_bin_query)

    @utilities.arguments_not_none
    def match_any_descendant_bin(self, match):
        """Match bins with any descendant bin. (unimplemented)"""
        raise errors.Unimplemented()

    def clear_descendant_bin_terms(self):
        """Clear all descendant bin terms."""
        self._clear_terms('descendantBin')

    descendant_bin_terms = property(fdel=clear_descendant_bin_terms)

    @utilities.arguments_not_none
    def get_bin_query_record(self, bin_record_type):
        """Get the bin query record for the given type. (unimplemented)"""
        raise errors.Unimplemented()
|
This is the query for searching bins.
Each method specifies an ``AND`` term while multiple invocations of
the same method produce a nested ``OR``.
|
62599023bf627c535bcb239b
|
class JsonConfig:
    """JSON-file-backed configuration validated against a voluptuous schema."""

    def __init__(self, json_file, schema):
        self._file = json_file    # path object pointing at the JSON file
        self._schema = schema     # voluptuous schema used for validation
        self._data = {}
        # Populate _data from disk (or schema defaults) immediately.
        self.read_data()

    def reset_data(self):
        """Reset in-memory data to the schema defaults."""
        try:
            self._data = self._schema({})
        except vol.Invalid as ex:
            _LOGGER.error("Can't reset %s: %s",
                          self._file, humanize_error(self._data, ex))

    def read_data(self):
        """Load data from disk and validate it; fall back to schema
        defaults when the file is unreadable or invalid."""
        if self._file.is_file():
            try:
                self._data = read_json_file(self._file)
            except (OSError, json.JSONDecodeError):
                _LOGGER.warning("Can't read %s", self._file)
                self._data = {}
        # Validation runs even when the file is absent, so defaults apply.
        try:
            self._data = self._schema(self._data)
        except vol.Invalid as ex:
            _LOGGER.error("Can't parse %s: %s",
                          self._file, humanize_error(self._data, ex))
            _LOGGER.warning("Reset %s to default", self._file)
            self._data = self._schema({})

    def save_data(self):
        """Validate and persist in-memory data; on validation failure the
        last version on disk is reloaded instead of being overwritten."""
        try:
            self._data = self._schema(self._data)
        except vol.Invalid as ex:
            _LOGGER.error("Can't parse data: %s",
                          humanize_error(self._data, ex))
            _LOGGER.warning("Reset %s to last version", self._file)
            self.read_data()
            return
        try:
            write_json_file(self._file, self._data)
        # NOTE(review): JSONDecodeError on a *write* path looks unreachable
        # unless write_json_file re-raises it -- confirm against the helper.
        except (OSError, json.JSONDecodeError) as err:
            _LOGGER.error("Can't store config in %s: %s", self._file, err)
|
Hass core object for handling a JSON configuration file.
|
6259902321bff66bcd723b49
|
class AbstractResponseInterceptor(object):
    """Interceptor that runs after the handler is called.

    Subclasses must implement :meth:`process` to run custom logic on the
    handler input and the dispatch output generated by the handler.
    """
    __metaclass__ = ABCMeta  # NOTE: effective only on Python 2

    @abstractmethod
    def process(self, handler_input, dispatch_output):
        """Apply custom post-dispatch logic.  Must be overridden."""
        raise NotImplementedError
|
Interceptor that runs after the handler is called.
The ``process`` method has to be implemented, to run custom logic on
the input and the dispatch output generated after the handler is
executed on the input.
|
62599023d164cc6175821e5d
|
class SVR_function:
    """Objective function for tuning an RBF-kernel SVR.

    Evaluates test-set RMSE of ``sklearn.svm.SVR`` on a 70/30 split of an
    svmlight-format regression dataset, over parameters (C, epsilon, gamma).

    :param bounds: OrderedDict of parameter bounds (defaults provided).
    :param sd: standard deviation, to generate noisy evaluations of the
        function (currently unused by this implementation).
    :param data_path: path to the svmlight dataset; defaults to the
        original hard-coded location for backward compatibility.
    """

    # Original hard-coded dataset location, kept as the default.
    DEFAULT_DATA_PATH = "F:\\Data\\regression\\abalone_scale"

    def __init__(self, bounds=None, sd=None, data_path=None):
        self.input_dim = 3
        if bounds is None:  # `is None`, not `== None`
            self.bounds = OrderedDict(
                [('C', (0.1, 1000)), ('epsilon', (0.000001, 1)),
                 ('gamma', (0.00001, 5))])
        else:
            self.bounds = bounds
        self.min = [(0.) * self.input_dim]
        self.fmin = 0
        self.ismax = -1
        self.name = 'SVR_function'
        self.data_path = data_path or self.DEFAULT_DATA_PATH

    def get_data(self, mystr):
        """Load an svmlight file; return (features, targets)."""
        data = load_svmlight_file(mystr)
        return data[0], data[1]

    def run_SVR(self, X, X_train, y_train, X_test, y_test):
        """Fit an SVR with X = (C, epsilon, gamma) and return test RMSE."""
        x1, x2, x3 = X[0], X[1], X[2]
        # Clamp epsilon and gamma away from zero to keep SVR valid.
        if x3 < 0.000001:
            x3 = 0.000001
        if x2 < 0.000001:
            x2 = 0.000001
        svr_model = SVR(kernel='rbf', C=x1, epsilon=x2, gamma=x3)
        y_pred = svr_model.fit(X_train, y_train).predict(X_test)
        squared_error = np.mean((y_pred - y_test) ** 2)
        return np.sqrt(squared_error)

    def func(self, X):
        """Evaluate RMSE for one parameter vector or a 2-D batch of them."""
        X = np.asarray(X)
        Xdata, ydata = self.get_data(self.data_path)
        # BUG FIX: np.int was removed from NumPy (deprecated 1.20,
        # removed 1.24); the builtin int is the correct replacement.
        nTrain = int(0.7 * len(ydata))
        X_train, y_train = Xdata[:nTrain], ydata[:nTrain]
        X_test, y_test = Xdata[nTrain + 1:], ydata[nTrain + 1:]
        if len(X.shape) == 1:
            RMSE = self.run_SVR(X, X_train, y_train, X_test, y_test)
        else:
            RMSE = np.apply_along_axis(
                self.run_SVR, 1, X, X_train, y_train, X_test, y_test)
        return RMSE * self.ismax
|
SVR_function: objective function for tuning SVR hyperparameters (C, epsilon, gamma).
:param sd: standard deviation, to generate noisy evaluations of the function.
|
6259902330c21e258be996fd
|
class aeroo_add_print_button(models.TransientModel):
    """Wizard that attaches a "Print" action to an Aeroo report."""
    _name = 'aeroo.add_print_button'
    _description = 'Add print button'

    @api.model
    def _check(self):
        """Return the initial wizard state: 'add', 'exist' or 'exception'."""
        irval_mod = self.env.get('ir.values')
        report = self.env.get(self._context['active_model']).browse(
            self._context['active_id'])
        if report.report_name in special_reports:
            # Special reports must not get a generic print button.
            return 'exception'
        if report.report_wizard:
            act_win_obj = self.env.get('ir.actions.act_window')
            act_win_ids = act_win_obj.search(
                [('res_model', '=', 'aeroo.print_actions')])
            for act_win in act_win_obj.browse(act_win_ids):
                # NOTE(review): eval of a stored context string --
                # assumed trusted (admin-created actions).
                act_win_context = eval(act_win.context, {})
                if act_win_context.get('report_action_id') == report.id:
                    return 'exist'
            return 'add'
        else:
            ids = irval_mod.search(
                [('value', '=', report.type + ',' + str(report.id))])
            if not ids:
                return 'add'
            else:
                return 'exist'

    def do_action(self, cr, uid, ids, context=None):
        """Attach the print action and either reopen the wizard or open
        the created client event.

        BUG FIX: the original signature was ``do_action(self)`` while the
        body uses the old-style ORM API throughout (``cr``, ``uid``,
        ``ids``, ``context``), so every call raised NameError.  The
        old-style signature the body expects is restored here.
        """
        irval_mod = self.pool.get('ir.values')
        this = self.browse(cr, uid, ids[0], context=context)
        report = self.pool.get(context['active_model']).browse(
            cr, uid, context['active_id'], context=context)
        event_id = irval_mod.set_action(
            cr, uid, report.report_name, 'client_print_multi', report.model,
            'ir.actions.report,%d' % context['active_id'])
        if report.report_wizard:
            report._set_report_wizard(report.id)
        this.write({'state': 'done'})
        if not this.open_action:
            return _reopen(self, this.id, this._model)
        irmod_mod = self.pool.get('ir.model.data')
        iract_mod = self.pool.get('ir.actions.act_window')
        mod_id = irmod_mod.search(
            cr, uid, [('name', '=', 'act_values_form_action')])[0]
        res_id = irmod_mod.read(cr, uid, mod_id, ['res_id'])['res_id']
        act_win = iract_mod.read(cr, uid, res_id, [])
        act_win['domain'] = [('id', '=', event_id)]
        act_win['name'] = _('Client Events')
        return act_win

    open_action = fields.Boolean(string='Open added action')
    state = fields.Selection(
        [('add', 'Add'),
         ('exist', 'Exist'),
         ('exception', 'Exception'),
         ('done', 'Done')],
        string='State', index=True, readonly=True, default=_check)
|
Add Print Button
|
62599023a8ecb03325872106
|
class LazySettings(LazyObject):
    """A lazy proxy for either global Django settings or a custom settings
    object.

    The user can manually configure settings prior to using them;
    otherwise Django uses the settings module pointed to by
    DJANGO_SETTINGS_MODULE.
    """
    pass
|
A lazy proxy for either global Django settings or a custom settings object.
The user can manually configure settings prior to using them. Otherwise,
Django uses the settings module pointed to by DJANGO_SETTINGS_MODULE.
|
62599023ac7a0e7691f733d2
|
class Permute(Layer):
    """Permute the dimensions of the input according to a given pattern.

    Useful e.g. for connecting RNNs and convnets together.

    # Arguments
        dims: Tuple of integers.  Permutation pattern, not including the
            samples dimension; indexing starts at 1.
    """

    def __init__(self, dims, **kwargs):
        super(Permute, self).__init__(**kwargs)
        self.dims = tuple(dims)

    @property
    def output_shape(self):
        # Re-order every non-sample axis of the input shape per self.dims.
        shape = list(self.input_shape)
        permuted = copy.copy(shape)
        for out_axis, in_axis in enumerate(self.dims):
            permuted[out_axis + 1] = shape[in_axis]
        return tuple(permuted)

    def get_output(self, train=False):
        inputs = self.get_input(train)
        # Axis 0 (samples) always stays in place.
        return K.permute_dimensions(inputs, (0,) + self.dims)

    def get_config(self):
        config = {'name': self.__class__.__name__, 'dims': self.dims}
        base_config = super(Permute, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
|
Permute the dimensions of the input according to a given pattern.
Useful for e.g. connecting RNNs and convnets together.
# Input shape
Arbitrary. Use the keyword argument `input_shape`
(tuple of integers, does not include the samples axis)
when using this layer as the first layer in a model.
# Output shape
Same as the input shape, but with the dimensions re-ordered according
to the specified pattern.
# Arguments
dims: Tuple of integers. Permutation pattern, does not include the
samples dimension. Indexing starts at 1.
For instance, `(2, 1)` permutes the first and second dimension
of the input.
|
625990236fece00bbaccc8a1
|
class TestSearchQuestionsViewTemplate(TestCaseWithFactory):
    """Test the behavior of SearchQuestionsView.template.

    The +questions view uses 'question-listing.pt' when answers usage is
    LAUNCHPAD and 'unknown-support.pt' otherwise.
    """

    layer = DatabaseFunctionalLayer

    def assertViewTemplate(self, context, file_name):
        # Assert that the +questions view of `context` renders `file_name`.
        view = create_initialized_view(context, '+questions')
        self.assertEqual(
            file_name, os.path.basename(view.template.filename))

    def test_template_product_answers_usage_unknown(self):
        product = self.factory.makeProduct()
        self.assertViewTemplate(product, 'unknown-support.pt')

    def test_template_product_answers_usage_launchpad(self):
        product = self.factory.makeProduct()
        with person_logged_in(product.owner):
            product.answers_usage = ServiceUsage.LAUNCHPAD
        self.assertViewTemplate(product, 'question-listing.pt')

    def test_template_projectgroup_answers_usage_unknown(self):
        product = self.factory.makeProduct()
        project_group = self.factory.makeProject(owner=product.owner)
        with person_logged_in(product.owner):
            product.project = project_group
        self.assertViewTemplate(project_group, 'unknown-support.pt')

    def test_template_projectgroup_answers_usage_launchpad(self):
        # A project group inherits answers usage from its products.
        product = self.factory.makeProduct()
        project_group = self.factory.makeProject(owner=product.owner)
        with person_logged_in(product.owner):
            product.project = project_group
            product.answers_usage = ServiceUsage.LAUNCHPAD
        self.assertViewTemplate(project_group, 'question-listing.pt')

    def test_template_distribution_answers_usage_unknown(self):
        distribution = self.factory.makeDistribution()
        self.assertViewTemplate(distribution, 'unknown-support.pt')

    def test_template_distribution_answers_usage_launchpad(self):
        distribution = self.factory.makeDistribution()
        with person_logged_in(distribution.owner):
            distribution.answers_usage = ServiceUsage.LAUNCHPAD
        self.assertViewTemplate(distribution, 'question-listing.pt')

    def test_template_DSP_answers_usage_unknown(self):
        dsp = self.factory.makeDistributionSourcePackage()
        self.assertViewTemplate(dsp, 'unknown-support.pt')

    def test_template_DSP_answers_usage_launchpad(self):
        # A source package follows its distribution's answers usage.
        dsp = self.factory.makeDistributionSourcePackage()
        with person_logged_in(dsp.distribution.owner):
            dsp.distribution.answers_usage = ServiceUsage.LAUNCHPAD
        self.assertViewTemplate(dsp, 'question-listing.pt')

    def test_template_question_set(self):
        question_set = getUtility(IQuestionSet)
        self.assertViewTemplate(question_set, 'question-listing.pt')
|
Test the behavior of SearchQuestionsView.template
|
6259902321a7993f00c66e65
|
class ListSeed(wx.Panel):
    """Panel presenting the list of seed packages.

    @author rpereira
    """

    def __init__(self, parent):
        wx.Panel.__init__(self, parent,
                          style=(wx.CLIP_CHILDREN | wx.TAB_TRAVERSAL |
                                 wx.FULL_REPAINT_ON_RESIZE))
        self.list_ctrl = wx.ListCtrl(self, wx.NewId(), (0, 0), (0, 0),
                                     wx.LC_REPORT)
        sizer = wx.BoxSizer(wx.VERTICAL)
        sizer.Add(self.list_ctrl, 5, wx.EXPAND, 5)
        self.SetSizer(sizer)
        self.__buildColumn()

    def __buildColumn(self):
        """Create the single 'Seed Package' column."""
        info = wx.ListItem()
        info.m_mask = wx.LIST_MASK_TEXT | wx.LIST_MASK_FORMAT
        info.m_format = 0
        info.m_text = "Seed Package"
        self.list_ctrl.InsertColumnInfo(0, info)
        self.list_ctrl.SetColumnWidth(0, 150)

    def from_list(self, list_pkg):
        """Populate the list control from an iterable of package names."""
        # enumerate replaces the original hand-maintained counter.
        for row, pkg in enumerate(list_pkg):
            self.list_ctrl.InsertStringItem(row, pkg)
|
@author rpereira
Panel da lista de seed de pacotes
|
625990238c3a8732951f7440
|
class Atm():
    """Represents an ATM location record, with class methods for
    constructing instances from API responses."""

    def __init__(self, **kwargs):
        # Accept arbitrary fields and expose each one as an attribute.
        for name in kwargs:
            setattr(self, name, kwargs[name])

    def __repr__(self):
        clean_dict = self.__dict__.copy()
        return '{0}({1})'.format(self.__class__, clean_dict)

    @classmethod
    def from_response(cls, client, response):
        """Build a single Atm from one API response record."""
        location = response['atmLocation']
        address = location.get('address')
        coords = location.get('coordinates')
        return cls(
            client=client,
            json=response,
            address_city=address.get('city'),
            address_country=address.get('country'),
            address_postal_code=address.get('postalCode'),
            address_state=address.get('state'),
            address_street=address.get('street'),
            latitude=coords.get('latitude'),
            longitude=coords.get('longitude'),
            id=location['id'],
            isAvailable24Hours=location['isAvailable24Hours'],
            isDepositAvailable=location['isDepositAvailable'],
            isHandicappedAccessible=location['isHandicappedAccessible'],
            isOffPremise=location['isOffPremise'],
            isSeasonal=location['isSeasonal'],
            languageType=location['languageType'],
            locationDescription=location['locationDescription'],
            logoName=location['logoName'],
            name=location['name'],
            distance=response['distance'],
        )

    @classmethod
    def multiple_from_response(cls, client, response):
        """Build a list of Atm objects from a list of response records."""
        return [cls.from_response(client, record) for record in response]

    @classmethod
    def locate(cls, client=None, **kwargs):
        """Query the client's atms endpoint and return Atm objects."""
        response = client.atms.locate(**kwargs)
        return cls.multiple_from_response(client, response['atms'])
|
Represents an ATM location record, with class methods for constructing
instances from API responses.
|
62599023d164cc6175821e5f
|
class TensorProducts(TensorProductsCategory):
    """The category of modules constructed by tensor product of modules."""

    @cached_method
    def extra_super_categories(self):
        # The tensor product stays in the base category itself.
        return [self.base_category()]
|
The category of modules constructed by tensor product of modules.
|
625990231d351010ab8f4a00
|
class MaxEnterError(Exception):
    """Raised when the maximum number of keyword-entry attempts is reached."""

    def __init__(self, ErrorInfo):
        # BUG FIX: the original called super().__init__(self), which put the
        # exception object itself into args; pass the message instead so
        # args/repr are meaningful.
        super().__init__(ErrorInfo)
        self.errorinfo = ErrorInfo

    def __str__(self):
        return self.errorinfo
|
输入关键字最大尝试次数
|
62599023c432627299fa3edc
|
@attr.s(frozen=True, slots=True)
class CompositionDescription:
    """Composition entry for a compositional PVT model.

    ``component`` must name a component created on the same
    PvtModelCompositionalDescription (light_components / heavy_components).
    """
    component: str = attr.ib(validator=instance_of(str))
    # Molar fraction of this component; defaults to 0 mol/mol.
    molar_fraction = attrib_scalar(default=Scalar(0, "mol/mol"))
    # Reference enthalpy; defaults to 0 J/mol.
    reference_enthalpy = attrib_scalar(default=Scalar(0, "J/mol"))
|
:ivar component:
Name of the component available created on:
PvtModelCompositionalDescription.light_components
PvtModelCompositionalDescription.heavy_components
.. note:: CompositionDescription can only refer to components created from the same PvtModelCompositionalDescription
.. include:: /alfacase_definitions/CompositionDescription.txt
.. include:: /alfacase_definitions/list_of_unit_for_mole_per_mole.txt
.. include:: /alfacase_definitions/list_of_unit_for_molar_thermodynamic_energy.txt
|
625990236fece00bbaccc8a3
|
class UnsetRouter(command.Command):
    """Unset router properties (currently: remove static routes)."""

    def get_parser(self, prog_name):
        parser = super(UnsetRouter, self).get_parser(prog_name)
        parser.add_argument(
            '--route',
            metavar='destination=<subnet>,gateway=<ip-address>',
            action=parseractions.MultiKeyValueAction,
            dest='routes',
            default=None,
            required_keys=['destination', 'gateway'],
            help=_("Routes to be removed from the router "
                   "destination: destination subnet (in CIDR notation) "
                   "gateway: nexthop IP address "
                   "(repeat option to unset multiple routes)"))
        parser.add_argument(
            'router',
            metavar="<router>",
            help=_("Router to modify (name or ID)")
        )
        return parser

    def take_action(self, parsed_args):
        client = self.app.client_manager.network
        obj = client.find_router(parsed_args.router, ignore_missing=False)
        # Work on a copy so a failed removal leaves the router untouched.
        tmp_routes = copy.deepcopy(obj.routes)
        attrs = {}
        if parsed_args.routes:
            try:
                for route in parsed_args.routes:
                    tmp_routes.remove(route)
            except ValueError:
                # `route` is the entry whose removal just failed.
                msg = (_("Router does not contain route %s") % route)
                raise exceptions.CommandError(msg)
            # The API expects 'nexthop' where the CLI uses 'gateway'.
            for route in tmp_routes:
                route['nexthop'] = route.pop('gateway')
            attrs['routes'] = tmp_routes
        if attrs:
            client.update_router(obj, **attrs)
|
Unset router properties
|
62599023d164cc6175821e61
|
class ObjectReplicationPolicy(Resource):
    """The replication policy between two storage accounts.

    Multiple rules can be defined in one policy.  ``policy_id`` and
    ``enabled_time`` are populated by the server and ignored on requests.
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'policy_id': {'readonly': True},
        'enabled_time': {'readonly': True},
    }

    # Maps python attribute -> wire key and (de)serialization type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'policy_id': {'key': 'properties.policyId', 'type': 'str'},
        'enabled_time': {'key': 'properties.enabledTime', 'type': 'iso-8601'},
        'source_account': {'key': 'properties.sourceAccount', 'type': 'str'},
        'destination_account': {'key': 'properties.destinationAccount', 'type': 'str'},
        'rules': {'key': 'properties.rules', 'type': '[ObjectReplicationPolicyRule]'},
    }

    def __init__(
        self,
        *,
        source_account: Optional[str] = None,
        destination_account: Optional[str] = None,
        rules: Optional[List["ObjectReplicationPolicyRule"]] = None,
        **kwargs
    ):
        """
        :keyword source_account: Source account name.
        :keyword destination_account: Destination account name.
        :keyword rules: The storage account object replication rules.
        """
        super(ObjectReplicationPolicy, self).__init__(**kwargs)
        # Server-populated, read-only fields start as None.
        self.policy_id = None
        self.enabled_time = None
        self.source_account = source_account
        self.destination_account = destination_account
        self.rules = rules
|
The replication policy between two storage accounts. Multiple rules can be defined in one policy.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Fully qualified resource ID for the resource. Ex -
/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
"Microsoft.Storage/storageAccounts".
:vartype type: str
:ivar policy_id: A unique id for object replication policy.
:vartype policy_id: str
:ivar enabled_time: Indicates when the policy is enabled on the source account.
:vartype enabled_time: ~datetime.datetime
:ivar source_account: Required. Source account name. It should be full resource id if
allowCrossTenantReplication set to false.
:vartype source_account: str
:ivar destination_account: Required. Destination account name. It should be full resource id if
allowCrossTenantReplication set to false.
:vartype destination_account: str
:ivar rules: The storage account object replication rules.
:vartype rules: list[~azure.mgmt.storage.v2021_06_01.models.ObjectReplicationPolicyRule]
|
62599023d18da76e235b78c3
|
class NewsTag(models.Model):
    """Tag for news items."""
    label = models.CharField(max_length=100)

    def __unicode__(self):
        # Display the tag by its label (Python 2 Django convention).
        return self.label
|
Тэг.
|
625990231d351010ab8f4a02
|
class print_purchase_report(osv.TransientModel):
    """OpenERP Wizard: print.purchase.report."""
    _name = "print.purchase.report"

    def __get_company_object(self, cr, uid):
        """Return the current user's company or raise if none is set."""
        user = self.pool.get('res.users').browse(cr, uid, uid)
        if not user.company_id:
            raise except_osv(_('ERROR !'), _(
                'There is no company configured for this user'))
        return user.company_id

    def __get_report_object(self, cr, uid, context):
        """Pick the report action for the active purchase order.

        Approved orders use the company's purchase report, other states
        use the purchase request report; falls back to the first report
        registered for purchase.order.  (Extracted: this logic was
        duplicated verbatim in _get_report and _get_report_name.)
        """
        purch_order = self.pool.get("purchase.order").browse(
            cr, uid, context['active_ids'][0])
        company = self.__get_company_object(cr, uid)
        if purch_order.state == 'approved':
            report = company.purchase_report_id
        else:
            report = company.purchase_request_id
        if not report:
            rep_id = self.pool.get("ir.actions.report.xml").search(
                cr, uid, [('model', '=', 'purchase.order'), ],
                order="id")[0]
            report = self.pool.get(
                "ir.actions.report.xml").browse(cr, uid, rep_id)
        return report

    def _get_company(self, cr, uid, context=None):
        """Default for the 'company' field: the company's partner name."""
        return self.__get_company_object(cr, uid).partner_id.name

    def _get_report(self, cr, uid, context=None):
        """Render the selected report and return it base64-encoded."""
        report = self.__get_report_object(cr, uid, context)
        service = netsvc.LocalService('report.' + report.report_name)
        (result, format) = service.create(
            cr, uid, context['active_ids'],
            {'model': context['active_model']}, {})
        return base64.encodestring(result)

    def _get_report_name(self, cr, uid, context):
        """Return the technical name of the selected report."""
        return self.__get_report_object(cr, uid, context).report_name

    def print_report(self, cr, uid, ids, context=None):
        """Return the report action to execute for the active records."""
        return {'type': 'ir.actions.report.xml',
                'report_name': self._get_report_name(cr, uid, context),
                'datas': {'ids': context['active_ids']}}

    _columns = {
        # BUG FIX: the original passed `requied=True` (typo), which the
        # field constructor silently ignored, so the flag never applied.
        'company': fields.char('Company', 64, readonly=True, required=True),
        'report_format': fields.binary("Report", readonly=True,
                                       required=True),
    }
    _defaults = {
        'company': _get_company,
        'report_format': _get_report,
    }
|
OpenERP Wizard : print.purchase.report
|
62599023ac7a0e7691f733d6
|
class Ball:
    """A bouncing ball with random size, position and colour.

    radius: random 20-50; (x, y): centre coordinates, random within the
    canvas; dx, dy: per-step movement; colour picked at random from a
    fixed palette.
    """

    def __init__(self):
        self.radius = randint(20, 50)
        self.x = randint(self.radius, root_width - self.radius)
        self.y = randint(self.radius, root_height - self.radius)
        self.dx, self.dy = 2, 2
        self.colors = ['yellow', 'green', 'grey', 'blue', 'black',
                       'red', 'magenta']
        self.ball_draw()
        self.ball_move()

    def ball_draw(self):
        """Create the oval on the shared canvas."""
        r = self.radius
        self.ball = canvas.create_oval(
            self.x - r, self.y - r, self.x + r, self.y + r,
            fill=choice(self.colors), width=0)

    def ball_move(self):
        """Advance one step, bounce off canvas edges, then reschedule."""
        self.x += self.dx
        self.y += self.dy
        if self.x <= self.radius or self.x >= root_width - self.radius:
            self.dx = -self.dx
        if self.y <= self.radius or self.y >= root_height - self.radius:
            self.dy = -self.dy
        canvas.move(self.ball, self.dx, self.dy)
        root.after(50, self.ball_move)
|
Initiate ball with movement
radius - random 20 - 50
x, y - coordinates of ball's centre (random in canvas)
dx, dy - move steps
colors - list of colors, selected randomly
|
62599023c432627299fa3ede
|
class Default(HttpLocust):
    """Default Locust class: runs DefaultBehavior with a 3-10 s wait."""
    task_set = DefaultBehavior
    min_wait = 3000   # milliseconds
    max_wait = 10000  # milliseconds
|
Default Locust Class
|
625990238c3a8732951f7444
|
class SimulationTime(object):
    """Provides access to simulation time information from MF6 tdis data.

    Parameters
    ----------
    simdata :
        simulation data object whose ``mfdata`` holds parsed tdis blocks
    """

    def __init__(self, simdata):
        self.simdata = simdata

    def get_total_time(self):
        """Return the total simulation time (sum of all period lengths)."""
        period_data = self.simdata.mfdata[
            ('tdis', 'perioddata', 'perioddata')].get_data()
        # perlen is the first entry of each period record; start from 0.0
        # so the result is always a float, as before.
        return sum((period[0] for period in period_data), 0.0)

    def get_num_stress_periods(self):
        """Return nper, the number of stress periods."""
        return self.simdata.mfdata[('tdis', 'dimensions', 'nper')].get_data()

    def get_sp_time_steps(self, sp_num):
        """Return the number of time steps in stress period *sp_num*.

        Raises FlopyException when *sp_num* is out of range.
        """
        period_data = self.simdata.mfdata[
            ('tdis', 'perioddata', 'perioddata')].get_data()
        if len(period_data) <= sp_num:
            raise FlopyException('Stress period {} was requested but does not '
                                 'exist.'.format(sp_num))
        return period_data[sp_num][1]
|
Provides access to simulation time information from MF6 tdis data
Parameters
----------
Attributes
----------
Methods
-------
See Also
--------
Notes
-----
Examples
--------
|
62599023d164cc6175821e64
|
class MainPage(tornado.web.RequestHandler):
    """Serves the main HTML page."""

    def get(self):
        fc = self.application.fig_container
        figure = self.application.fig_container['fig1']
        # Demonstrates driving a second figure through its manager.
        fc.get_manager('fig2').set_window_title("HEY!!")
        fc.flush('fig2')
        loop = tornado.ioloop.IOLoop.instance()  # NOTE(review): unused
        host = self.request.host
        ws_uri = f"ws://{host}"
        # NOTE(review): `content` is built but never used; render() below
        # uses the connect.html template instead.
        content = html_content % {
            "ws_uri": ws_uri,
            "fig_id": id(figure)}
        self.render("connect.html", ws_uri=ws_uri, fc=fc, host=host,
                    rproxy="apps/tektronix")
|
Serves the main HTML page.
|
62599023c432627299fa3ee0
|
class Location(ndb.Model):
    """Datastore model representing a location (WOEID-based)."""

    country = ndb.StringProperty(indexed=True)
    countryCode = ndb.StringProperty(indexed=True)
    name = ndb.StringProperty(indexed=True)
    parentid = ndb.IntegerProperty(indexed=True)
    placeTypeCode = ndb.IntegerProperty(indexed=True)
    placeTypeName = ndb.StringProperty(indexed=True)
    url = ndb.StringProperty(indexed=True)
    woeid = ndb.IntegerProperty(indexed=True)

    def to_dict(self):
        """Return the base model dict plus a nested ``placeType`` entry."""
        result = super(Location, self).to_dict()
        result['placeType'] = {'code': self.placeTypeCode,
                               'name': self.placeTypeName}
        return result
|
A model for representing locations
|
6259902321a7993f00c66e6b
|
class ForestRegressor(six.with_metaclass(ABCMeta, BaseForest, RegressorMixin)):
    """Base class for forest-of-trees regressors.

    Warning: this class should not be used directly; use derived classes.
    """

    @abstractmethod
    def __init__(self,
                 base_estimator,
                 n_estimators=10,
                 estimator_params=tuple(),
                 bootstrap=False,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None,
                 verbose=0,
                 warm_start=False):
        super(ForestRegressor, self).__init__(
            base_estimator,
            n_estimators=n_estimators,
            estimator_params=estimator_params,
            bootstrap=bootstrap,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start)

    def predict(self, X):
        """Predict regression targets for ``X`` as the mean of the
        individual trees' predictions."""
        check_is_fitted(self, 'estimators_')
        # Convert/validate the input the same way fit() did.
        X = self._validate_X_predict(X)
        n_jobs, _, _ = _partition_estimators(self.n_estimators, self.n_jobs)
        if self.n_outputs_ > 1:
            y_hat = np.zeros((X.shape[0], self.n_outputs_), dtype=np.float64)
        else:
            y_hat = np.zeros((X.shape[0]), dtype=np.float64)
        # Threads accumulate each estimator's prediction into the shared
        # y_hat buffer, then the sum is averaged below.
        Parallel(n_jobs=n_jobs, verbose=self.verbose, backend="threading")(
            delayed(accumulate_prediction)(e.predict, X, [y_hat])
            for e in self.estimators_)
        y_hat /= len(self.estimators_)
        return y_hat

    def _set_oob_score(self, X, y):
        """Compute out-of-bag predictions and the OOB R^2 score."""
        X = check_array(X, dtype=DTYPE, accept_sparse='csr')
        n_samples = y.shape[0]
        predictions = np.zeros((n_samples, self.n_outputs_))
        n_predictions = np.zeros((n_samples, self.n_outputs_))
        for estimator in self.estimators_:
            # Samples not drawn in this estimator's bootstrap sample.
            unsampled_indices = _generate_unsampled_indices(
                estimator.random_state, n_samples)
            p_estimator = estimator.predict(
                X[unsampled_indices, :], check_input=False)
            if self.n_outputs_ == 1:
                p_estimator = p_estimator[:, np.newaxis]
            predictions[unsampled_indices, :] += p_estimator
            n_predictions[unsampled_indices, :] += 1
        if (n_predictions == 0).any():
            warn("Some inputs do not have OOB scores. "
                 "This probably means too few trees were used "
                 "to compute any reliable oob estimates.")
            # Avoid division by zero for never-unsampled rows.
            n_predictions[n_predictions == 0] = 1
        predictions /= n_predictions
        self.oob_prediction_ = predictions
        if self.n_outputs_ == 1:
            self.oob_prediction_ = self.oob_prediction_.reshape((n_samples, ))
        # Average the per-output R^2 scores.
        self.oob_score_ = 0.0
        for k in range(self.n_outputs_):
            self.oob_score_ += r2_score(y[:, k], predictions[:, k])
        self.oob_score_ /= self.n_outputs_
|
Base class for forest of trees-based regressors.
Warning: This class should not be used directly. Use derived classes
instead.
|
62599023d164cc6175821e66
|
class DataSchemeWennerAlpha(DataSchemeBase):
    """Wenner alpha (C--P--P--C) data scheme with equal distances."""

    def __init__(self):
        DataSchemeBase.__init__(self)
        self.name = "Wenner Alpha (C-P-P-C)"
        self.prefix = "wa"
        self.type = Pseudotype.WennerAlpha

    def createData(self, **kwargs):
        """Build all Wenner-alpha measurement configurations and return
        the validated data container."""
        nElectrodes = self.nElectrodes_
        # Separation is capped by the configured maximum.
        maxSep = min(nElectrodes - 2, self.maxSeparation)
        self.data_.resize(nElectrodes * nElectrodes)
        count = 0
        for sep in range(1, maxSep + 1):
            for a in range((nElectrodes - 2) - sep):
                # Electrodes laid out as A--M--N--B with equal spacing.
                m = a + sep
                n = m + sep
                b = n + sep
                count = self.createDatum_(a, b, m, n, count)
        self.data_.removeInvalid()
        return self.data_
|
Wenner alpha (C--P--P--C) data scheme with equal distances.
|
62599023a8ecb0332587210e
|
class DataSourceDetails(Model):
    """Details of a service-endpoint data source.

    :param data_source_name: Data source name.
    :param data_source_url: Data source url.
    :param headers: Request headers (list of AuthorizationHeader).
    :param parameters: Parameters of the data source (dict of str).
    :param resource_url: Resource url of the data source.
    :param result_selector: Result selector.
    """

    # Maps python attribute names to their wire keys and msrest types.
    _attribute_map = {
        'data_source_name': {'key': 'dataSourceName', 'type': 'str'},
        'data_source_url': {'key': 'dataSourceUrl', 'type': 'str'},
        'headers': {'key': 'headers', 'type': '[AuthorizationHeader]'},
        'parameters': {'key': 'parameters', 'type': '{str}'},
        'resource_url': {'key': 'resourceUrl', 'type': 'str'},
        'result_selector': {'key': 'resultSelector', 'type': 'str'}
    }

    def __init__(self, data_source_name=None, data_source_url=None,
                 headers=None, parameters=None, resource_url=None,
                 result_selector=None):
        super(DataSourceDetails, self).__init__()
        self.data_source_name = data_source_name
        self.data_source_url = data_source_url
        self.headers = headers
        self.parameters = parameters
        self.resource_url = resource_url
        self.result_selector = result_selector
|
DataSourceDetails.
:param data_source_name: Gets or sets the data source name.
:type data_source_name: str
:param data_source_url: Gets or sets the data source url.
:type data_source_url: str
:param headers: Gets or sets the request headers.
:type headers: list of :class:`AuthorizationHeader <service-endpoint.v4_1.models.AuthorizationHeader>`
:param parameters: Gets the parameters of data source.
:type parameters: dict
:param resource_url: Gets or sets the resource url of data source.
:type resource_url: str
:param result_selector: Gets or sets the result selector.
:type result_selector: str
|
625990231d351010ab8f4a06
|
class DomainModel(TimestampMixin, Model):
    """Abstract base class for domain models, combining the timestamp
    fields from TimestampMixin with the base Model."""

    # Marks this class as abstract so no table is mapped for it
    # (presumably SQLAlchemy-style -- confirm against the Model base).
    __abstract__ = True
|
Abstract base model class for defining domain models providing a UUID
backed primary key field and timestamp fields.
|
625990233eb6a72ae038b554
|
class AlphaExample():
    """A class to facilitate printing Krippendorff's-alpha examples."""

    def __init__(self, separator=': ', **kwargs):
        self.target = kwargs
        self.data = np.array(kwargs['data'])
        self.difference = krippendorff.Difference(*kwargs['args'])
        self.separator = separator
        self.labels = ('Data', 'Data type', 'Difference method',
                       'Observed agreement', 'Expected agreement',
                       'Alpha score')
        # NaN entries (v != v) are excluded from the value set.
        self.values = {v for v in self.data.flatten() if v == v}
        self.codebook = {v: i for i, v in enumerate(self.values)}
        self.inverse_codebook = dict(enumerate(self.values))
        self.cm = krippendorff.get_coincidence_matrix(self.data, self.codebook)
        self.d = krippendorff.delta(self.cm, self.inverse_codebook,
                                    self.difference)
        self.observed = krippendorff.observation(self.cm, self.d)
        self.expected = krippendorff.expectation(self.cm, self.d)
        self.alpha = krippendorff.alpha(self.data, self.difference)

    def __repr__(self):
        label_width = max(len(label) for label in self.labels)
        indent = ' ' * (label_width + len(self.separator))
        # Indent continuation rows of the matrix under the value column.
        matrix = ('\n' + indent).join(self.prettify(self.data).split('\n'))
        template = '{{:>{w}}}{sep}{{}}'.format(w=label_width,
                                               sep=self.separator)
        values = (matrix, self.difference.dtype.__name__,
                  self.difference.method, self.observed, self.expected,
                  self.alpha)
        return '\n'.join(template.format(label, value)
                         for label, value in zip(self.labels, values))

    @staticmethod
    def prettify(data, rowsep=(',\n ', '\n'), colsep=(', ', ' ')):
        """Strip numpy's ``array(...)`` wrapper and separators from repr."""
        raw = repr(data)
        trimmed = raw.strip('array()')[1:-1]
        return trimmed.replace(*rowsep).replace(*colsep)
|
A class to facilitate printing alpha examples.
|
62599023a4f1c619b294f4e4
|
class EndStopCheck(TargetCheck):
    """Check that source and translation both end with a full stop."""

    check_id = "end_stop"
    name = _("Trailing stop")
    description = _("Source and translation do not both end with a full stop")
    severity = "warning"

    def check_single(self, source, target, unit):
        # Very short sources produce too many false positives.
        if len(source) <= 4:
            return False
        if not target:
            return False
        # Thai and Lojban do not use a full stop.
        if self.is_language(unit, ("th", "jbo")):
            return False
        # Accept an ellipsis character as translation of "...".
        if source.endswith("...") and target[-1] == "…":
            return False
        # Japanese: colon/semicolon sources may end in several marks.
        if self.is_language(unit, ("ja",)) and source[-1] in (":", ";"):
            return self.check_chars(source, target, -1, (";", ":", ":", ".", "。"))
        # Armenian accepts a wider set of sentence-final marks.
        if self.is_language(unit, ("hy",)):
            return self.check_chars(
                source,
                target,
                -1,
                (".", "。", "।", "۔", "։", "·", "෴", "។", ":", "՝", "?", "!", "`"),
            )
        # Devanagari/Bengali/Odia danda variants.
        if self.is_language(unit, ("hi", "bn", "or")):
            return self.check_chars(source, target, -1, (".", "।", "|"))
        # Default set of full-stop characters across scripts.
        return self.check_chars(
            source, target, -1, (".", "。", "।", "۔", "։", "·", "෴", "។")
        )
|
Check for final stop.
|
625990236fece00bbaccc8a9
|
class RiskModel(object, metaclass=ABCMeta):
    """Abstract interface for a RiskModel callable.

    Implementations take in an Asset universe (and optionally a
    DataHandler) and adjust the weights produced by an AlphaModel;
    ``__call__`` returns a dictionary keyed by Asset with a scalar value.

    BUG FIX: the original declared ``__metaclass__ = ABCMeta``, the
    Python 2 syntax, which Python 3 silently ignores -- so the abstract
    contract was never enforced and RiskModel could be instantiated.
    Using ``metaclass=ABCMeta`` restores the intended behavior.
    """

    @abstractmethod
    def __call__(self, dt, weights):
        """Return adjusted weights for ``dt`` given the input ``weights``."""
        raise NotImplementedError(
            "Should implement __call__()"
        )
|
Abstract interface for an RiskModel callable.
A derived-class instance of RiskModel takes in an Asset
Universe and an optional DataHandler instance in order
to modify weights on Assets generated by an AlphaModel.
These adjusted weights are used within the PortfolioConstructionModel
to generate new target weights for the portfolio.
Implementing __call__ produces a dictionary keyed by
Asset and with a scalar value as the signal.
|
62599023507cdc57c63a5c96
|
class OhUnknownException(Exception):
    """Raised when some improper condition has been reached and no
    further information is available."""

    pass
|
This exception should be raised when some improper condition has been
reached and no further information is available
|
62599023be8e80087fbbff68
|
class VectorVelocityDataParticleKey(BaseEnum):
    """Keys for the fields of vector velocity data particles."""

    ANALOG_INPUT2 = "analog_input_2"
    COUNT = "ensemble_counter"
    PRESSURE = "seawater_pressure"
    ANALOG_INPUT1 = "analog_input_1"
    # Turbulent velocity components per beam (east/north/vertical).
    VELOCITY_BEAM1 = "turbulent_velocity_east"
    VELOCITY_BEAM2 = "turbulent_velocity_north"
    VELOCITY_BEAM3 = "turbulent_velocity_vertical"
    AMPLITUDE_BEAM1 = "amplitude_beam_1"
    AMPLITUDE_BEAM2 = "amplitude_beam_2"
    AMPLITUDE_BEAM3 = "amplitude_beam_3"
    CORRELATION_BEAM1 = "correlation_beam_1"
    CORRELATION_BEAM2 = "correlation_beam_2"
    CORRELATION_BEAM3 = "correlation_beam_3"
|
Velocity data particles.
|
6259902366673b3332c312de
|
class Deallocate(Statement):
    """Fortran DEALLOCATE statement.

    DEALLOCATE ( <allocate-object-list> [ , <dealloc-opt-list> ] )
    """

    match = re.compile(r'deallocate\s*\(.*\)\Z', re.I).match

    def process_item(self):
        # Drop the 'deallocate' keyword (10 chars) and the surrounding
        # parentheses, then split the comma-separated object list.
        line = self.item.get_line()[10:].lstrip()[1:-1].strip()
        self.items = specs_split_comma(line, self.item)
        return

    def tofortran(self, isfix=None):
        return self.get_indent_tab(isfix=isfix) + 'DEALLOCATE (%s)' % (', '.join(self.items))

    def analyze(self):
        # No semantic analysis needed for this statement.
        return
|
DEALLOCATE ( <allocate-object-list> [ , <dealloc-opt-list> ] )
<allocate-object> = <variable-name>
| <structure-component>
<structure-component> = <data-ref>
<dealloc-opt> = STAT = <stat-variable>
| ERRMSG = <errmsg-variable>
|
625990235e10d32532ce407d
|
class Test17(unittest.TestCase):
    """The CBC padding oracle attack (challenge 17)."""

    def test_cbc_padding_oracle_attack(self) -> None:
        # Recover the plaintext from the oracle's ciphertext; a truthy
        # result means the attack succeeded.
        c = m17.cbc_oracle()
        m = m17.attack(c)
        self.assertTrue(m)
|
The CBC padding oracle
|
625990231d351010ab8f4a08
|
class SetPasswordForm(auth_forms.SetPasswordForm):
    """User set-password form with a 6-character minimum length and a
    matching-password check.

    BUG FIX: corrected the user-facing typo "at lease 6" -> "at least 6"
    in both min_length error messages.
    """

    new_password1 = forms.CharField(
        label=_('New password'),
        min_length=6,
        widget=forms.PasswordInput(attrs={'class': 'text'}),
        error_messages={'required': _('Password required'),
                        'min_length': _('Password length should be at least 6 '
                                        'symbols')})
    new_password2 = forms.CharField(
        label=_('Password repeat:'),
        min_length=6,
        widget=forms.PasswordInput(attrs={'class': 'text'}),
        error_messages={'required': _('Password repeat required'),
                        'min_length': _('Password length should be at least 6 '
                                        'symbols')})

    class Media:
        # Form-specific static assets.
        css = {'all': ('css/forms/set_password_form.css',)}
        js = ('js/forms/set_password_form.js',)

    def clean_new_password2(self):
        """Validate that both password fields match; return the repeat."""
        password1 = self.cleaned_data.get('new_password1')
        password2 = self.cleaned_data.get('new_password2')
        if password1 and password2:
            if password1 != password2:
                raise forms.ValidationError(_('Passwords are not equal'))
        return password2
|
User set password form.
|
62599023ac7a0e7691f733dc
|
class AudioMetaData(Mapping):
    """Mapping-style view of an audio file's tag metadata.

    NOTE(review): ``__iter__`` yields (key, value) pairs rather than
    just keys, which deviates from the Mapping protocol; ``__repr__``
    and ``fromdirectory`` rely on this, so it is documented rather
    than changed.
    """

    def __init__(self, path):
        # path: pathlib.Path-like object for the audio file.
        self._path = path
        self._name = path.stem
        self._extension = path.suffix
        self._collection = {}
        try:
            # File() returns None for unrecognized files, making the
            # .tags access below raise AttributeError.
            self._tag = File(path).tags
        except AttributeError:
            raise ValueError(f'Can\'t open "{path}". Please check whether it is an appropriate audio file.')

    def get(self, key):
        # Dict-style lookup; None when the key is missing.
        return self._collection.get(key)

    def items(self):
        return self._collection.items()

    def keys(self):
        return self._collection.keys()

    def values(self):
        return self._collection.values()

    def __getitem__(self, item):
        return self._collection[item]

    def __iter__(self):
        # Yields (key, value) pairs sorted by key -- see class note.
        for key, value in sorted(self._collection.items(), key=itemgetter(0)):
            yield key, value

    def __len__(self):
        return len(self._collection)

    def __repr__(self):
        if not self:
            return "%s()" % (self.__class__.__name__,)
        return "%s(%r)" % (self.__class__.__name__, list(self))

    @classmethod
    def fromdirectory(cls, path, *, excluded=None):
        # Yield the list of tag pairs for every audio file under path.
        for file in Files(path, excluded=excluded):
            yield list(cls(file))

    @property
    def name(self):
        # File name without its extension.
        return self._name

    @property
    def extension(self):
        # File suffix, including the leading dot.
        return self._extension

    @property
    def path(self):
        return self._path
|
Undocumented.
|
625990238c3a8732951f744a
|
class TestExpectations(object):
    """Model of the WebKit TestExpectations file content for analysis.

    Fetches the file over HTTP and parses each non-comment line with the
    WebKit layout-test parser; results land in
    ``all_test_expectation_info`` as
    {'<test name>': [{'<modifier>': True, ..., 'Platforms': [...],
    'Bugs': [...]}]}.
    """

    def __init__(self, url=DEFAULT_TEST_EXPECTATIONS_LOCATION):
        self.all_test_expectation_info = {}
        resp = urllib2.urlopen(url)
        if resp.code != 200:
            raise NameError('Test expectation file does not exist in %s' % url)
        for line in resp.read().split('\n'):
            line = line.strip()
            # Skip comment lines.
            if line.startswith('#'):
                continue
            testname, te_info = self.ParseLine(line)
            if not testname or not te_info:
                continue
            if testname in self.all_test_expectation_info:
                # Merge duplicates: list-valued fields concatenate,
                # scalar fields overwrite.
                # NOTE(review): ``k in ...[testname]`` tests membership
                # in the outer LIST (of one dict), not in the dict --
                # likely intended ``[testname][0]``; left as-is.
                for k in te_info.keys():
                    if (isinstance(te_info[k], list) and
                            k in self.all_test_expectation_info[testname]):
                        self.all_test_expectation_info[testname][0][k] += te_info[k]
                    else:
                        self.all_test_expectation_info[testname][0][k] = te_info[k]
            else:
                self.all_test_expectation_info[testname] = [te_info]

    @staticmethod
    def ParseLine(line):
        """Parse one TestExpectations line into (test name, info dict),
        or (None, None) for an invalid line."""
        test_expectation_info = {}
        parsed = TestExpectationParser._tokenize_line('TestExpectations', line, 0)
        if parsed.is_invalid():
            return None, None
        test_expectation_info['Comments'] = parsed.comment or ''
        remaining_modifiers = list(parsed.modifiers)
        test_expectation_info['Bugs'] = []
        for m in parsed.modifiers:
            if m.startswith('BUG'):
                test_expectation_info['Bugs'].append(m)
                remaining_modifiers.remove(m)
            elif m in KNOWN_TE_KEYWORDS:
                test_expectation_info[m] = True
                remaining_modifiers.remove(m)
        # Whatever modifiers remain are treated as platform names.
        test_expectation_info['Platforms'] = list(remaining_modifiers)
        for m in parsed.expectations + remaining_modifiers:
            test_expectation_info[m] = True
        return parsed.name, test_expectation_info
|
A class to model the content of test expectation file for analysis.
This class retrieves the TestExpectations file via HTTP from WebKit and uses
the WebKit layout test processor to process each line.
The resulting dictionary is stored in |all_test_expectation_info| and looks
like:
{'<test name>': [{'<modifier0>': True, '<modifier1>': True, ...,
'Platforms: ['<platform0>', ... ], 'Bugs': ['....']}]}
Duplicate keys are merged (though technically they shouldn't exist).
Example:
crbug.com/145590 [ Android ] platform/chromium/media/video-frame-size-change.html [ Timeout ]
webkit.org/b/84724 [ SnowLeopard ] platform/chromium/media/video-frame-size-change.html [ ImageOnlyFailure Pass ]
{'platform/chromium/media/video-frame-size-change.html': [{'IMAGE': True,
'Bugs': ['BUGWK84724', 'BUGCR145590'], 'Comments': '',
'Platforms': ['SNOWLEOPARD', 'ANDROID'], 'TIMEOUT': True, 'PASS': True}]}
|
625990238c3a8732951f744b
|
class state(GenericGetSetCommandClass):
    """Get/set the low-pass filter state of the photodiode input stage.

    NOTE(review): ``full_acces`` looks like a typo of ``full_access``
    but is kept as-is since callers may reference it by name.
    """

    # Instrument command string.
    cmd = 'Input:Pdiode:Filter:Lpass:STATe'
    full_acces = 'input.pdiode.filter.lpass.state'
    # Accepted values: off/on, symbolic or numeric.
    value = Argument(0, ["OFF", "0", "ON", "1"])
|
Sets the bandwidth of the photodiode input stage
|
62599023d18da76e235b78c7
|
class ConsumptionDataset(UrbansimDataset):
    """Set of consumption data backed by the WCSR_grid table."""

    # Composite identifier: grid cell plus billing year/month.
    id_name_default = ["grid_id", "billyear", "billmonth"]
    in_table_name_default = "WCSR_grid"
    out_table_name_default = "WCSR_grid"
    entity_name_default = "consumption"
|
Set of consumption data.
|
62599023d164cc6175821e6c
|
class Decoder(nn.Module):
    """Decodes encoder features into top-view occupancy maps/layouts.

    Three upsampling conv stages (channels 64 -> 64, 32, 16) followed by
    a final "topview" convolution; softmax is applied at eval time only.
    """

    def __init__(self, num_ch_enc, num_out_ch, oct_map_size):
        super(Decoder, self).__init__()
        self.num_output_channels = num_out_ch
        self.num_ch_enc = num_ch_enc
        self.num_ch_dec = np.array([16, 32, 64])
        self.oct_map_size = oct_map_size
        self.pool = nn.MaxPool2d(2)
        self.convs = OrderedDict()
        # Build stages 2, 1, 0 (coarse to fine).
        for i in range(2, -1, -1):
            num_ch_in = 64 if i == 2 else self.num_ch_dec[i + 1]
            num_ch_out = self.num_ch_dec[i]
            self.convs[("upconv", i, 0)] = nn.Conv2d(
                num_ch_in, num_ch_out, 3, 1, 1)
            self.convs[("norm", i, 0)] = nn.BatchNorm2d(num_ch_out)
            self.convs[("relu", i, 0)] = nn.ReLU(True)
            self.convs[("upconv", i, 1)] = nn.Conv2d(
                num_ch_out, num_ch_out, 3, 1, 1)
            self.convs[("norm", i, 1)] = nn.BatchNorm2d(num_ch_out)
        # Conv3x3 is a project-local helper -- presumably a padded
        # 3x3 convolution; confirm against its definition.
        self.convs["topview"] = Conv3x3(
            self.num_ch_dec[0], self.num_output_channels)
        self.dropout = nn.Dropout3d(0.2)
        # Register every module so its parameters are tracked.
        self.decoder = nn.ModuleList(list(self.convs.values()))

    def forward(self, x, is_training=True):
        """Map encoder features ``x`` to the output layout.

        Returns raw logits during training, softmax probabilities
        otherwise.
        """
        for i in range(2, -1, -1):
            x = self.convs[("upconv", i, 0)](x)
            x = self.convs[("norm", i, 0)](x)
            x = self.convs[("relu", i, 0)](x)
            # upsample() is a project-local helper -- TODO confirm its
            # scale factor.
            x = upsample(x)
            x = self.convs[("upconv", i, 1)](x)
            x = self.convs[("norm", i, 1)](x)
        x = self.pool(x)
        if is_training:
            x = self.convs["topview"](x)
        else:
            softmax = nn.Softmax2d()
            x = softmax(self.convs["topview"](x))
        return x
|
Decodes low-dimensional image features into output occupancy maps/layouts
Attributes
----------
num_ch_enc : list
channels used by the ResNet Encoder at different layers
Methods
-------
forward(x, ):
Processes input image features into output occupancy maps/layouts
|
625990238c3a8732951f744d
|
class TestMonthlyTelemetrySchedule(unittest.TestCase):
    """MonthlyTelemetrySchedule unit test stubs (no assertions yet)."""

    def setUp(self):
        # No fixtures required yet.
        pass

    def tearDown(self):
        pass

    def testMonthlyTelemetrySchedule(self):
        # TODO: implement the actual test body.
        pass
|
MonthlyTelemetrySchedule unit test stubs
|
625990233eb6a72ae038b55a
|
class CoberturaPackage(object):
    """Container mapping python modules to a Cobertura ``<package>``
    XML element, with aggregate line-coverage statistics."""

    def __init__(self, name: str) -> None:
        self.name = name
        # class name -> pre-built XML <class> element
        self.classes = {}
        # package name -> nested CoberturaPackage
        self.packages = {}
        self.total_lines = 0
        self.covered_lines = 0

    def as_xml(self) -> Any:
        """Render this package (and any nested packages) as XML."""
        package_element = etree.Element('package',
                                        name=self.name,
                                        complexity='1.0')
        # Branch coverage is not tracked; always reported as 0.
        package_element.attrib['branch-rate'] = '0'
        package_element.attrib['line-rate'] = get_line_rate(self.covered_lines, self.total_lines)
        classes_element = etree.SubElement(package_element, 'classes')
        for class_name in sorted(self.classes):
            classes_element.append(self.classes[class_name])
        self.add_packages(package_element)
        return package_element

    def add_packages(self, parent_element: Any) -> None:
        """Append nested packages, sorted by name, under the parent."""
        if self.packages:
            packages_element = etree.SubElement(parent_element, 'packages')
            for package in sorted(self.packages.values(), key=attrgetter('name')):
                packages_element.append(package.as_xml())
|
Container for XML and statistics mapping python modules to Cobertura package
|
625990236e29344779b01547
|
class pyCisCommandError(Exception):
    """Indicates a problem while executing a management command.

    Raising this (with a sensible description) during command execution
    is the preferred way to report failure; it is caught and turned into
    a nicely-printed error message on stderr.
    """

    pass
|
Exception class indicating a problem while executing a management
command.
If this exception is raised during the execution of a management
command, it will be caught and turned into a nicely-printed error
message to the appropriate output stream (i.e., stderr); as a
result, raising this exception (with a sensible description of the
error) is the preferred way to indicate that something has gone
wrong in the execution of a command.
|
62599023be8e80087fbbff6e
|
class Diagnose:
    """Inference layer that diagnoses why behavior will be abnormal
    in the future."""

    def __init__(self, demo):
        # demo=True suppresses the progress messages.
        self.demo = demo

    def cover(self, decision, base):
        """Generate the hypothesis set matching the decision category."""
        needs_action, category = decision[0], decision[1]
        if not needs_action:
            print('Error. In Diagnose step while no intervention needed.')
            return None
        if not self.demo:
            print('Generating set of possible hypotheses...', end='')
        if category == 'upper':
            hypotheses = base.too_high()
        elif category == 'lower':
            hypotheses = base.too_low()
        else:
            print('Could not generate hypotheses set. Unknown decision category.')
            return None
        if not self.demo:
            print(' Done')
        return hypotheses

    def select(self, hypotheses):
        """Pop one hypothesis; return it with the remaining set."""
        chosen = hypotheses.pop()
        return chosen, hypotheses

    def specify(self, hypothesis, base):
        """Look up the observable implied by the hypothesis."""
        if not self.demo:
            print(f'Specifying the observables for hypothesis {hypothesis}...', end='')
        observable = base.causal_model(hypothesis)
        if not self.demo:
            print(' Done')
        return observable

    def obtain(self, observable, nutrition, exercise, insulin):
        """Return the measurement corresponding to the observable name."""
        if not self.demo:
            print(f'Obtaining observable {observable}...', end='')
        lookup = {'nutrition': nutrition,
                  'insulin': insulin,
                  'exercise': exercise}
        if observable in lookup:
            if not self.demo:
                print(' Done')
            return lookup[observable]
        print('Error obtaining observables')

    def verify(self, finding, hypothesis, base):
        """Check the hypothesis against the obtained finding."""
        if not self.demo:
            print(f'Verifying hypothesis {hypothesis}...', end='')
        outcome = base.check_hypothesis(hypothesis, finding)
        if not self.demo:
            print(' Done')
        return outcome
|
Inference layer class responsible for diagnosing why behavior will be abnormal in the future.
|
625990231d351010ab8f4a0d
|
class LicenseLibpng(LicenseOpen):
    """The libpng/PNG license, derived from the zlib license.

    See http://libpng.org/pub/png/src/libpng-LICENSE.txt
    """

    HIDDEN = False
    DESCRIPTION = ("Permission is granted to use, copy, modify, and distribute the "
                   "source code, or portions hereof, for any purpose, without fee, subject "
                   "to 3 restrictions; http://libpng.org/pub/png/src/libpng-LICENSE.txt for full license")
|
The PNG license is derived from the zlib license,
http://libpng.org/pub/png/src/libpng-LICENSE.txt
|
625990231d351010ab8f4a0e
|
class UnableToAddCommentToWorkflow(WorkflowException):
    """Raised when the WorkflowActivity is unable to log a comment in
    the WorkflowHistory."""

    pass
|
To be raised if the WorkflowActivity is unable to log a comment in the
WorkflowHistory
|
62599023c432627299fa3eea
|
class GatewayRouteListResult(msrest.serialization.Model):
    """List of virtual network gateway routes.

    :param value: List of gateway routes.
    """

    # Maps attribute names to wire keys and msrest serialization types.
    _attribute_map = {
        'value': {'key': 'value', 'type': '[GatewayRoute]'},
    }

    def __init__(
        self,
        *,
        value: Optional[List["GatewayRoute"]] = None,
        **kwargs
    ):
        super(GatewayRouteListResult, self).__init__(**kwargs)
        self.value = value
|
List of virtual network gateway routes.
:param value: List of gateway routes.
:type value: list[~azure.mgmt.network.v2020_07_01.models.GatewayRoute]
|
62599023287bf620b6272ae6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.