code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class BadOptionException(RuntimeError): <NEW_LINE> <INDENT> pass
This is raised if the section blueprint is improperly configured
625990283eb6a72ae038b5e4
class MySQLError(Exception): <NEW_LINE> <INDENT> __module__ = "MySQLdb"
Exception related to operation with MySQL.
62599028711fe17d825e1459
class DataTablesConfig(object): <NEW_LINE> <INDENT> classes = ['table', 'cell-border', 'nowrap'] <NEW_LINE> extensions = DataTablesExtensions <NEW_LINE> options = DataTablesOptions <NEW_LINE> limit = 1000 <NEW_LINE> sample_size = None <NEW_LINE> sort = False <NEW_LINE> warnings = True
Default configuration for Jupyter DataTables.
6259902826238365f5fadad1
class HTTPXDownloader(IDownloader): <NEW_LINE> <INDENT> async def download(self, url: str) -> str: <NEW_LINE> <INDENT> async with httpx.AsyncClient() as client: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> response = await client.get(url) <NEW_LINE> <DEDENT> except httpx.ConnectError as e: <NEW_LINE> <INDENT> raise DownloaderError(str(e)) from e <NEW_LINE> <DEDENT> if response.status_code != 200: <NEW_LINE> <INDENT> raise DownloaderError( f"The Linguee server returned {response.status_code}" ) <NEW_LINE> <DEDENT> return response.text
Real downloader. Sends request to linguee.com to read the page.
625990281d351010ab8f4a96
class RandomWalk(DiscoveryStrategy): <NEW_LINE> <INDENT> def __init__(self, overlay, timeout=3.0, window_size=5, reset_chance=50, target_interval=0): <NEW_LINE> <INDENT> super(RandomWalk, self).__init__(overlay) <NEW_LINE> self.intro_timeouts = {} <NEW_LINE> self.node_timeout = timeout <NEW_LINE> self.window_size = window_size <NEW_LINE> self.reset_chance = reset_chance <NEW_LINE> self.target_interval = target_interval <NEW_LINE> self.last_step = 0 <NEW_LINE> <DEDENT> def take_step(self): <NEW_LINE> <INDENT> with self.walk_lock: <NEW_LINE> <INDENT> to_remove = [] <NEW_LINE> for node in self.intro_timeouts: <NEW_LINE> <INDENT> if self.intro_timeouts[node] + self.node_timeout < time(): <NEW_LINE> <INDENT> to_remove.append(node) <NEW_LINE> <DEDENT> <DEDENT> for node in to_remove: <NEW_LINE> <INDENT> del self.intro_timeouts[node] <NEW_LINE> if not self.overlay.network.get_verified_by_address(node): <NEW_LINE> <INDENT> self.overlay.network.remove_by_address(node) <NEW_LINE> <DEDENT> <DEDENT> if self.target_interval > 0 and self.last_step + self.target_interval >= time(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if self.window_size and self.window_size > 0 and len(self.intro_timeouts) >= self.window_size: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> known = self.overlay.get_walkable_addresses() <NEW_LINE> available = list(set(known) - set(self.intro_timeouts.keys())) <NEW_LINE> if available and randint(0, 255) >= self.reset_chance: <NEW_LINE> <INDENT> peer = choice(available) <NEW_LINE> self.overlay.walk_to(peer) <NEW_LINE> self.intro_timeouts[peer] = time() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.overlay.get_new_introduction() <NEW_LINE> <DEDENT> self.last_step = time()
Walk randomly through the network.
62599028a4f1c619b294f574
class ControlPoint(RectangleShape): <NEW_LINE> <INDENT> def __init__(self, theCanvas, object, size, the_xoffset, the_yoffset, the_type): <NEW_LINE> <INDENT> RectangleShape.__init__(self, size, size) <NEW_LINE> self._canvas = theCanvas <NEW_LINE> self._shape = object <NEW_LINE> self._xoffset = the_xoffset <NEW_LINE> self._yoffset = the_yoffset <NEW_LINE> self._type = the_type <NEW_LINE> self.SetPen(BlackForegroundPen) <NEW_LINE> self.SetBrush(wx.BLACK_BRUSH) <NEW_LINE> self._oldCursor = None <NEW_LINE> self._visible = True <NEW_LINE> self._eraseObject = True <NEW_LINE> <DEDENT> def OnDrawContents(self, dc): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def OnDraw(self, dc): <NEW_LINE> <INDENT> self._xpos = self._shape.GetX() + self._xoffset <NEW_LINE> self._ypos = self._shape.GetY() + self._yoffset <NEW_LINE> RectangleShape.OnDraw(self, dc) <NEW_LINE> <DEDENT> def OnErase(self, dc): <NEW_LINE> <INDENT> RectangleShape.OnErase(self, dc) <NEW_LINE> <DEDENT> def OnDragLeft(self, draw, x, y, keys = 0, attachment = 0): <NEW_LINE> <INDENT> self._shape.GetEventHandler().OnSizingDragLeft(self, draw, x, y, keys, attachment) <NEW_LINE> <DEDENT> def OnBeginDragLeft(self, x, y, keys = 0, attachment = 0): <NEW_LINE> <INDENT> self._shape.GetEventHandler().OnSizingBeginDragLeft(self, x, y, keys, attachment) <NEW_LINE> <DEDENT> def OnEndDragLeft(self, x, y, keys = 0, attachment = 0): <NEW_LINE> <INDENT> self._shape.GetEventHandler().OnSizingEndDragLeft(self, x, y, keys, attachment) <NEW_LINE> <DEDENT> def GetNumberOfAttachments(self): <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> def GetAttachmentPosition(self, attachment, nth = 0, no_arcs = 1, line = None): <NEW_LINE> <INDENT> return self._xpos, self._ypos <NEW_LINE> <DEDENT> def SetEraseObject(self, er): <NEW_LINE> <INDENT> self._eraseObject = er
The :class:`ControlPoint` class.
625990281d351010ab8f4a97
class DenseCapsule(nn.Module): <NEW_LINE> <INDENT> def __init__(self, in_num_caps, in_dim_caps, out_num_caps, out_dim_caps, routings=3, gpu=0): <NEW_LINE> <INDENT> super(DenseCapsule, self).__init__() <NEW_LINE> self.in_num_caps = in_num_caps <NEW_LINE> self.in_dim_caps = in_dim_caps <NEW_LINE> self.out_num_caps = out_num_caps <NEW_LINE> self.out_dim_caps = out_dim_caps <NEW_LINE> self.routings = routings <NEW_LINE> self.gpu = gpu <NEW_LINE> self.weight = nn.Parameter(0.01 * torch.randn(out_num_caps, in_num_caps, out_dim_caps, in_dim_caps)) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> x_hat = torch.squeeze(torch.matmul(self.weight, x[:, None, :, :, None]), dim=-1) <NEW_LINE> x_hat_detached = x_hat.detach() <NEW_LINE> b = Variable(torch.zeros(x.size(0), self.out_num_caps, self.in_num_caps)).cuda(self.gpu) <NEW_LINE> assert self.routings > 0, 'The \'routings\' should be > 0.' <NEW_LINE> for i in range(self.routings): <NEW_LINE> <INDENT> c = F.softmax(b, dim=1) <NEW_LINE> if i == self.routings - 1: <NEW_LINE> <INDENT> outputs = squash(torch.sum(c[:, :, :, None] * x_hat, dim=-2, keepdim=True)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> outputs = squash(torch.sum(c[:, :, :, None] * x_hat_detached, dim=-2, keepdim=True)) <NEW_LINE> b = b + torch.sum(outputs * x_hat_detached, dim=-1) <NEW_LINE> <DEDENT> <DEDENT> return torch.squeeze(outputs, dim=-2)
The dense capsule layer. It is similar to Dense (FC) layer. Dense layer has `in_num` inputs, each is a scalar, the output of the neuron from the former layer, and it has `out_num` output neurons. DenseCapsule just expands the output of the neuron from scalar to vector. So its input size = [None, in_num_caps, in_dim_caps] and output size = [None, out_num_caps, out_dim_caps]. For Dense Layer, in_dim_caps = out_dim_caps = 1. :param in_num_caps: number of cpasules inputted to this layer :param in_dim_caps: dimension of input capsules :param out_num_caps: number of capsules outputted from this layer :param out_dim_caps: dimension of output capsules :param routings: number of iterations for the routing algorithm
6259902830c21e258be99797
class BaseDnsAdminTest(BaseDnsTest): <NEW_LINE> <INDENT> _interface = "json" <NEW_LINE> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> super(BaseDnsAdminTest, cls).setUpClass() <NEW_LINE> if (CONF.compute.allow_tenant_isolation or cls.force_tenant_isolation is True): <NEW_LINE> <INDENT> creds = cls.isolated_creds.get_admin_creds() <NEW_LINE> cls.os_adm = clients.Manager(credentials=creds, interface=cls._interface) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> cls.os_adm = clients.DnsAdminManager( interface=cls._interface) <NEW_LINE> <DEDENT> except exceptions.InvalidCredentials: <NEW_LINE> <INDENT> msg = ("Missing Dns Admin API credentials " "in configuration.") <NEW_LINE> raise cls.skipException(msg)
Base test case class for Dns Admin API tests.
6259902866673b3332c3136f
class BinaryOpExpr(TypedExpr): <NEW_LINE> <INDENT> def __init__(self, typ, op, arg1, arg2, op_name_uni=None, op_name_latex=None, tcheck_args=True): <NEW_LINE> <INDENT> if tcheck_args: <NEW_LINE> <INDENT> args = [self.ensure_typed_expr(arg1, typ), self.ensure_typed_expr(arg2, typ)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> args = [self.ensure_typed_expr(arg1), self.ensure_typed_expr(arg2)] <NEW_LINE> <DEDENT> super().__init__(op, *args) <NEW_LINE> self.type = typ <NEW_LINE> if op_name_uni is None: <NEW_LINE> <INDENT> self.op_name = op <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.op_name = op_name_uni <NEW_LINE> <DEDENT> if op_name_latex is None: <NEW_LINE> <INDENT> self.op_name_latex = self.op_name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.op_name_latex = op_name_latex <NEW_LINE> <DEDENT> <DEDENT> def copy(self): <NEW_LINE> <INDENT> return self.copy_local(*self.args) <NEW_LINE> <DEDENT> def copy_local(self, *args, type_check=True): <NEW_LINE> <INDENT> return op_expr_factory(self.op, *args) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%s\nType: %s" % (repr(self), self.type) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "(%s %s %s)" % (repr(self.args[0]), self.op_name, repr(self.args[1])) <NEW_LINE> <DEDENT> def latex_str_long(self): <NEW_LINE> <INDENT> return self.latex_str() + "\\\\ Type: %s" % self.type.latex_str() <NEW_LINE> <DEDENT> def latex_str(self, **kwargs): <NEW_LINE> <INDENT> return ensuremath("(%s %s %s)" % (self.args[0].latex_str(**kwargs), self.op_name_latex, self.args[1].latex_str(**kwargs))) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def join(cls, *l): <NEW_LINE> <INDENT> if len(l) == 0: <NEW_LINE> <INDENT> return true_term <NEW_LINE> <DEDENT> if len(l) == 1: <NEW_LINE> <INDENT> return l[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cur = l[0] <NEW_LINE> for i in range(len(l) - 1): <NEW_LINE> <INDENT> cur = cls(cur, l[i+1]) <NEW_LINE> <DEDENT> return cur <NEW_LINE> <DEDENT> <DEDENT> 
@classmethod <NEW_LINE> def random(cls, ctrl): <NEW_LINE> <INDENT> return cls(ctrl(typ=type_t), ctrl(typ=type_t))
This class abstracts over expressions headed by specific binary operators. It is not necessarily designed to be instantiated directly, but rather subclassed for particular hard-coded operators. Because of the way the copy function works, it is currently not suited for direct instantiation at all.
6259902856b00c62f0fb3840
class CmdPasswordCreate(Command): <NEW_LINE> <INDENT> key = CMD_NOMATCH <NEW_LINE> locks = "cmd:all()" <NEW_LINE> def func(self): <NEW_LINE> <INDENT> password = self.args <NEW_LINE> self.caller.msg(echo=False) <NEW_LINE> if not hasattr(self.menutree, 'playername'): <NEW_LINE> <INDENT> self.caller.msg("{rSomething went wrong! Playername not remembered from previous step!{n") <NEW_LINE> self.menutree.goto("node2a") <NEW_LINE> return <NEW_LINE> <DEDENT> playername = self.menutree.playername <NEW_LINE> if len(password) < 3: <NEW_LINE> <INDENT> string = "{rYour password must be at least 3 characters or longer." <NEW_LINE> string += "\n\rFor best security, make it at least 8 characters " <NEW_LINE> string += "long, avoid making it a real word and mix numbers " <NEW_LINE> string += "into it.{n" <NEW_LINE> self.caller.msg(string) <NEW_LINE> self.menutree.goto("node2b") <NEW_LINE> return <NEW_LINE> <DEDENT> from evennia.commands.default import unloggedin <NEW_LINE> try: <NEW_LINE> <INDENT> permissions = settings.PERMISSION_PLAYER_DEFAULT <NEW_LINE> typeclass = settings.BASE_CHARACTER_TYPECLASS <NEW_LINE> new_player = unloggedin._create_player(self.caller, playername, password, permissions) <NEW_LINE> if new_player: <NEW_LINE> <INDENT> if MULTISESSION_MODE < 2: <NEW_LINE> <INDENT> default_home = ObjectDB.objects.get_id(settings.DEFAULT_HOME) <NEW_LINE> unloggedin._create_character(self.caller, new_player, typeclass, default_home, permissions) <NEW_LINE> <DEDENT> <DEDENT> string = "{gA new account '%s' was created. Now go log in from the menu!{n" <NEW_LINE> self.caller.msg(string % (playername)) <NEW_LINE> self.menutree.goto("START") <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> self.caller.msg("An error occurred. Please e-mail an admin if the problem persists.") <NEW_LINE> logger.log_trace()
Handle the creation of a password. This also creates the actual Player/User object.
62599028bf627c535bcb2437
class Logging: <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self._logger = logging.getLogger( self.__module__+"."+self.__class__.__name__ )
Subclass for any class that could use a _logger attribute.
62599028d164cc6175821ef9
class MercatorLatitudeScale(mscale.ScaleBase): <NEW_LINE> <INDENT> name = 'mercator' <NEW_LINE> def __init__(self, axis, **kwargs): <NEW_LINE> <INDENT> mscale.ScaleBase.__init__(self) <NEW_LINE> thresh = kwargs.pop("thresh", np.radians(85)) <NEW_LINE> if thresh >= np.pi / 2.0: <NEW_LINE> <INDENT> raise ValueError("thresh must be less than pi/2") <NEW_LINE> <DEDENT> self.thresh = thresh <NEW_LINE> <DEDENT> def get_transform(self): <NEW_LINE> <INDENT> return self.MercatorLatitudeTransform(self.thresh) <NEW_LINE> <DEDENT> def set_default_locators_and_formatters(self, axis): <NEW_LINE> <INDENT> class DegreeFormatter(Formatter): <NEW_LINE> <INDENT> def __call__(self, x, pos=None): <NEW_LINE> <INDENT> return "%d\N{DEGREE SIGN}" % np.degrees(x) <NEW_LINE> <DEDENT> <DEDENT> axis.set_major_locator(FixedLocator( np.radians(np.arange(-90, 90, 10)))) <NEW_LINE> axis.set_major_formatter(DegreeFormatter()) <NEW_LINE> axis.set_minor_formatter(DegreeFormatter()) <NEW_LINE> <DEDENT> def limit_range_for_scale(self, vmin, vmax, minpos): <NEW_LINE> <INDENT> return max(vmin, -self.thresh), min(vmax, self.thresh) <NEW_LINE> <DEDENT> class MercatorLatitudeTransform(mtransforms.Transform): <NEW_LINE> <INDENT> input_dims = 1 <NEW_LINE> output_dims = 1 <NEW_LINE> is_separable = True <NEW_LINE> has_inverse = True <NEW_LINE> def __init__(self, thresh): <NEW_LINE> <INDENT> mtransforms.Transform.__init__(self) <NEW_LINE> self.thresh = thresh <NEW_LINE> <DEDENT> def transform_non_affine(self, a): <NEW_LINE> <INDENT> masked = ma.masked_where((a < -self.thresh) | (a > self.thresh), a) <NEW_LINE> if masked.mask.any(): <NEW_LINE> <INDENT> return ma.log(np.abs(ma.tan(masked) + 1.0 / ma.cos(masked))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return np.log(np.abs(np.tan(a) + 1.0 / np.cos(a))) <NEW_LINE> <DEDENT> <DEDENT> def inverted(self): <NEW_LINE> <INDENT> return MercatorLatitudeScale.InvertedMercatorLatitudeTransform( self.thresh) <NEW_LINE> <DEDENT> <DEDENT> class 
InvertedMercatorLatitudeTransform(mtransforms.Transform): <NEW_LINE> <INDENT> input_dims = 1 <NEW_LINE> output_dims = 1 <NEW_LINE> is_separable = True <NEW_LINE> has_inverse = True <NEW_LINE> def __init__(self, thresh): <NEW_LINE> <INDENT> mtransforms.Transform.__init__(self) <NEW_LINE> self.thresh = thresh <NEW_LINE> <DEDENT> def transform_non_affine(self, a): <NEW_LINE> <INDENT> return np.arctan(np.sinh(a)) <NEW_LINE> <DEDENT> def inverted(self): <NEW_LINE> <INDENT> return MercatorLatitudeScale.MercatorLatitudeTransform(self.thresh)
Scales data in range -pi/2 to pi/2 (-90 to 90 degrees) using the system used to scale latitudes in a Mercator projection. The scale function: ln(tan(y) + sec(y)) The inverse scale function: atan(sinh(y)) Since the Mercator scale tends to infinity at +/- 90 degrees, there is user-defined threshold, above and below which nothing will be plotted. This defaults to +/- 85 degrees. source: http://en.wikipedia.org/wiki/Mercator_projection
62599028d164cc6175821efa
class TestData(IDataVerify): <NEW_LINE> <INDENT> customers: [Customer] = None <NEW_LINE> lc_global: LCGlobal = None <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.customers = [] <NEW_LINE> <DEDENT> def data_verify(self): <NEW_LINE> <INDENT> if self.customers is not None: <NEW_LINE> <INDENT> for customer in self.customers: <NEW_LINE> <INDENT> customer.data_verify() <NEW_LINE> <DEDENT> <DEDENT> if self.lc_global is not None: <NEW_LINE> <INDENT> self.lc_global.data_verify() <NEW_LINE> <DEDENT> <DEDENT> def data_verify_re(self): <NEW_LINE> <INDENT> if self.customers is not None: <NEW_LINE> <INDENT> for customer in self.customers: <NEW_LINE> <INDENT> customer.data_verify() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def update_pre_verify_data(self): <NEW_LINE> <INDENT> if self.customers is not None: <NEW_LINE> <INDENT> for customer in self.customers: <NEW_LINE> <INDENT> customer.update_pre_verify_data() <NEW_LINE> <DEDENT> <DEDENT> if self.lc_global is not None: <NEW_LINE> <INDENT> self.lc_global.update_pre_verify_data() <NEW_LINE> <DEDENT> <DEDENT> def update_post_verify_data(self): <NEW_LINE> <INDENT> if self.customers is not None: <NEW_LINE> <INDENT> for customer in self.customers: <NEW_LINE> <INDENT> customer.update_post_verify_data() <NEW_LINE> <DEDENT> <DEDENT> if self.lc_global is not None: <NEW_LINE> <INDENT> self.lc_global.update_post_verify_data()
测试数据封装类
62599028d18da76e235b790f
class FilterGroup: <NEW_LINE> <INDENT> CONJUNCTIONS = { 'and': lambda l: reduce(lambda memo, item: memo and item, l), 'or': lambda l: reduce(lambda memo, item: memo or item, l), 'not': lambda l: not reduce(lambda memo, item: memo and item, l), } <NEW_LINE> def __init__(self, conjunction, filter_list): <NEW_LINE> <INDENT> self.conjunction = conjunction <NEW_LINE> self.filter_list = filter_list <NEW_LINE> <DEDENT> def run(self, document): <NEW_LINE> <INDENT> result = [filter_or_group.run(document) for filter_or_group in self.filter_list] <NEW_LINE> return self.CONJUNCTIONS[self.conjunction](result) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def is_filter_group(cls, obj): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> o = obj.copy() <NEW_LINE> if len(o) != 1: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> k, v = o.popitem() <NEW_LINE> if k not in cls.CONJUNCTIONS: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if type(v) not in [list, tuple]: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return False
Stores a list of filter objects joined by a conjunction
6259902866673b3332c31371
class Skill(object): <NEW_LINE> <INDENT> deserialized_types = { 'stage': 'ask_smapi_model.v1.stage_type.StageType', 'locale': 'str' } <NEW_LINE> attribute_map = { 'stage': 'stage', 'locale': 'locale' } <NEW_LINE> supports_multiple_types = False <NEW_LINE> def __init__(self, stage=None, locale=None): <NEW_LINE> <INDENT> self.__discriminator_value = None <NEW_LINE> self.stage = stage <NEW_LINE> self.locale = locale <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.deserialized_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x.value if isinstance(x, Enum) else x, value )) <NEW_LINE> <DEDENT> elif isinstance(value, Enum): <NEW_LINE> <INDENT> result[attr] = value.value <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else (item[0], item[1].value) if isinstance(item[1], Enum) else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, Skill): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
:param stage: :type stage: (optional) ask_smapi_model.v1.stage_type.StageType :param locale: skill locale in bcp 47 format :type locale: (optional) str
6259902891af0d3eaad3adab
class JSONResponse(HttpResponse): <NEW_LINE> <INDENT> def __init__(self, data, **kwargs): <NEW_LINE> <INDENT> content = JSONRenderer().render(data) <NEW_LINE> kwargs['content_type'] = 'application/json' <NEW_LINE> super(JSONResponse, self).__init__(content, **kwargs)
将JSON转为httpresponse
625990281d351010ab8f4a9a
class PictureQuerySet(models.QuerySet, PictureQuerySetMixin, ModeratedQuerySetMixin): <NEW_LINE> <INDENT> pass
Queryset des images
62599028ac7a0e7691f7346d
class TransformerEncoder(FairseqEncoder): <NEW_LINE> <INDENT> def __init__(self, args, dictionary, embed_tokens, proj_to_decoder=True): <NEW_LINE> <INDENT> super().__init__(dictionary) <NEW_LINE> self.transformer_embedding = TransformerEmbedding( args=args, embed_tokens=embed_tokens ) <NEW_LINE> self.transformer_encoder_given_embeddings = TransformerEncoderGivenEmbeddings( args=args, proj_to_decoder=proj_to_decoder ) <NEW_LINE> self.tracker = VariableTracker() <NEW_LINE> self.set_gradient_tracking_mode(False) <NEW_LINE> self.set_embed_noising_mode(False) <NEW_LINE> <DEDENT> def forward(self, src_tokens, src_lengths): <NEW_LINE> <INDENT> self.tracker.reset() <NEW_LINE> x, encoder_padding_mask, positions = self.transformer_embedding( src_tokens=src_tokens, src_lengths=src_lengths ) <NEW_LINE> self.tracker.track(x, "token_embeddings", retain_grad=self.track_gradients) <NEW_LINE> x = self.transformer_encoder_given_embeddings( x=x, positions=positions, encoder_padding_mask=encoder_padding_mask ) <NEW_LINE> if encoder_padding_mask is None: <NEW_LINE> <INDENT> encoder_padding_mask = torch.empty([]) <NEW_LINE> <DEDENT> return x, src_tokens, encoder_padding_mask <NEW_LINE> <DEDENT> def reorder_encoder_out(self, encoder_out, new_order): <NEW_LINE> <INDENT> (x, src_tokens, encoder_padding_mask) = encoder_out <NEW_LINE> src_tokens_tensor = pytorch_translate_utils.get_source_tokens_tensor(src_tokens) <NEW_LINE> if x is not None: <NEW_LINE> <INDENT> x = x.index_select(1, new_order) <NEW_LINE> <DEDENT> if src_tokens_tensor is not None: <NEW_LINE> <INDENT> src_tokens_tensor = src_tokens_tensor.index_select(0, new_order) <NEW_LINE> <DEDENT> if encoder_padding_mask.shape == torch.Size([]): <NEW_LINE> <INDENT> encoder_padding_mask = None <NEW_LINE> <DEDENT> if encoder_padding_mask is not None: <NEW_LINE> <INDENT> encoder_padding_mask = encoder_padding_mask.index_select(0, new_order) <NEW_LINE> <DEDENT> return (x, src_tokens_tensor, encoder_padding_mask) <NEW_LINE> <DEDENT> def 
max_positions(self): <NEW_LINE> <INDENT> return self.transformer_embedding.embed_positions.max_positions <NEW_LINE> <DEDENT> def upgrade_state_dict_named(self, state_dict, name): <NEW_LINE> <INDENT> if isinstance( self.transformer_embedding.embed_positions, SinusoidalPositionalEmbedding ): <NEW_LINE> <INDENT> if f"{name}.transformer_embedding.embed_positions.weights" in state_dict: <NEW_LINE> <INDENT> del state_dict[f"{name}.transformer_embedding.embed_positions.weights"] <NEW_LINE> <DEDENT> state_dict[ f"{name}.transformer_embedding.embed_positions._float_tensor" ] = torch.FloatTensor(1) <NEW_LINE> <DEDENT> self.transformer_encoder_given_embeddings.upgrade_state_dict_named( state_dict, f"{name}.transformer_encoder_given_embeddings" ) <NEW_LINE> return state_dict <NEW_LINE> <DEDENT> def set_gradient_tracking_mode(self, mode=True): <NEW_LINE> <INDENT> self.tracker.reset() <NEW_LINE> self.track_gradients = mode <NEW_LINE> <DEDENT> def set_embed_noising_mode(self, mode=True): <NEW_LINE> <INDENT> self.embed_noising_mode = mode
Transformer encoder.
62599028507cdc57c63a5d2b
class ModelLock(ModelSQL): <NEW_LINE> <INDENT> __name__ = 'test.modelsql.lock'
Model to test lock
6259902866673b3332c31373
class CheckRequestModelView(APIView): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.cassandra_data = CassandraQcResult() <NEW_LINE> <DEDENT> def deal_pointrule(self, conversation_data): <NEW_LINE> <INDENT> start_time = conversation_data.starttime // 1000 <NEW_LINE> end_time = conversation_data.endtime // 1000 <NEW_LINE> starttime = time.strftime("%Y-%m-%d %X ", time.localtime(start_time)) <NEW_LINE> endtime = time.strftime("%Y-%m-%d %X ", time.localtime(end_time)) <NEW_LINE> data = { 'siteid':conversation_data.siteid, 'monitor_type':conversation_data.monitor_type, 'transaction_id': conversation_data.transaction_id, 'starttime': starttime, 'endtime': endtime, 'customerid': conversation_data.customerid, 'firstsupplierid': conversation_data.firstsupplierid, 'grade': conversation_data.grade } <NEW_LINE> ruleids_data = conversation_data.ruleids <NEW_LINE> if not ruleids_data: <NEW_LINE> <INDENT> ruleids_data = '{}' <NEW_LINE> <DEDENT> ruleids_dict = json.loads(ruleids_data) <NEW_LINE> ruleids = list(ruleids_dict.keys()) <NEW_LINE> data.update({'ruleids': ruleids}) <NEW_LINE> return data <NEW_LINE> <DEDENT> def get(self, request): <NEW_LINE> <INDENT> siteid = request.query_params.get('site_id') <NEW_LINE> start = request.query_params.get('start_time', '') <NEW_LINE> end = request.query_params.get('end_time', '') <NEW_LINE> monitor_type = request.query_params.get('monitor_type') <NEW_LINE> transaction_id = request.query_params.get('transaction_id', '') <NEW_LINE> monitor_type = int(monitor_type) <NEW_LINE> if start and end: <NEW_LINE> <INDENT> start_time = int(time.mktime(time.strptime(start, "%Y-%m-%d")) * 1000) <NEW_LINE> end_time = int(time.mktime(time.strptime(end, "%Y-%m-%d"))) * 1000 + 86400000 <NEW_LINE> logger.info('查找%s--%s的分析结果' % (start, end)) <NEW_LINE> try: <NEW_LINE> <INDENT> sql = 'select * from qcresult where siteid = %s AND monitor_type = %s AND starttime >= %s AND endtime >0 AND endtime <= %s ALLOW FILTERING ' <NEW_LINE> data = 
(siteid, monitor_type,start_time,end_time) <NEW_LINE> qcresult_data = self.cassandra_data.get_data(sql, data) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logger.debug('获取质检结果失败:%s' %e) <NEW_LINE> return Response({'msg': '获取质检结果失败'}, status=status.HTTP_200_OK) <NEW_LINE> <DEDENT> <DEDENT> elif transaction_id: <NEW_LINE> <INDENT> logger.info('查找%s的质检结果' % (transaction_id)) <NEW_LINE> try: <NEW_LINE> <INDENT> sql = 'select * from qcresult where siteid = %s AND monitor_type = %s AND transaction_id = %s ALLOW FILTERING ' <NEW_LINE> data = (siteid, monitor_type,transaction_id) <NEW_LINE> qcresult_data = self.cassandra_data.get_data(sql, data) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logger.debug('获取质检结果失败:%s' % e) <NEW_LINE> return Response({'msg': '获取质检结果失败'}, status=status.HTTP_200_OK) <NEW_LINE> <DEDENT> <DEDENT> data = list(map(self.deal_pointrule, qcresult_data)) <NEW_LINE> return Response(data, status=status.HTTP_200_OK)
查看质检结果
6259902891af0d3eaad3adad
class ItemFinder(object): <NEW_LINE> <INDENT> def __init__(self, view): <NEW_LINE> <INDENT> self.view = view <NEW_LINE> <DEDENT> def get_item_at_point(self, pos): <NEW_LINE> <INDENT> item, handle = self.view.get_handle_at_point(pos) <NEW_LINE> return item or self.view.get_item_at_point(pos)
Find an item on the canvas.
62599028bf627c535bcb243b
class Cat(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=100) <NEW_LINE> breed = models.CharField(max_length=100) <NEW_LINE> description = models.CharField(max_length=200) <NEW_LINE> age = models.IntegerField() <NEW_LINE> user = models.ForeignKey(User, on_delete=models.CASCADE) <NEW_LINE> likes = models.IntegerField(default=0) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name
Cat class.
62599028796e427e5384f702
class RNNet(nn.Module): <NEW_LINE> <INDENT> def __init__( self, in_channels=9, num_bins=32, hidden_size=128, num_rnn_layers=1, rnn_dropout=0.25, dense_features=None, ): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> if dense_features is None: <NEW_LINE> <INDENT> dense_features = [256, 1] <NEW_LINE> <DEDENT> dense_features.insert(0, hidden_size) <NEW_LINE> self.dropout = nn.Dropout(rnn_dropout) <NEW_LINE> self.rnn = nn.LSTM( input_size=in_channels * num_bins, hidden_size=hidden_size, num_layers=num_rnn_layers, batch_first=True, ) <NEW_LINE> self.hidden_size = hidden_size <NEW_LINE> self.dense_layers = nn.ModuleList( [ nn.Linear( in_features=dense_features[i - 1], out_features=dense_features[i] ) for i in range(1, len(dense_features)) ] ) <NEW_LINE> self.initialize_weights() <NEW_LINE> <DEDENT> def initialize_weights(self): <NEW_LINE> <INDENT> sqrt_k = math.sqrt(1 / self.hidden_size) <NEW_LINE> for parameters in self.rnn.all_weights: <NEW_LINE> <INDENT> for pam in parameters: <NEW_LINE> <INDENT> nn.init.uniform_(pam.data, -sqrt_k, sqrt_k) <NEW_LINE> <DEDENT> <DEDENT> for dense_layer in self.dense_layers: <NEW_LINE> <INDENT> nn.init.kaiming_uniform_(dense_layer.weight.data) <NEW_LINE> nn.init.constant_(dense_layer.bias.data, 0) <NEW_LINE> <DEDENT> <DEDENT> def forward(self, x, return_last_dense=False): <NEW_LINE> <INDENT> x = x.permute(0, 2, 1, 3).contiguous() <NEW_LINE> x = x.view(x.shape[0], x.shape[1], x.shape[2] * x.shape[3]) <NEW_LINE> sequence_length = x.shape[1] <NEW_LINE> hidden_state = torch.zeros(1, x.shape[0], self.hidden_size) <NEW_LINE> cell_state = torch.zeros(1, x.shape[0], self.hidden_size) <NEW_LINE> if x.is_cuda: <NEW_LINE> <INDENT> hidden_state = hidden_state.cuda() <NEW_LINE> cell_state = cell_state.cuda() <NEW_LINE> <DEDENT> for i in range(sequence_length): <NEW_LINE> <INDENT> input_x = x[:, i, :].unsqueeze(1) <NEW_LINE> _, (hidden_state, cell_state) = self.rnn( input_x, (hidden_state, cell_state) ) <NEW_LINE> hidden_state = 
self.dropout(hidden_state) <NEW_LINE> <DEDENT> x = hidden_state.squeeze(0) <NEW_LINE> for layer_number, dense_layer in enumerate(self.dense_layers): <NEW_LINE> <INDENT> x = dense_layer(x) <NEW_LINE> if return_last_dense and (layer_number == len(self.dense_layers) - 2): <NEW_LINE> <INDENT> output = x <NEW_LINE> <DEDENT> <DEDENT> if return_last_dense: <NEW_LINE> <INDENT> return x, output <NEW_LINE> <DEDENT> return x
A crop yield conv net. For a description of the parameters, see the RNNModel class.
625990281f5feb6acb163b76
class TimerScheduler(scheduler_base.APRSScheduler): <NEW_LINE> <INDENT> def ready(self, gps_data, start_datetime): <NEW_LINE> <INDENT> if start_datetime is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if not self.last_packet_gps_data: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if (gps_data.current_datetime - self.last_packet_gps_data.current_datetime).total_seconds() >= SCHEDULER_TIME_INTERVAL: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False
Very simple scheduler. Send a message every SCHEDULER_TIME_INTERVAL seconds. Returns True if we're ready to send our message.
62599028c432627299fa3f79
class TLSRecordHeader: <NEW_LINE> <INDENT> __slots__ = ("record_type", "version", "length") <NEW_LINE> fmt = "!BHH" <NEW_LINE> class RecordType(enum.IntEnum): <NEW_LINE> <INDENT> CHANGE_CIPHER_SPEC = 0x14 <NEW_LINE> ALERT = 0x15 <NEW_LINE> HANDSHAKE = 0x16 <NEW_LINE> APPLICATION_DATA = 0x17 <NEW_LINE> <DEDENT> def __init__(self, record_type, version, length): <NEW_LINE> <INDENT> self.record_type = record_type <NEW_LINE> self.version = version <NEW_LINE> self.length = length <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%s(%s, %s, %s)" % ( type(self).__name__, self.record_type, self.version, self.length, ) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "%s(%r, %r, %r)" % ( type(self).__name__, self.record_type, self.version, self.length, ) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, TLSRecordHeader): <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> return ( self.record_type is other.record_type and self.version is other.version and self.length == other.length ) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return 0x5AFE ^ self.record_type ^ self.version ^ self.length <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return 5 <NEW_LINE> <DEDENT> def __bytes__(self): <NEW_LINE> <INDENT> return struct.pack( TLSRecordHeader.fmt, self.record_type, self.version, self.length ) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_bytes(cls, header): <NEW_LINE> <INDENT> record_type, version, length = struct.unpack( TLSRecordHeader.fmt, header[:5] ) <NEW_LINE> return cls( TLSRecordHeader.RecordType(record_type), TLSVersion(version), length, )
Encode/decode TLS record protocol format.
6259902830c21e258be9979d
class HelloSerializer(serializers.Serializer): <NEW_LINE> <INDENT> name = serializers.CharField(max_length=10)
Serializes a name field for testing our API view.
6259902815baa72349462f1f
class Scene(object): <NEW_LINE> <INDENT> def enter(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def notify(self, event): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def leave(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def draw(self, surface): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def handle_event(self, event): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def enterphase(self): <NEW_LINE> <INDENT> pass
Draws the GUI and handles scene-specific events for the different game states, like menu or lobby.
6259902821a7993f00c66f04
class Supremacy(): <NEW_LINE> <INDENT> game_id = None <NEW_LINE> url = None <NEW_LINE> debug = 0 <NEW_LINE> default_params = { "@c": "ultshared.action.UltUpdateGameStateAction", "playerID": 0, "userAuth": "787925a25d0c072c3eaff5c1eff52829475fd506", "tstamp": int(time.time()) } <NEW_LINE> headers = { "Host": "xgs8.c.bytro.com", "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:57.0) " + "Gecko/20100101 Firefox/57.0", "Accept": "text/plain, */*; q=0.01", "Accept-Language": "en-US,en;q=0.5", "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8", "Origin": "https://www.supremacy1914.nl", "DNT": "1", "Connection": "keep-alive", "Pragma": "no-cache", "Cache-Control": "no-cache" } <NEW_LINE> def __init__(self, game_id, url=None, debug=None): <NEW_LINE> <INDENT> self.game_id = game_id <NEW_LINE> self.url = url if url else "http://xgs1.c.bytro.com" <NEW_LINE> self.default_params["gameID"] = game_id <NEW_LINE> if debug and isinstance(debug, int): <NEW_LINE> <INDENT> self.debug = debug <NEW_LINE> <DEDENT> <DEDENT> def all(self): <NEW_LINE> <INDENT> return self._request() <NEW_LINE> <DEDENT> def game(self): <NEW_LINE> <INDENT> return self._request(12) <NEW_LINE> <DEDENT> def coalitions(self): <NEW_LINE> <INDENT> result = self._request(2) <NEW_LINE> return result["teams"] if "teams" in result else None <NEW_LINE> <DEDENT> def players(self): <NEW_LINE> <INDENT> return self._request(1) <NEW_LINE> <DEDENT> def market(self): <NEW_LINE> <INDENT> return self._request(4) <NEW_LINE> <DEDENT> def score(self, day): <NEW_LINE> <INDENT> return self._request(2, day) <NEW_LINE> <DEDENT> def relations(self): <NEW_LINE> <INDENT> return self._request(5) <NEW_LINE> <DEDENT> def _request(self, state_type=None, day=None): <NEW_LINE> <INDENT> params = self.default_params <NEW_LINE> if state_type is not None: <NEW_LINE> <INDENT> params["stateType"] = state_type <NEW_LINE> <DEDENT> if day is not None: <NEW_LINE> <INDENT> params["option"] = day <NEW_LINE> <DEDENT> request = 
requests.post(self.url, headers=self.headers, json=params) <NEW_LINE> response = json.loads(request.text) <NEW_LINE> if self.debug >= 2: <NEW_LINE> <INDENT> print_json(response) <NEW_LINE> <DEDENT> if "@c" in response["result"] and response["result"]["@c"] == "ultshared.rpc.UltSwitchServerException": <NEW_LINE> <INDENT> if "newHostName" in response["result"]: <NEW_LINE> <INDENT> new_url = "http://%s" % response["result"]["newHostName"] <NEW_LINE> if self.debug >= 1: <NEW_LINE> <INDENT> print("new host: %s for %s" % (new_url, self.game_id)) <NEW_LINE> <DEDENT> raise ServerChangeError(new_url) <NEW_LINE> <DEDENT> if self.debug >= 1: <NEW_LINE> <INDENT> print("Game %s does not exist" % self.game_id) <NEW_LINE> <DEDENT> raise GameDoesNotExistError("Game %s is not found" % self.game_id) <NEW_LINE> <DEDENT> return response["result"]
The Supremacy class allows easy access to the Supremacy 1914 API.
625990286e29344779b015d7
class _MarkerFinder(object): <NEW_LINE> <INDENT> def __init__(self, stream): <NEW_LINE> <INDENT> super(_MarkerFinder, self).__init__() <NEW_LINE> self._stream = stream <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_stream(cls, stream): <NEW_LINE> <INDENT> return cls(stream) <NEW_LINE> <DEDENT> def next(self, start): <NEW_LINE> <INDENT> position = start <NEW_LINE> while True: <NEW_LINE> <INDENT> position = self._offset_of_next_ff_byte(start=position) <NEW_LINE> position, byte_ = self._next_non_ff_byte(start=position+1) <NEW_LINE> if byte_ == b'\x00': <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> marker_code, segment_offset = byte_, position+1 <NEW_LINE> break <NEW_LINE> <DEDENT> return marker_code, segment_offset <NEW_LINE> <DEDENT> def _next_non_ff_byte(self, start): <NEW_LINE> <INDENT> self._stream.seek(start) <NEW_LINE> byte_ = self._read_byte() <NEW_LINE> while byte_ == b'\xFF': <NEW_LINE> <INDENT> byte_ = self._read_byte() <NEW_LINE> <DEDENT> offset_of_non_ff_byte = self._stream.tell() - 1 <NEW_LINE> return offset_of_non_ff_byte, byte_ <NEW_LINE> <DEDENT> def _offset_of_next_ff_byte(self, start): <NEW_LINE> <INDENT> self._stream.seek(start) <NEW_LINE> byte_ = self._read_byte() <NEW_LINE> while byte_ != b'\xFF': <NEW_LINE> <INDENT> byte_ = self._read_byte() <NEW_LINE> <DEDENT> offset_of_ff_byte = self._stream.tell() - 1 <NEW_LINE> return offset_of_ff_byte <NEW_LINE> <DEDENT> def _read_byte(self): <NEW_LINE> <INDENT> byte_ = self._stream.read(1) <NEW_LINE> if not byte_: <NEW_LINE> <INDENT> raise Exception('unexpected end of file') <NEW_LINE> <DEDENT> return byte_
Service class that knows how to find the next JFIF marker in a stream.
62599028a8ecb033258721a5
class ItemsByTagView(AdsMixin, FavoriteItemsMixin, CacheMixin, ListView): <NEW_LINE> <INDENT> template_name = 'news_by_tag.html' <NEW_LINE> context_object_name = 'items' <NEW_LINE> paginate_by = 20 <NEW_LINE> paginator_class = DiggPaginator <NEW_LINE> model = Item <NEW_LINE> cache_timeout = 300 <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> items = super(ItemsByTagView, self).get_queryset() .filter(status='active', activated_at__lte=datetime.datetime.now()) <NEW_LINE> tag = self.request.GET.get('tag') <NEW_LINE> if tag in ['ru', 'en']: <NEW_LINE> <INDENT> items = items.filter(tags__name__in=tag) <NEW_LINE> <DEDENT> items = items.prefetch_related('issue', 'section') <NEW_LINE> items = items.order_by('-created_at', '-related_to_date') <NEW_LINE> return items
Лента новостей.
62599028796e427e5384f704
class _Job(object): <NEW_LINE> <INDENT> def __init__(self, _id, pending=True, machine_name=None, tags=frozenset(), args=tuple(), kwargs=None, machine=None, allocation_id=None): <NEW_LINE> <INDENT> self.id = _id <NEW_LINE> self.pending = pending <NEW_LINE> self.machine_name = machine_name <NEW_LINE> self.tags = set(tags if tags is not None else ["default"]) <NEW_LINE> self.args = args <NEW_LINE> self.kwargs = dict({} if kwargs is None else kwargs) <NEW_LINE> self.machine = machine <NEW_LINE> self.allocation_id = allocation_id <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<{} id={}>".format(self.__class__.__name__, self.id)
The internal state representing a job. Attributes ---------- id : int A unique ID assigned to the job. pending : bool If True, the job is currently queued for execution, if False the job has been allocated. machine_name : str or None The machine this job must be executed on or None if any machine with matching tags is sufficient. tags : set([str, ...]) or None The set of tags required of any machine the job is to be executed on. If None, only machines with the "default" tag will be used. args, kwargs : tuple, dict The arguments to the alloc function for this job. machine : :py:class:`._Machine` or None The machine the job has been allocated on. allocation_id : int or None The allocation ID for the Job's allocation.
625990281f5feb6acb163b77
class ImageLayer(Layer): <NEW_LINE> <INDENT> type = 'image' <NEW_LINE> def __init__(self, map, name, visible=True, opacity=1, image=None): <NEW_LINE> <INDENT> super(ImageLayer, self).__init__(map=map, name=name, visible=visible, opacity=opacity) <NEW_LINE> self.image = image <NEW_LINE> <DEDENT> def __nonzero__(self): <NEW_LINE> <INDENT> return bool(self.image) <NEW_LINE> <DEDENT> __bool__ = __nonzero__ <NEW_LINE> def to_dict(self): <NEW_LINE> <INDENT> d = super(ImageLayer, self).to_dict() <NEW_LINE> d.update(dict( type='imagelayer', image=self.image.source, )) <NEW_LINE> return d <NEW_LINE> <DEDENT> def generate_draw_commands(self): <NEW_LINE> <INDENT> yield draw.DrawImageCommand( image=self.image, pos=(0, 0), opacity=self.opacity, ) <NEW_LINE> <DEDENT> @helpers.from_dict_method <NEW_LINE> def from_dict(cls, dct, map): <NEW_LINE> <INDENT> helpers.assert_item(dct, 'type', 'imagelayer') <NEW_LINE> helpers.assert_item(dct, 'width', map.width) <NEW_LINE> helpers.assert_item(dct, 'height', map.height) <NEW_LINE> helpers.assert_item(dct, 'x', 0) <NEW_LINE> helpers.assert_item(dct, 'y', 0) <NEW_LINE> self = cls( map=map, name=dct.pop('name'), visible=dct.pop('visible', True), opacity=dct.pop('opacity', 1), image=image.open(dct.pop('image')), ) <NEW_LINE> if getattr(map, 'base_path', None): <NEW_LINE> <INDENT> self.image.base_path = map.base_path <NEW_LINE> self.base_path = map.base_path <NEW_LINE> <DEDENT> self.properties.update(dct.pop('properties', {})) <NEW_LINE> return self
An image layer. See :class:`Layer` documentation for most init arguments. Other init arguments, which become attributes: .. attribute:: image The image to use for the layer
625990286fece00bbaccc941
class Xml_Profiles_s(Collection): <NEW_LINE> <INDENT> def __init__(self, policy): <NEW_LINE> <INDENT> super(Xml_Profiles_s, self).__init__(policy) <NEW_LINE> self._meta_data['object_has_stats'] = False <NEW_LINE> self._meta_data['minimum_version'] = '11.6.0' <NEW_LINE> self._meta_data['allowed_lazy_attributes'] = [Xml_Profile] <NEW_LINE> self._meta_data['required_json_kind'] = 'tm:asm:policies:xml-profiles:xml-profilecollectionstate' <NEW_LINE> self._meta_data['attribute_registry'] = { 'tm:asm:policies:xml-profiles:xml-profilestate': Xml_Profile }
BIG-IP® ASM Xml-Profiles sub-collection. Due to a bug that prevents creating this object in 11.5.4 Final, I am disabling this for anything lower than 11.6.0. This will be subject to change at some point.
6259902863f4b57ef0086536
class Elf32_File(object): <NEW_LINE> <INDENT> def __init__(self, filename): <NEW_LINE> <INDENT> super(Elf32_File, self).__init__() <NEW_LINE> self.filename = filename <NEW_LINE> try: <NEW_LINE> <INDENT> self.efile = open(filename, 'rb+') <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> exit(-1) <NEW_LINE> <DEDENT> """Initial ELF Header""" <NEW_LINE> self.elf32_header = Elf32_Header(self.efile) <NEW_LINE> """Initial ELF Section String Name Table""" <NEW_LINE> self.elf32_shstrt = Elf32_Section_Header_Name_String_Table(self.efile, self.elf32_header) <NEW_LINE> """Initial ELF Section Header Table""" <NEW_LINE> self.elf32_sht = Elf32_Section_Header_Table(self.efile, self.elf32_header, self.elf32_shstrt) <NEW_LINE> """Get the section that want to crypt""" <NEW_LINE> self.fixSection = self.elf32_sht.findFixedSection("mysection") <NEW_LINE> self.fixOffset = self.fixSection[0] <NEW_LINE> self.fixSize = self.fixSection[1] <NEW_LINE> self.fixAddr = self.fixSection[2] <NEW_LINE> self.fixAlign = self.fixSection[3] <NEW_LINE> """Write sh_addr and sh_size to Elf_header's e_shoff and e_shentsize""" <NEW_LINE> self.efile.seek(sct.calcsize("16BHHIII")) <NEW_LINE> self.efile.write(sct.pack("I", self.fixAddr)) <NEW_LINE> self.efile.seek(sct.calcsize("16BHHIIIIIHHH")) <NEW_LINE> self.efile.write(sct.pack("I", self.fixSize)) <NEW_LINE> print("""Offset:0x%08x\nSize:0x%08x\nAddr:0x08%x\nAlign:0x08%x""" %(self.fixOffset,self.fixSize,self.fixAddr, self.fixAlign)) <NEW_LINE> self.efile.seek(self.fixOffset) <NEW_LINE> self.SectionData = self.efile.read(self.fixSize) <NEW_LINE> self.fmt = '' + str(len(self.SectionData)) + 'B' <NEW_LINE> self.SectionData = list(sct.unpack(self.fmt, self.SectionData)) <NEW_LINE> print(len(self.SectionData)) <NEW_LINE> """Do the ecrypt(xor) for each byte""" <NEW_LINE> for i in range(0, len(self.SectionData), 1): <NEW_LINE> <INDENT> self.SectionData[i] = self.SectionData[i] ^ 0x1 <NEW_LINE> <DEDENT> self.SectionData = 
bytes(self.SectionData) <NEW_LINE> self.efile.seek(self.fixOffset) <NEW_LINE> self.efile.write(self.SectionData)
Parses an ELF32 file, locates a target section, and XOR-encrypts that section's bytes in place.
62599028d99f1b3c44d0662b
class NetworkLogIncident(): <NEW_LINE> <INDENT> def __init__(self, server_id, incident_type, ip_address, network_log_date, log): <NEW_LINE> <INDENT> self.server_id = server_id <NEW_LINE> self.incident_type = incident_type <NEW_LINE> self.ip_address = ip_address <NEW_LINE> self.network_log_date = network_log_date <NEW_LINE> self.log = log <NEW_LINE> <DEDENT> def sql_insert_network_log_string(self): <NEW_LINE> <INDENT> sql_command = '' <NEW_LINE> if self.ip_address != '' and self.network_log_date != '': <NEW_LINE> <INDENT> sql_values = "VALUES( {0}, '{1}', '{2}', '{3}', {4} )".format(self.server_id, self.ip_address, self.network_log_date, self.log, self.incident_type) <NEW_LINE> sql_command = 'INSERT INTO dbo.NetworkLog (' + 'ServerId, IPAddress, NetworkLogDate, [Log], IncidentTypeId) ' + sql_values <NEW_LINE> print(sql_values) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print('ERROR: bad ip or date: ', self.log) <NEW_LINE> <DEDENT> return sql_command <NEW_LINE> <DEDENT> def to_string(self): <NEW_LINE> <INDENT> return 'server_id: ' + str(self.server_id) + ', incident_type: ' + self.incident_type + ', ip_address: ' + self.ip_address + ', network_log_date: ' + self.network_log_date + ', log: ' + self.log
structure of data to be written to database
62599028796e427e5384f706
@Actions.register("json.check") <NEW_LINE> class JsonCheckAction(BaseAction): <NEW_LINE> <INDENT> def execute(self, context): <NEW_LINE> <INDENT> d = json.loads(context.get_last_return_value()) <NEW_LINE> action_config = self.get_action_config() <NEW_LINE> if 'assert_path' not in action_config: <NEW_LINE> <INDENT> print("Must have a assert-path") <NEW_LINE> exit(0) <NEW_LINE> <DEDENT> assert_path = action_config['assert_path'] <NEW_LINE> v = self.get_value(d, assert_path) <NEW_LINE> if not v or not self.match_type(v, action_config['assert_type']): <NEW_LINE> <INDENT> print("Type not matched") <NEW_LINE> <DEDENT> <DEDENT> def get_value(self, data, path): <NEW_LINE> <INDENT> v = data <NEW_LINE> ps = path.split(".") <NEW_LINE> for p in ps: <NEW_LINE> <INDENT> if str.isdigit(p): <NEW_LINE> <INDENT> v = v[int(p)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> v = v[p] <NEW_LINE> <DEDENT> <DEDENT> return v <NEW_LINE> <DEDENT> def match_type(self, value, value_type): <NEW_LINE> <INDENT> if value_type in ['array', 'list']: <NEW_LINE> <INDENT> return type(value) is list <NEW_LINE> <DEDENT> elif value_type in ['str', 'string']: <NEW_LINE> <INDENT> return type(value) is str <NEW_LINE> <DEDENT> return False
:param url || string || @required :param content_type || string || @optional
625990285166f23b2e24435f
class Solution: <NEW_LINE> <INDENT> def maxSubArray(self, nums): <NEW_LINE> <INDENT> if nums is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> a = [0 for _ in xrange(len(nums))] <NEW_LINE> a[0] = nums[0] <NEW_LINE> for i in xrange(1, len(a)): <NEW_LINE> <INDENT> a[i] = max(a[i - 1], 0) + nums[i] <NEW_LINE> <DEDENT> return max(a)
@param nums: A list of integers @return: An integer denote the sum of maximum subarray
62599028d10714528d69ee51
class OUI(BaseIdentifier): <NEW_LINE> <INDENT> __slots__ = ('records',) <NEW_LINE> def __init__(self, oui): <NEW_LINE> <INDENT> super(OUI, self).__init__() <NEW_LINE> from netaddr.eui import ieee <NEW_LINE> self.records = [] <NEW_LINE> if isinstance(oui, str): <NEW_LINE> <INDENT> self._value = int(oui.replace('-', ''), 16) <NEW_LINE> <DEDENT> elif _is_int(oui): <NEW_LINE> <INDENT> if 0 <= oui <= 0xffffff: <NEW_LINE> <INDENT> self._value = oui <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('OUI int outside expected range: %r' % oui) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError('unexpected OUI format: %r' % oui) <NEW_LINE> <DEDENT> if self._value in ieee.OUI_INDEX: <NEW_LINE> <INDENT> fh = open(ieee.OUI_REGISTRY_PATH, 'rb') <NEW_LINE> for (offset, size) in ieee.OUI_INDEX[self._value]: <NEW_LINE> <INDENT> fh.seek(offset) <NEW_LINE> data = fh.read(size).decode('UTF-8') <NEW_LINE> self._parse_data(data, offset, size) <NEW_LINE> <DEDENT> fh.close() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotRegisteredError('OUI %r not registered!' 
% oui) <NEW_LINE> <DEDENT> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, OUI): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> other = self.__class__(other) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> <DEDENT> return self._value == other._value <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, OUI): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> other = self.__class__(other) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> <DEDENT> return self._value != other._value <NEW_LINE> <DEDENT> def __getstate__(self): <NEW_LINE> <INDENT> return self._value, self.records <NEW_LINE> <DEDENT> def __setstate__(self, state): <NEW_LINE> <INDENT> self._value, self.records = state <NEW_LINE> <DEDENT> def _parse_data(self, data, offset, size): <NEW_LINE> <INDENT> record = { 'idx': 0, 'oui': '', 'org': '', 'address': [], 'offset': offset, 'size': size, } <NEW_LINE> for line in data.split("\n"): <NEW_LINE> <INDENT> line = line.strip() <NEW_LINE> if not line: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if '(hex)' in line: <NEW_LINE> <INDENT> record['idx'] = self._value <NEW_LINE> record['org'] = line.split(None, 2)[2] <NEW_LINE> record['oui'] = str(self) <NEW_LINE> <DEDENT> elif '(base 16)' in line: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> record['address'].append(line) <NEW_LINE> <DEDENT> <DEDENT> self.records.append(record) <NEW_LINE> <DEDENT> @property <NEW_LINE> def reg_count(self): <NEW_LINE> <INDENT> return len(self.records) <NEW_LINE> <DEDENT> def registration(self, index=0): <NEW_LINE> <INDENT> return DictDotLookup(self.records[index]) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> int_val = self._value <NEW_LINE> return "%02X-%02X-%02X" % ( (int_val >> 16) & 0xff, (int_val >> 8) & 0xff, int_val & 0xff) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> 
<INDENT> return "OUI('%s')" % self
An individual IEEE OUI (Organisationally Unique Identifier). For online details see - http://standards.ieee.org/regauth/oui/
625990289b70327d1c57fd0c
class Monster: <NEW_LINE> <INDENT> def __init__(self,name: str, health: int,dam_low: int,dam_high: int,defense: int,speed: int): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> self._health = health <NEW_LINE> self._cur_health = health <NEW_LINE> self._dam_low = dam_low <NEW_LINE> self._dam_high = dam_high <NEW_LINE> self._defense = defense <NEW_LINE> self._speed = speed <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> s = '' <NEW_LINE> s += 'Name: ' + self._name + '\n' <NEW_LINE> s += 'Max Health: ' + str(self._health) + '\n' <NEW_LINE> s += 'Current Health: ' + str(self._cur_health) + '\n' <NEW_LINE> s += 'Damage: ' + str(self._dam_low) + ' - ' + str(self._dam_high) + '\n' <NEW_LINE> s += 'Defense: ' + str(self._defense) + '\n' <NEW_LINE> s += 'Speed: ' + str(self._speed) + '\n' <NEW_LINE> return s <NEW_LINE> <DEDENT> def get_name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> def get_max_health(self): <NEW_LINE> <INDENT> return self._health <NEW_LINE> <DEDENT> def get_cur_health(self): <NEW_LINE> <INDENT> return self._cur_health <NEW_LINE> <DEDENT> def change_health(self,amt:int): <NEW_LINE> <INDENT> self._cur_health += amt <NEW_LINE> if self._cur_health <= 0: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> elif self._cur_health > self._health: <NEW_LINE> <INDENT> self._cur_health = self._health <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def take_damage(self,amt:int): <NEW_LINE> <INDENT> dam = self._defense - amt <NEW_LINE> if dam > 0: <NEW_LINE> <INDENT> dam = 0 <NEW_LINE> <DEDENT> self._cur_health += dam <NEW_LINE> if self._cur_health <= 0: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def get_low(self): <NEW_LINE> <INDENT> return self._dam_low <NEW_LINE> <DEDENT> def get_high(self): <NEW_LINE> <INDENT> return self._dam_high <NEW_LINE> <DEDENT> def get_def(self): <NEW_LINE> <INDENT> return self._defense <NEW_LINE> <DEDENT> def get_speed(self): <NEW_LINE> <INDENT> return 
self._speed <NEW_LINE> <DEDENT> def attack(self): <NEW_LINE> <INDENT> dam = random.randint(self.get_low(),self.get_high()) <NEW_LINE> return dam
Defines a monster class.
62599028c432627299fa3f7d
class PolicyNetworkParams: <NEW_LINE> <INDENT> UNSUPPORTED = ["obs_size"] <NEW_LINE> DEPRECATED = ["end_learning_rate", "decay_steps", "decay_power"] <NEW_LINE> def __init__(self, hidden_size, dropout_rate, l2_reg_coef, dense_size, attention_mechanism, network_parameters): <NEW_LINE> <INDENT> self.hidden_size = hidden_size <NEW_LINE> self.dropout_rate = dropout_rate <NEW_LINE> self.l2_reg_coef = l2_reg_coef <NEW_LINE> self.dense_size = dense_size <NEW_LINE> self.attention_mechanism = attention_mechanism <NEW_LINE> self.network_parameters = network_parameters or {} <NEW_LINE> self.log_deprecated_params(self.network_parameters.keys()) <NEW_LINE> <DEDENT> def get_hidden_size(self): <NEW_LINE> <INDENT> return self.network_parameters.get("hidden_size", self.hidden_size) <NEW_LINE> <DEDENT> def get_action_size(self): <NEW_LINE> <INDENT> return self.network_parameters.get("action_size") <NEW_LINE> <DEDENT> def get_dropout_rate(self): <NEW_LINE> <INDENT> return self.network_parameters.get("dropout_rate", self.dropout_rate) <NEW_LINE> <DEDENT> def get_l2_reg_coef(self): <NEW_LINE> <INDENT> return self.network_parameters.get("l2_reg_coef", self.l2_reg_coef) <NEW_LINE> <DEDENT> def get_dense_size(self): <NEW_LINE> <INDENT> return self.network_parameters.get("dense_size", self.dense_size) or self.hidden_size <NEW_LINE> <DEDENT> def get_learning_rate(self): <NEW_LINE> <INDENT> return self.network_parameters.get("learning_rate", None) <NEW_LINE> <DEDENT> def get_attn_params(self): <NEW_LINE> <INDENT> return self.network_parameters.get('attention_mechanism', self.attention_mechanism) <NEW_LINE> <DEDENT> def log_deprecated_params(self, network_parameters): <NEW_LINE> <INDENT> if any(p in network_parameters for p in self.DEPRECATED): <NEW_LINE> <INDENT> log.warning(f"parameters {self.DEPRECATED} are deprecated," f" for learning rate schedule documentation see" f" deeppavlov.core.models.lr_scheduled_tf_model" f" or read a github tutorial on super convergence.")
The class to deal with the overcomplicated structure of the GO-bot configs. It is initialized from the config-as-is and performs all the conflicting parameters resolution internally.
6259902830c21e258be997a1
class PythonRequirement: <NEW_LINE> <INDENT> def __init__(self, requirement, name=None, repository=None, use_2to3=False, compatibility=None): <NEW_LINE> <INDENT> self._requirement = Requirement.parse(requirement) <NEW_LINE> self._repository = repository <NEW_LINE> self._name = name or self._requirement.project_name <NEW_LINE> self._use_2to3 = use_2to3 <NEW_LINE> self.compatibility = compatibility or [''] <NEW_LINE> <DEDENT> def should_build(self, python, platform): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> @property <NEW_LINE> def use_2to3(self): <NEW_LINE> <INDENT> return self._use_2to3 <NEW_LINE> <DEDENT> @property <NEW_LINE> def repository(self): <NEW_LINE> <INDENT> return self._repository <NEW_LINE> <DEDENT> @property <NEW_LINE> def key(self): <NEW_LINE> <INDENT> return self._requirement.key <NEW_LINE> <DEDENT> @property <NEW_LINE> def extras(self): <NEW_LINE> <INDENT> return self._requirement.extras <NEW_LINE> <DEDENT> @property <NEW_LINE> def specs(self): <NEW_LINE> <INDENT> return self._requirement.specs <NEW_LINE> <DEDENT> @property <NEW_LINE> def project_name(self): <NEW_LINE> <INDENT> return self._requirement.project_name <NEW_LINE> <DEDENT> @property <NEW_LINE> def requirement(self): <NEW_LINE> <INDENT> return self._requirement <NEW_LINE> <DEDENT> def __contains__(self, item): <NEW_LINE> <INDENT> return item in self._requirement <NEW_LINE> <DEDENT> def cache_key(self): <NEW_LINE> <INDENT> return str(self._requirement) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'PythonRequirement({})'.format(self._requirement)
Pants wrapper around pkg_resources.Requirement Describes an external dependency as understood by ``easy_install`` or ``pip``. It takes a single non-keyword argument of the `Requirement`-style string, e.g. :: python_requirement('django-celery') python_requirement('tornado==2.2') python_requirement('kombu>=2.1.1,<3.0') Pants resolves the dependency *and its transitive closure*. For example, `django-celery` also pulls down its dependencies: `celery>=2.5.1`, `django-picklefield>=0.2.0`, `ordereddict`, `python-dateutil`, `kombu>=2.1.1,<3.0`, `anyjson>=0.3.1`, `importlib`, and `amqplib>=1.0`. To let other Targets depend on this ``python_requirement``, put it in a `python_requirement_library <#python_requirement_library>`_. :API: public
62599028711fe17d825e145f
class MyObject: <NEW_LINE> <INDENT> CLASS_ATTRIBUTE = 'some value' <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return 'MyObject({})'.format(self.name) <NEW_LINE> <DEDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name
Example for dis.
625990288e05c05ec3f6f621
class PhylipWriter(SequentialAlignmentWriter): <NEW_LINE> <INDENT> def write_alignment(self, alignment, id_width=_PHYLIP_ID_WIDTH): <NEW_LINE> <INDENT> handle = self.handle <NEW_LINE> if len(alignment) == 0: <NEW_LINE> <INDENT> raise ValueError("Must have at least one sequence") <NEW_LINE> <DEDENT> length_of_seqs = alignment.get_alignment_length() <NEW_LINE> for record in alignment: <NEW_LINE> <INDENT> if length_of_seqs != len(record.seq): <NEW_LINE> <INDENT> raise ValueError("Sequences must all be the same length") <NEW_LINE> <DEDENT> <DEDENT> if length_of_seqs <= 0: <NEW_LINE> <INDENT> raise ValueError("Non-empty sequences are required") <NEW_LINE> <DEDENT> names = [] <NEW_LINE> seqs = [] <NEW_LINE> for record in alignment: <NEW_LINE> <INDENT> name = record.id.strip() <NEW_LINE> for char in "[](),": <NEW_LINE> <INDENT> name = name.replace(char, "") <NEW_LINE> <DEDENT> for char in ":;": <NEW_LINE> <INDENT> name = name.replace(char, "|") <NEW_LINE> <DEDENT> name = name[:id_width] <NEW_LINE> if name in names: <NEW_LINE> <INDENT> raise ValueError("Repeated name %r (originally %r), " "possibly due to truncation" % (name, record.id)) <NEW_LINE> <DEDENT> names.append(name) <NEW_LINE> sequence = str(record.seq) <NEW_LINE> if "." 
in sequence: <NEW_LINE> <INDENT> raise ValueError(_NO_DOTS) <NEW_LINE> <DEDENT> seqs.append(sequence) <NEW_LINE> <DEDENT> handle.write(" %i %s\n" % (len(alignment), length_of_seqs)) <NEW_LINE> block = 0 <NEW_LINE> while True: <NEW_LINE> <INDENT> for name, sequence in zip(names, seqs): <NEW_LINE> <INDENT> if block == 0: <NEW_LINE> <INDENT> handle.write(name[:id_width].ljust(id_width)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> handle.write(" " * id_width) <NEW_LINE> <DEDENT> for chunk in range(0, 5): <NEW_LINE> <INDENT> i = block * 50 + chunk * 10 <NEW_LINE> seq_segment = sequence[i:i + 10] <NEW_LINE> handle.write(" %s" % seq_segment) <NEW_LINE> if i + 10 > length_of_seqs: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> handle.write("\n") <NEW_LINE> <DEDENT> block += 1 <NEW_LINE> if block * 50 >= length_of_seqs: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> handle.write("\n")
Phylip alignment writer.
6259902866673b3332c31379
class AssumableEmailOrUsernameModelBackend(ModelBackend): <NEW_LINE> <INDENT> def authenticate(self, username=None, password=None): <NEW_LINE> <INDENT> if '@' in username: <NEW_LINE> <INDENT> kwargs = {'email': username} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> kwargs = {'username': username} <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> user = User.objects.get(**kwargs) <NEW_LINE> if password is None: <NEW_LINE> <INDENT> return user <NEW_LINE> <DEDENT> elif user.check_password(password): <NEW_LINE> <INDENT> return user <NEW_LINE> <DEDENT> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> return None
Custom authentication backend that allows logging in using either username or email address, and allows authentication without a password. IMPORTANT: This backend assumes that the credentials have already been verified if a password is not passed to authenticate(), so all calls to that method should be done very carefully.
625990286e29344779b015db
class Meter(object): <NEW_LINE> <INDENT> def reset(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def add(self, value): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def value(self): <NEW_LINE> <INDENT> pass
Meters provide a way to keep track of important statistics in an online manner. This class is abstract, but provides a standard interface for all meters to follow.
62599028a8ecb033258721a9
class Rotacionar_90Y3(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "object.rotate_90y3" <NEW_LINE> bl_label = "-{}º y_3".format(R) <NEW_LINE> @classmethod <NEW_LINE> def poll(cls, context): <NEW_LINE> <INDENT> return context.active_object is not None <NEW_LINE> <DEDENT> def execute(self, context): <NEW_LINE> <INDENT> rotate(context, 'y_3', -R) <NEW_LINE> return {'FINISHED'}
Operator 'rotate'. Rotaciona o grupo y_3 -90 graus.
625990281f5feb6acb163b7b
class SetTopicAttributesResultSet(ResultSet): <NEW_LINE> <INDENT> def getJSONFromString(self, str): <NEW_LINE> <INDENT> return json.loads(str) <NEW_LINE> <DEDENT> def get_Response(self): <NEW_LINE> <INDENT> return self._output.get('Response', None)
A ResultSet with methods tailored to the values returned by the SetTopicAttributes Choreo. The ResultSet object is used to retrieve the results of a Choreo execution.
6259902863f4b57ef0086538
class LinearDataGenerator: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def generateLinearInput(intercept, weights, xMean, xVariance, nPoints, seed, eps): <NEW_LINE> <INDENT> weights = [float(weight) for weight in weights] <NEW_LINE> xMean = [float(mean) for mean in xMean] <NEW_LINE> xVariance = [float(var) for var in xVariance] <NEW_LINE> return list( callMLlibFunc( "generateLinearInputWrapper", float(intercept), weights, xMean, xVariance, int(nPoints), int(seed), float(eps), ) ) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> @since("1.5.0") <NEW_LINE> def generateLinearRDD(sc, nexamples, nfeatures, eps, nParts=2, intercept=0.0): <NEW_LINE> <INDENT> return callMLlibFunc( "generateLinearRDDWrapper", sc, int(nexamples), int(nfeatures), float(eps), int(nParts), float(intercept), )
Utils for generating linear data. .. versionadded:: 1.5.0
625990289b70327d1c57fd0e
class OlnDetectionGenerator(GenericDetectionGenerator): <NEW_LINE> <INDENT> def __call__(self, box_outputs, class_outputs, anchor_boxes, image_shape, is_single_fg_score=False, keep_nms=True): <NEW_LINE> <INDENT> if is_single_fg_score: <NEW_LINE> <INDENT> dummy_bg_scores = tf.zeros_like(class_outputs) <NEW_LINE> class_outputs = tf.stack([dummy_bg_scores, class_outputs], -1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> class_outputs = tf.nn.softmax(class_outputs, axis=-1) <NEW_LINE> <DEDENT> class_outputs_shape = tf.shape(class_outputs) <NEW_LINE> batch_size = class_outputs_shape[0] <NEW_LINE> num_locations = class_outputs_shape[1] <NEW_LINE> num_classes = class_outputs_shape[-1] <NEW_LINE> num_detections = num_locations * (num_classes - 1) <NEW_LINE> class_outputs = tf.slice(class_outputs, [0, 0, 1], [-1, -1, -1]) <NEW_LINE> box_outputs = tf.reshape( box_outputs, tf.stack([batch_size, num_locations, num_classes, 4], axis=-1)) <NEW_LINE> box_outputs = tf.slice(box_outputs, [0, 0, 1, 0], [-1, -1, -1, -1]) <NEW_LINE> anchor_boxes = tf.tile( tf.expand_dims(anchor_boxes, axis=2), [1, 1, num_classes - 1, 1]) <NEW_LINE> box_outputs = tf.reshape(box_outputs, tf.stack([batch_size, num_detections, 4], axis=-1)) <NEW_LINE> anchor_boxes = tf.reshape( anchor_boxes, tf.stack([batch_size, num_detections, 4], axis=-1)) <NEW_LINE> decoded_boxes = box_utils.decode_boxes( box_outputs, anchor_boxes, weights=[10.0, 10.0, 5.0, 5.0]) <NEW_LINE> decoded_boxes = box_utils.clip_boxes(decoded_boxes, image_shape) <NEW_LINE> decoded_boxes = tf.reshape( decoded_boxes, tf.stack([batch_size, num_locations, num_classes - 1, 4], axis=-1)) <NEW_LINE> if keep_nms: <NEW_LINE> <INDENT> nmsed_boxes, nmsed_scores, nmsed_classes, valid_detections = ( self._generate_detections(decoded_boxes, class_outputs)) <NEW_LINE> nmsed_classes += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> nmsed_boxes = decoded_boxes[:, :, 0, :] <NEW_LINE> nmsed_scores = class_outputs[:, :, 0] <NEW_LINE> nmsed_classes = 
tf.cast(tf.ones_like(nmsed_scores), tf.int32) <NEW_LINE> valid_detections = tf.cast( tf.reduce_sum(tf.ones_like(nmsed_scores), axis=-1), tf.int32) <NEW_LINE> <DEDENT> return nmsed_boxes, nmsed_scores, nmsed_classes, valid_detections
Generates the final detected boxes with scores and classes.
62599028c432627299fa3f7f
class Generation(SamlBase): <NEW_LINE> <INDENT> c_tag = 'Generation' <NEW_LINE> c_namespace = NAMESPACE <NEW_LINE> c_children = SamlBase.c_children.copy() <NEW_LINE> c_attributes = SamlBase.c_attributes.copy() <NEW_LINE> c_child_order = SamlBase.c_child_order[:] <NEW_LINE> c_cardinality = SamlBase.c_cardinality.copy() <NEW_LINE> c_attributes['mechanism'] = ('mechanism', 'None', True) <NEW_LINE> def __init__(self, mechanism=None, text=None, extension_elements=None, extension_attributes=None, ): <NEW_LINE> <INDENT> SamlBase.__init__(self, text=text, extension_elements=extension_elements, extension_attributes=extension_attributes, ) <NEW_LINE> self.mechanism = mechanism
The urn:oasis:names:tc:SAML:2.0:ac:classes:Password:Generation element
6259902830c21e258be997a2
class SubGame(GameState): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> GameState.__init__(self) <NEW_LINE> self.screen_state = GetScreen().copy()
A game state which will set up the screen with the same parameters as used earlier in the game. Should be the base subclass for most GameStates.
62599028711fe17d825e1460
class rect(rect_pt): <NEW_LINE> <INDENT> def __init__(self, x, y, width, height): <NEW_LINE> <INDENT> rect_pt.__init__(self, unit.topt(x), unit.topt(y), unit.topt(width), unit.topt(height))
rectangle at position (x,y) with width and height
6259902821bff66bcd723bf0
class RESTError(Exception): <NEW_LINE> <INDENT> http_code = None <NEW_LINE> app_code = None <NEW_LINE> message = None <NEW_LINE> info = None <NEW_LINE> errid = None <NEW_LINE> errobj = None <NEW_LINE> trace = None <NEW_LINE> def __init__(self, info = None, errobj = None, trace = None): <NEW_LINE> <INDENT> self.errid = "%032x" % random.randrange(1 << 128) <NEW_LINE> self.errobj = errobj <NEW_LINE> self.info = info <NEW_LINE> self.trace = trace <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "%s %s [HTTP %d, APP %d, MSG %s, INFO %s, ERR %s]" % (self.__class__.__name__, self.errid, self.http_code, self.app_code, repr(self.message).replace("\n", " ~~ "), repr(self.info).replace("\n", " ~~ "), repr(self.errobj).replace("\n", " ~~ "))
Base class for REST errors. .. attribute:: http_code Integer, HTTP status code for this error. Also emitted as X-Error-HTTP header value. .. attribute:: app_code Integer, application error code, to be emitted as X-REST-Status header. .. attribute:: message String, information about the error, to be emitted as X-Error-Detail header. Should not contain anything sensitive, and in particular should never include any unvalidated or unsafe data, e.g. input parameters or data from a database. Normally a fixed label with one-to-one match with the :obj:`app-code`. If the text exceeds 200 characters, it's truncated. Since this is emitted as a HTTP header, it cannot contain newlines or anything encoding-dependent. .. attribute:: info String, additional information beyond :obj:`message`, to be emitted as X-Error-Info header. Like :obj:`message` should not contain anything sensitive or unsafe, or text inappropriate for a HTTP response header, and should be short enough to fit in 200 characters. This is normally free form text to clarify why the error happened. .. attribute:: errid String, random unique identifier for this error, to be emitted as X-Error-ID header and output into server logs when logging the error. The purpose is that clients save this id when they receive an error, and further error reporting or debugging can use this value to identify the specific error, and for example to grep logs for more information. .. attribute:: errobj If the problem was caused by another exception being raised in the code, reference to the original exception object. For example if the code dies with an :class:`KeyError`, this is the original exception object. This error is logged to the server logs when reporting the error, but no information about it is returned to the HTTP client. .. attribute:: trace The origin of the exception as returned by :func:`format_exc`. The full trace is emitted to the server logs, each line prefixed with timestamp. 
This information is not returned to the HTTP client.
62599028287bf620b6272b7d
class Feature(collections.namedtuple( "Feature", ["index", "name", "layer_set", "full_name", "scale", "type", "palette", "clip"])): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> dtypes = { 1: np.uint8, 8: np.uint8, 16: np.uint16, 32: np.int32, } <NEW_LINE> def unpack(self, obs): <NEW_LINE> <INDENT> planes = getattr(obs.feature_layer_data, self.layer_set) <NEW_LINE> plane = getattr(planes, self.name) <NEW_LINE> return self.unpack_layer(plane) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> @sw.decorate <NEW_LINE> def unpack_layer(plane): <NEW_LINE> <INDENT> size = point.Point.build(plane.size) <NEW_LINE> if size == (0, 0): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> data = np.frombuffer(plane.data, dtype=Feature.dtypes[plane.bits_per_pixel]) <NEW_LINE> if plane.bits_per_pixel == 1: <NEW_LINE> <INDENT> data = np.unpackbits(data) <NEW_LINE> if data.shape[0] != size.x * size.y: <NEW_LINE> <INDENT> data = data[:size.x * size.y] <NEW_LINE> <DEDENT> <DEDENT> return data.reshape(size.y, size.x) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> @sw.decorate <NEW_LINE> def unpack_rgb_image(plane): <NEW_LINE> <INDENT> assert plane.bits_per_pixel == 24, "{} != 24".format(plane.bits_per_pixel) <NEW_LINE> size = point.Point.build(plane.size) <NEW_LINE> data = np.frombuffer(plane.data, dtype=np.uint8) <NEW_LINE> return data.reshape(size.y, size.x, 3) <NEW_LINE> <DEDENT> @sw.decorate <NEW_LINE> def color(self, plane): <NEW_LINE> <INDENT> if self.clip: <NEW_LINE> <INDENT> plane = np.clip(plane, 0, self.scale - 1) <NEW_LINE> <DEDENT> return self.palette[plane]
Define properties of a feature layer. Attributes: index: Index of this layer into the set of layers. name: The name of the layer within the set. layer_set: Which set of feature layers to look at in the observation proto. full_name: The full name including for visualization. scale: Max value (+1) of this layer, used to scale the values. type: A FeatureType for scalar vs categorical. palette: A color palette for rendering. clip: Whether to clip the values for coloring.
62599028925a0f43d25e8fd5
class GatewayCollection(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'value': {'readonly': True}, 'next_link': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'value': {'key': 'value', 'type': '[GatewayContract]'}, 'count': {'key': 'count', 'type': 'long'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, count: Optional[int] = None, **kwargs ): <NEW_LINE> <INDENT> super(GatewayCollection, self).__init__(**kwargs) <NEW_LINE> self.value = None <NEW_LINE> self.count = count <NEW_LINE> self.next_link = None
Paged Gateway list representation. Variables are only populated by the server, and will be ignored when sending a request. :ivar value: Page values. :vartype value: list[~api_management_client.models.GatewayContract] :ivar count: Total record count number across all pages. :vartype count: long :ivar next_link: Next page link if any. :vartype next_link: str
62599028d99f1b3c44d06631
class SomAccountInvoicePendingError(Exception): <NEW_LINE> <INDENT> def __init__(self, msg): <NEW_LINE> <INDENT> super(SomAccountInvoicePendingError, self).__init__(msg) <NEW_LINE> self.msg = msg <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.msg <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.__repr__()
Base class for other exceptions
625990288e05c05ec3f6f623
class ML_phys(GenerativeModel): <NEW_LINE> <INDENT> def __init__(self, init_params, inf_params=None, X=T.matrix(), S=T.matrix(), P=T.matrix(), batch_size=1, rng=None): <NEW_LINE> <INDENT> super().__init__(init_params, inf_params, X, S, P, batch_size, rng) <NEW_LINE> self.type = 'ML_phys' <NEW_LINE> start = T.zeros([self.S.shape[0]]) <NEW_LINE> def one_step(s, C_tm1, D_tm1, delta_t, tau, eta, c0, kappa, gamma): <NEW_LINE> <INDENT> C = s + T.exp(-delta_t / tau) * C_tm1 <NEW_LINE> D = T.minimum(1/softp(gamma),(D_tm1 * T.maximum(0,1. - delta_t/softp(kappa)*(1+softp(gamma)*((softp(c0)+C)**eta - softp(c0)**eta))) + delta_t/softp(kappa)*((softp(c0)+C)**eta - softp(c0)**eta))) <NEW_LINE> return C, D <NEW_LINE> <DEDENT> C_D, updates = theano.scan(fn=one_step, sequences=[self.S.T], outputs_info=[start, start], non_sequences=[self.pars['delta_t'], self.pars['tau'], self.pars['eta'], self.pars['c0'], self.pars['kappa'], self.pars['gamma']]) <NEW_LINE> self.C = C_D[0].T <NEW_LINE> self.D = C_D[1].T <NEW_LINE> self.F = (softp(self.pars['alpha']) * self.D.T + self.pars['beta']).T <NEW_LINE> self.genfunc = theano.function([self.S, In(self.P, value=np.zeros([1, 1]).astype(config.floatX))], self.F, on_unused_input='ignore')
Nonlinear model as described in the paper, allows for facilitation and saturation. Sigmoid and softplus functions are used to ensure valid parameters. Otherwise training might crash. delta_t is the constant bin size, and is not trained. C_(t) = s + exp(δ/τ) * C_(t-1) D_(t) = minimum(1/softplus(γ),(D_(t-1) * maximum(0,1 - δ/softplus(kappa)*(1+softplus(γ)*((softplus(c0)+C_(t)) ^ η - softplus(c0) ^ η))) + δ/softplus(kappa)*((softplus(c0)+C_(t)) ^ η - softplus(c0) ^ η))) F_(t) = softplus(α) * D_(t) + β + sigmoid(σ)
62599028d18da76e235b7915
class AsyncDeviceStorage(RemoteDeviceBased): <NEW_LINE> <INDENT> ERROR_MSG_INSUFFICIENT_STORAGE = "INSTALL_FAILED_INSUFFICIENT_STORAGE" <NEW_LINE> def __init__(self, device: Device): <NEW_LINE> <INDENT> super(AsyncDeviceStorage, self).__init__(device) <NEW_LINE> self._ext_storage = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def external_storage_location(self) -> str: <NEW_LINE> <INDENT> return self.device.external_storage_location <NEW_LINE> <DEDENT> async def list(self, remote_path: str, timeout: float = Device.TIMEOUT_ADB_CMD) -> AsyncIterable[str]: <NEW_LINE> <INDENT> async with self.device.monitor_remote_cmd("shell", "ls", remote_path, include_stderr=False) as proc: <NEW_LINE> <INDENT> async for line in proc.output(unresponsive_timeout=timeout): <NEW_LINE> <INDENT> yield line <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> async def push(self, local_path: str, remote_path: str, timeout: Optional[float]=Device.TIMEOUT_LONG_ADB_CMD) -> None: <NEW_LINE> <INDENT> if not os.path.isfile(local_path): <NEW_LINE> <INDENT> raise FileNotFoundError("No such file found: %s" % local_path) <NEW_LINE> <DEDENT> await self.device.execute_remote_cmd_async('push', local_path, remote_path, timeout=timeout) <NEW_LINE> <DEDENT> async def pull(self, remote_path: str, local_path: str, run_as: Optional[str] = None) -> None: <NEW_LINE> <INDENT> if os.path.exists(local_path): <NEW_LINE> <INDENT> log.warning("File %s already exists when pulling. 
Potential to overwrite files.", local_path) <NEW_LINE> <DEDENT> if run_as: <NEW_LINE> <INDENT> with open(local_path, 'w') as out: <NEW_LINE> <INDENT> await self.device.execute_remote_cmd_async ('shell', 'run-as', run_as, 'cat', remote_path, stdout=out) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> await self.device.execute_remote_cmd_async('pull', remote_path, local_path) <NEW_LINE> <DEDENT> <DEDENT> async def make_dir(self, path: str, run_as: Optional[str] = None) -> None: <NEW_LINE> <INDENT> if run_as: <NEW_LINE> <INDENT> await self.device.execute_remote_cmd_async("shell", "run-as", run_as, "mkdir", "-p", path, timeout=Device.TIMEOUT_ADB_CMD) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> await self.device.execute_remote_cmd_async("shell", "mkdir", "-p", path) <NEW_LINE> <DEDENT> <DEDENT> async def remove(self, path: str, recursive: bool = False, run_as: Optional[str] = None) -> None: <NEW_LINE> <INDENT> cmd = ["shell", "run-as", run_as, "rm"] if run_as else ["shell", "rm"] <NEW_LINE> if recursive: <NEW_LINE> <INDENT> cmd.append("-r") <NEW_LINE> <DEDENT> cmd.append(path) <NEW_LINE> await self.device.execute_remote_cmd_async(*cmd, timeout=Device.TIMEOUT_LONG_ADB_CMD)
Class providing API to push, install and remove files and apps to a remote device :param device: which device Class providing API to push, push and pull files to a remote device
62599028796e427e5384f70c
class BaseCheckpointer(NonTransformableMixin, BaseStep): <NEW_LINE> <INDENT> def __init__( self, execution_mode: ExecutionMode ): <NEW_LINE> <INDENT> BaseStep.__init__(self) <NEW_LINE> self.execution_mode = execution_mode <NEW_LINE> <DEDENT> def is_for_execution_mode(self, execution_mode: ExecutionMode) -> bool: <NEW_LINE> <INDENT> if execution_mode == ExecutionMode.FIT: <NEW_LINE> <INDENT> return self.execution_mode in [ ExecutionMode.FIT, ExecutionMode.FIT_OR_FIT_TRANSFORM, ExecutionMode.FIT_OR_FIT_TRANSFORM_OR_TRANSFORM ] <NEW_LINE> <DEDENT> if execution_mode == ExecutionMode.FIT_TRANSFORM: <NEW_LINE> <INDENT> return self.execution_mode in [ ExecutionMode.FIT_TRANSFORM, ExecutionMode.FIT_OR_FIT_TRANSFORM, ExecutionMode.FIT_OR_FIT_TRANSFORM_OR_TRANSFORM ] <NEW_LINE> <DEDENT> if execution_mode == ExecutionMode.TRANSFORM: <NEW_LINE> <INDENT> return self.execution_mode in [ ExecutionMode.TRANSFORM, ExecutionMode.FIT_OR_FIT_TRANSFORM_OR_TRANSFORM ] <NEW_LINE> <DEDENT> return execution_mode == ExecutionMode.FIT_OR_FIT_TRANSFORM_OR_TRANSFORM <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def save_checkpoint(self, data_container: DataContainer, context: ExecutionContext) -> DataContainer: <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def read_checkpoint(self, data_container: DataContainer, context: ExecutionContext) -> DataContainer: <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def should_resume(self, data_container: DataContainer, context: ExecutionContext) -> bool: <NEW_LINE> <INDENT> raise NotImplementedError()
Base class to implement a step checkpoint or data container checkpoint. :class:`Checkpoint` uses many BaseCheckpointer to checkpoint both data container checkpoints, and step checkpoints. BaseCheckpointer has an execution mode so there could be different checkpoints for each execution mode (fit, fit_transform or transform). .. seealso:: :class:`Checkpoint`
62599028d10714528d69ee54
class ServerStatus: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.tried_to_launch = False
What is the status of our current attempt at launching the server
625990288a349b6b436871c8
class Tween(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def _checkRange(n): <NEW_LINE> <INDENT> if not 0.0 <= n <= 1.0: <NEW_LINE> <INDENT> raise ValueError('Argument must be between 0.0 and 1.0.') <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def _split_values(*values, floor=0, ceiling=1): <NEW_LINE> <INDENT> values = tuple(values) <NEW_LINE> assert all(True for value in values if floor < value < ceiling), f'all values must be between {floor} and {ceiling}' <NEW_LINE> values_a = (floor,) + values <NEW_LINE> values_b = values + (ceiling,) <NEW_LINE> return tuple(zip(values_a, values_b)) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _reframe_value(n, min=0, max=1): <NEW_LINE> <INDENT> return min + ((max - min) * n) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _reframe_func(tween_func, min=0, max=1): <NEW_LINE> <INDENT> return lambda n: tween_func(Timeline.Tween._reframe_value(n, min, max)) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def tween_linear(n): <NEW_LINE> <INDENT> Timeline.Tween._checkRange(n) <NEW_LINE> return n <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def tween_invert(tween_func): <NEW_LINE> <INDENT> def _tween_invert(n): <NEW_LINE> <INDENT> return tween_func(1 - n) <NEW_LINE> <DEDENT> _tween_invert.inverted = True <NEW_LINE> _tween_invert.tween_func = tween_func <NEW_LINE> return _tween_invert <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def tween_progress_split(tween_func, *split_values): <NEW_LINE> <INDENT> return ( Timeline.Tween._reframe_func(tween_func, _low, _high) for _low, _high in Timeline.Tween._split_values(*split_values) ) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def tween_step(num_steps): <NEW_LINE> <INDENT> increment = 1 / num_steps <NEW_LINE> def _tween_step(n): <NEW_LINE> <INDENT> Timeline.Tween._checkRange(n) <NEW_LINE> return (n // increment) * increment <NEW_LINE> <DEDENT> return _tween_step
These tween functions support tween function modification. The tween functions themselves can be imported from https://github.com/asweigart/pytweening/blob/master/pytweening/__init__.py A handy visualisation of tween can be found at https://easings.net/en
62599028c432627299fa3f83
class HandShakeFailed(Exception): <NEW_LINE> <INDENT> pass
Raised when the handshake fails
6259902821a7993f00c66f0e
class Cell(object): <NEW_LINE> <INDENT> def __init__(self, vertices): <NEW_LINE> <INDENT> self.vertices = vertices <NEW_LINE> self.index = [] <NEW_LINE> <DEDENT> @property <NEW_LINE> def pos(self): <NEW_LINE> <INDENT> position = self.vertices.sum(axis=0)/8. <NEW_LINE> return position <NEW_LINE> <DEDENT> @property <NEW_LINE> def vol(self): <NEW_LINE> <INDENT> vertices = self.vertices <NEW_LINE> pos = self.pos <NEW_LINE> def _get_volume(a, b, c): <NEW_LINE> <INDENT> return abs( np.dot( (vertices[a] - pos), np.cross( (vertices[b] - pos), (vertices[c] - pos) ) ) )/6. <NEW_LINE> <DEDENT> volume = 0. <NEW_LINE> for i in range(12): <NEW_LINE> <INDENT> volume += _get_volume(indexA[i], indexB[i], indexC[i]) <NEW_LINE> <DEDENT> return volume <NEW_LINE> <DEDENT> def dx(self, variableName, mesh): <NEW_LINE> <INDENT> pass
A container of cell info @vertices is a np.array with 8 vertices and self-explanatory. Each vertex of @vertices is the coordinate.
62599028ac7a0e7691f7347b
class SkyPositionTest(unittest.TestCase): <NEW_LINE> <INDENT> pass
docstring
62599028507cdc57c63a5d3a
class NapRequest(object): <NEW_LINE> <INDENT> VALID_HTTP_METHODS = ('GET', 'POST', 'PUT', 'DELETE', 'PATCH') <NEW_LINE> def __init__(self, method, url, data=None, headers=None, auth=None, *args, **kwargs): <NEW_LINE> <INDENT> self._method = method <NEW_LINE> self.url = url <NEW_LINE> self.data = data <NEW_LINE> if not headers: <NEW_LINE> <INDENT> headers = {} <NEW_LINE> <DEDENT> self.headers = headers <NEW_LINE> self.auth = auth <NEW_LINE> self.extra_args = args <NEW_LINE> self.extra_kwargs = kwargs <NEW_LINE> <DEDENT> @property <NEW_LINE> def method(self): <NEW_LINE> <INDENT> return self._method <NEW_LINE> <DEDENT> @method.setter <NEW_LINE> def method(self, value): <NEW_LINE> <INDENT> http_method = value.upper() <NEW_LINE> if http_method not in self.VALID_HTTP_METHODS: <NEW_LINE> <INDENT> raise ValueError("Invalid method") <NEW_LINE> <DEDENT> self._method = value <NEW_LINE> <DEDENT> def send(self): <NEW_LINE> <INDENT> response = requests.request(self.method, self.url, data=self.data, headers=self.headers, auth=self.auth, *self.extra_args, **self.extra_kwargs) <NEW_LINE> return response
A request to send to a nap-modeled API. Primarily used internally within ResourceModel methods.
625990285166f23b2e244369
class Out: <NEW_LINE> <INDENT> def __init__ (self, write_function = None, flush_function = None): <NEW_LINE> <INDENT> if write_function is None: <NEW_LINE> <INDENT> self.write = lambda *args: None <NEW_LINE> <DEDENT> elif not hasattr(write_function, '__call__'): <NEW_LINE> <INDENT> raise TypeError('argument must be callable') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.write = write_function <NEW_LINE> <DEDENT> if flush_function is None: <NEW_LINE> <INDENT> self.flush = lambda *args: None <NEW_LINE> <DEDENT> elif not hasattr(flush_function, '__call__'): <NEW_LINE> <INDENT> raise TypeError('argument must be callable') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.flush = flush_function
A generic .write-able class. Pass a function to act as a write method. This is useful for, for example, piping stuff that gets sent to sys.stdout to your own write function. If no argument is given, write does nothing. A function to act as a flush method can be passed as a second argument; if it is not, Out.flush() does nothing.
62599028925a0f43d25e8fdb
class TestRequestHandler(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.addr = ('0.0.0.0', 9999) <NEW_LINE> self.real_do_GET = SimpleHTTPRequestHandler.do_GET <NEW_LINE> self.real_setup = RewritingHTTPRequestHandler.setup <NEW_LINE> self.real_handle = RewritingHTTPRequestHandler.handle <NEW_LINE> self.real_finish = RewritingHTTPRequestHandler.finish <NEW_LINE> SimpleHTTPRequestHandler.do_GET = fake_do_GET <NEW_LINE> RewritingHTTPRequestHandler.setup = noop <NEW_LINE> RewritingHTTPRequestHandler.handle = noop <NEW_LINE> RewritingHTTPRequestHandler.finish = noop <NEW_LINE> self.dir = os.getcwd() <NEW_LINE> self.tempdir = tempfile.mkdtemp() <NEW_LINE> os.chdir(self.tempdir) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> SimpleHTTPRequestHandler.do_GET = self.real_do_GET <NEW_LINE> RewritingHTTPRequestHandler.setup = self.real_setup <NEW_LINE> RewritingHTTPRequestHandler.handle = self.real_handle <NEW_LINE> RewritingHTTPRequestHandler.finish = self.real_finish <NEW_LINE> os.chdir(self.dir) <NEW_LINE> shutil.rmtree(self.tempdir) <NEW_LINE> <DEDENT> def test_valid_file(self): <NEW_LINE> <INDENT> with open('somefile.html', 'w'): <NEW_LINE> <INDENT> req = FakeRequest('/somefile.html') <NEW_LINE> handler = RewritingHTTPRequestHandler(req, self.addr, None) <NEW_LINE> handler.path = req.path <NEW_LINE> resp = handler.do_GET() <NEW_LINE> self.assertEqual('/somefile.html', resp) <NEW_LINE> <DEDENT> <DEDENT> def test_invalid_file(self): <NEW_LINE> <INDENT> req = FakeRequest('/missingfile.html') <NEW_LINE> handler = RewritingHTTPRequestHandler(req, self.addr, None) <NEW_LINE> handler.path = req.path <NEW_LINE> resp = handler.do_GET() <NEW_LINE> self.assertEqual('/index.html', resp) <NEW_LINE> <DEDENT> def test_valid_path(self): <NEW_LINE> <INDENT> subdir = tempfile.mkdtemp(dir=self.tempdir) <NEW_LINE> dirname = os.path.split(subdir)[-1] <NEW_LINE> fn = os.path.join(dirname, 'somefile.html') <NEW_LINE> with open(fn, 'w'): <NEW_LINE> 
<INDENT> path = '/' + fn <NEW_LINE> req = FakeRequest(path) <NEW_LINE> handler = RewritingHTTPRequestHandler(req, self.addr, None) <NEW_LINE> handler.path = req.path <NEW_LINE> resp = handler.do_GET() <NEW_LINE> self.assertEqual(path, resp) <NEW_LINE> <DEDENT> <DEDENT> def test_invalid_path(self): <NEW_LINE> <INDENT> subdir = tempfile.mkdtemp(dir=self.tempdir) <NEW_LINE> dirname = os.path.split(subdir)[-1] <NEW_LINE> fn = os.path.join(dirname, 'missingfile.html') <NEW_LINE> path = '/' + fn <NEW_LINE> req = FakeRequest(path) <NEW_LINE> handler = RewritingHTTPRequestHandler(req, self.addr, None) <NEW_LINE> handler.path = req.path <NEW_LINE> resp = handler.do_GET() <NEW_LINE> self.assertEqual('/index.html', resp)
The RewritingHTTPRequestHandler.
6259902873bcbd0ca4bcb225
class CourseSerializer(serializers.Serializer): <NEW_LINE> <INDENT> course_id = serializers.CharField()
Serializer for Course IDs
625990289b70327d1c57fd16
class ProductSelectView(generics.ListAPIView): <NEW_LINE> <INDENT> renderer_classes = (JSONRenderer, BrowsableAPIRenderer) <NEW_LINE> serializer_class = app_settings.PRODUCT_SELECT_SERIALIZER <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> term = self.request.GET.get('term', '') <NEW_LINE> if len(term) >= 2: <NEW_LINE> <INDENT> return ProductModel.objects.select_lookup(term)[:10] <NEW_LINE> <DEDENT> return ProductModel.objects.all()[:10]
A simple list view, which is used only by the admin backend. It is required to fetch the data for rendering the select widget when looking up for a product.
62599028d99f1b3c44d06637
class VpnClientConfiguration(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'vpn_client_address_pool': {'key': 'vpnClientAddressPool', 'type': 'AddressSpace'}, 'vpn_client_root_certificates': {'key': 'vpnClientRootCertificates', 'type': '[VpnClientRootCertificate]'}, 'vpn_client_revoked_certificates': {'key': 'vpnClientRevokedCertificates', 'type': '[VpnClientRevokedCertificate]'}, 'vpn_client_protocols': {'key': 'vpnClientProtocols', 'type': '[str]'}, 'vpn_client_ipsec_policies': {'key': 'vpnClientIpsecPolicies', 'type': '[IpsecPolicy]'}, 'radius_server_address': {'key': 'radiusServerAddress', 'type': 'str'}, 'radius_server_secret': {'key': 'radiusServerSecret', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(VpnClientConfiguration, self).__init__(**kwargs) <NEW_LINE> self.vpn_client_address_pool = kwargs.get('vpn_client_address_pool', None) <NEW_LINE> self.vpn_client_root_certificates = kwargs.get('vpn_client_root_certificates', None) <NEW_LINE> self.vpn_client_revoked_certificates = kwargs.get('vpn_client_revoked_certificates', None) <NEW_LINE> self.vpn_client_protocols = kwargs.get('vpn_client_protocols', None) <NEW_LINE> self.vpn_client_ipsec_policies = kwargs.get('vpn_client_ipsec_policies', None) <NEW_LINE> self.radius_server_address = kwargs.get('radius_server_address', None) <NEW_LINE> self.radius_server_secret = kwargs.get('radius_server_secret', None)
VpnClientConfiguration for P2S client. :param vpn_client_address_pool: The reference of the address space resource which represents Address space for P2S VpnClient. :type vpn_client_address_pool: ~azure.mgmt.network.v2018_08_01.models.AddressSpace :param vpn_client_root_certificates: VpnClientRootCertificate for virtual network gateway. :type vpn_client_root_certificates: list[~azure.mgmt.network.v2018_08_01.models.VpnClientRootCertificate] :param vpn_client_revoked_certificates: VpnClientRevokedCertificate for Virtual network gateway. :type vpn_client_revoked_certificates: list[~azure.mgmt.network.v2018_08_01.models.VpnClientRevokedCertificate] :param vpn_client_protocols: VpnClientProtocols for Virtual network gateway. :type vpn_client_protocols: list[str or ~azure.mgmt.network.v2018_08_01.models.VpnClientProtocol] :param vpn_client_ipsec_policies: VpnClientIpsecPolicies for virtual network gateway P2S client. :type vpn_client_ipsec_policies: list[~azure.mgmt.network.v2018_08_01.models.IpsecPolicy] :param radius_server_address: The radius server address property of the VirtualNetworkGateway resource for vpn client connection. :type radius_server_address: str :param radius_server_secret: The radius secret property of the VirtualNetworkGateway resource for vpn client connection. :type radius_server_secret: str
6259902866673b3332c31384
class Null(object): <NEW_LINE> <INDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return ''
Always different
62599028bf627c535bcb244b
class Measure: <NEW_LINE> <INDENT> def __init__(self, names): <NEW_LINE> <INDENT> self.res = {} <NEW_LINE> self.cnt = {} <NEW_LINE> for n in names: <NEW_LINE> <INDENT> self.res[n] = [0,0,0] <NEW_LINE> self.cnt[n] = [0,0] <NEW_LINE> <DEDENT> <DEDENT> def update_res(self, preds, c): <NEW_LINE> <INDENT> for p in preds: <NEW_LINE> <INDENT> if p == c: <NEW_LINE> <INDENT> if preds[p][0] == 1.0: <NEW_LINE> <INDENT> self.res[p][0] += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.res[p][2] += 1 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if preds[p][0] == 1.0: <NEW_LINE> <INDENT> self.res[p][1] += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def update_count(self, res, cls): <NEW_LINE> <INDENT> if res == True: <NEW_LINE> <INDENT> self.cnt[cls][0] += 1 <NEW_LINE> <DEDENT> self.cnt[cls][1] += 1 <NEW_LINE> <DEDENT> def ds_counter(self, ds): <NEW_LINE> <INDENT> return self.cnt[ds] <NEW_LINE> <DEDENT> def all_counter(self): <NEW_LINE> <INDENT> c = 0 <NEW_LINE> t = 0 <NEW_LINE> for n in self.cnt: <NEW_LINE> <INDENT> c += self.cnt[n][0] <NEW_LINE> t += self.cnt[n][1] <NEW_LINE> <DEDENT> return [c, t] <NEW_LINE> <DEDENT> def fmeasure(self, p, r): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> res = 2.0 * (p * r) / (p + r) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> res = 0.0 <NEW_LINE> <DEDENT> return res <NEW_LINE> <DEDENT> def precision(self, res): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> prec = res[0] / float(res[0] + res[1]) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> prec = 1.0 <NEW_LINE> <DEDENT> return prec <NEW_LINE> <DEDENT> def recall(self, res): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> rec = res[0] / float(res[0] + res[2]) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> rec = 0.0 <NEW_LINE> <DEDENT> return rec <NEW_LINE> <DEDENT> def svm_metrics(self, svm): <NEW_LINE> <INDENT> pr = self.precision(self.res[svm]) <NEW_LINE> re = self.recall(self.res[svm]) <NEW_LINE> fm = self.fmeasure(pr, re) <NEW_LINE> return (pr, re, fm) <NEW_LINE> 
<DEDENT> def all_metrics(self): <NEW_LINE> <INDENT> met = {} <NEW_LINE> for r in self.res: <NEW_LINE> <INDENT> met[r] = self.svm_metrics(r) <NEW_LINE> <DEDENT> return met <NEW_LINE> <DEDENT> def micro_average(self): <NEW_LINE> <INDENT> sums = [0,0,0] <NEW_LINE> for r in self.res: <NEW_LINE> <INDENT> for v in range(3): <NEW_LINE> <INDENT> sums[v] += self.res[r][v] <NEW_LINE> <DEDENT> <DEDENT> pr = self.precision(sums) <NEW_LINE> re = self.recall(sums) <NEW_LINE> fm = self.fmeasure(pr, re) <NEW_LINE> return (pr, re, fm)
Class which handles metrics computation names list of names of the svms
625990283eb6a72ae038b5fa
class LoginTask(Task): <NEW_LINE> <INDENT> def __init__(self, o, container, registries={}): <NEW_LINE> <INDENT> Task.__init__(self, o, container) <NEW_LINE> self._registries = registries <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> image = self.container.get_image_details() <NEW_LINE> if image['repository'].find('/') <= 0: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> registry, repo_name = image['repository'].split('/', 1) <NEW_LINE> if registry not in self._registries: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.o.reset() <NEW_LINE> self.o.pending('logging in to {}...'.format(registry)) <NEW_LINE> try: <NEW_LINE> <INDENT> self.container.ship.backend.login(**self._registries[registry]) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise exceptions.OrchestrationException( 'Login to {} failed: {}'.format(registry, e))
Log in with the registry hosting the image a container is based on. Extracts the registry name from the image needed for the container, and if authentication data is provided for that registry, login to it so a subsequent pull operation can be performed.
6259902826238365f5fadae7
class EggPMExtensionFactory(ExtensionFactory): <NEW_LINE> <INDENT> def build(self, extensionPath, moduleName, className, pmExtensionName): <NEW_LINE> <INDENT> return EggPMExtension(extensionPath, moduleName, className, pmExtensionName)
Simple factory class for EggPMExtension objects
62599028d18da76e235b7919
@BACKBONES.register_module() <NEW_LINE> class ResNeXt(ResNet): <NEW_LINE> <INDENT> arch_settings = { 50: (Bottleneck, (3, 4, 6, 3)), 101: (Bottleneck, (3, 4, 23, 3)), 152: (Bottleneck, (3, 8, 36, 3)) } <NEW_LINE> def __init__(self, groups=1, base_width=4, **kwargs): <NEW_LINE> <INDENT> self.groups = groups <NEW_LINE> self.base_width = base_width <NEW_LINE> super(ResNeXt, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def make_res_layer(self, **kwargs): <NEW_LINE> <INDENT> return ResLayer( groups=self.groups, base_width=self.base_width, base_channels=self.base_channels, **kwargs)
ResNeXt backbone. Args: depth (int): Depth of resnet, from {18, 34, 50, 101, 152}. in_channels (int): Number of input image channels. Default: 3. num_stages (int): Resnet stages. Default: 4. groups (int): Group of resnext. base_width (int): Base width of resnext. strides (Sequence[int]): Strides of the first block of each stage. dilations (Sequence[int]): Dilation of each stage. out_indices (Sequence[int]): Output from which stages. style (str): `pytorch` or `caffe`. If set to "pytorch", the stride-two layer is the 3x3 conv layer, otherwise the stride-two layer is the first 1x1 conv layer. frozen_stages (int): Stages to be frozen (all param fixed). -1 means not freezing any parameters. norm_cfg (dict): dictionary to construct and config norm layer. norm_eval (bool): Whether to set norm layers to eval mode, namely, freeze running stats (mean and var). Note: Effect on Batch Norm and its variants only. with_cp (bool): Use checkpoint or not. Using checkpoint will save some memory while slowing down the training speed. zero_init_residual (bool): whether to use zero init for last norm layer in resblocks to let them behave as identity.
62599028796e427e5384f714
class VoterErrors( Errors ): <NEW_LINE> <INDENT> def __init__( self ): <NEW_LINE> <INDENT> super().__init__()
Error objects which are raised when the Voter has done something which prevents her vote from being counted
625990286fece00bbaccc951
class SelectableFunctionText(uw.Text): <NEW_LINE> <INDENT> def __init__(self, name, activated, on_select): <NEW_LINE> <INDENT> markup = name <NEW_LINE> if activated: <NEW_LINE> <INDENT> markup += ' (active)' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> markup += ' (nopped)' <NEW_LINE> <DEDENT> super().__init__(markup, align=uw.CENTER) <NEW_LINE> self.name = name <NEW_LINE> self.on_select = on_select <NEW_LINE> <DEDENT> def selectable(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def keypress(self, _, key): <NEW_LINE> <INDENT> if key == 'enter' or key == ' ': <NEW_LINE> <INDENT> self.on_select(self.name) <NEW_LINE> return None <NEW_LINE> <DEDENT> return key
Text that is selectable, works like a button but with a different appearance :ivar name: The name of the function :ivar on_select: Called when the text is selected
62599028ac7a0e7691f73481
class AclModelTest(BaseTest): <NEW_LINE> <INDENT> def test_change_permission(self): <NEW_LINE> <INDENT> for permission in ('read', 'write', 'delete'): <NEW_LINE> <INDENT> self.sketch1.grant_permission( user=self.user1, permission=permission) <NEW_LINE> self.assertTrue( self.sketch1.has_permission( user=self.user1, permission=permission)) <NEW_LINE> self.sketch1.revoke_permission( user=self.user1, permission=permission) <NEW_LINE> self.assertFalse( self.sketch1.has_permission( user=self.user1, permission=permission)) <NEW_LINE> <DEDENT> <DEDENT> def test_change_public(self): <NEW_LINE> <INDENT> self.sketch1.grant_permission(user=None, permission='read') <NEW_LINE> self.assertTrue(self.sketch1.is_public) <NEW_LINE> self.sketch1.revoke_permission(user=None, permission='read') <NEW_LINE> self.assertFalse(self.sketch1.is_public)
Test the ACL model.
625990280a366e3fb87dd980
class Actions(ActionsBase): <NEW_LINE> <INDENT> def prepare(self,**args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def stop(self,**kwargs): <NEW_LINE> <INDENT> if j.system.fs.exists('$(param.base)/var/run/asterisk/asterisk.ctl'): <NEW_LINE> <INDENT> j.do.execute('cd $(param.base)/sbin && ./asterisk -rx "core stop now" ')
process for install ------------------- step1: prepare actions step2: check_requirements action step3: download files & copy on right location (hrd info is used) step4: configure action step5: check_uptime_local to see if process stops (uses timeout $process.stop.timeout) step5b: if check uptime was true will do stop action and retry the check_uptime_local check step5c: if check uptime was true even after stop will do halt action and retry the check_uptime_local check step6: use the info in the hrd to start the application step7: do check_uptime_local to see if process starts step7b: do monitor_local to see if package healthy installed & running step7c: do monitor_remote to see if package healthy installed & running, but this time test is done from central location
62599028a4f1c619b294f58d
class UnitTestBaseAsync(unittest.IsolatedAsyncioTestCase): <NEW_LINE> <INDENT> async def asyncSetUp(self): <NEW_LINE> <INDENT> self.session = ClientSession() <NEW_LINE> self.rdy = RainCloudy(USERNAME, PASSWORD, self.session) <NEW_LINE> <DEDENT> async def asyncTearDown(self): <NEW_LINE> <INDENT> await self.session.close() <NEW_LINE> self.rdy = None <NEW_LINE> <DEDENT> def add_methods(self, mocked: aioresponses): <NEW_LINE> <INDENT> mocked.get( SETUP_ENDPOINT, status=200, body=load_fixture("setup.html"), content_type="text/html; charset=UTF-8", ) <NEW_LINE> mocked.get( LOGIN_ENDPOINT, status=200, body=load_fixture("home.html"), content_type="text/html; charset=UTF-8", ) <NEW_LINE> mocked.get( re.compile(rf"^{STATUS_ENDPOINT}*"), status=200, body=load_fixture("get_cu_and_fu_status.json"), ) <NEW_LINE> mocked.post( LOGIN_ENDPOINT, status=200, body=load_fixture("home.html"), content_type="text/html; charset=UTF-8", ) <NEW_LINE> mocked.get( HOME_ENDPOINT, status=200, body=load_fixture("home.html"), content_type="text/html; charset=UTF-8", ) <NEW_LINE> mocked.get(LOGOUT_ENDPOINT, status=200) <NEW_LINE> mocked.post(SETUP_ENDPOINT)
Top level test class for RainCloudy Core.
625990289b70327d1c57fd1a
class InvalidLocation(Exception): <NEW_LINE> <INDENT> pass
InvalidLocation Exception class.
62599028507cdc57c63a5d40
class SetBase(_ORMBase): <NEW_LINE> <INDENT> pass
Wrapper class for set element
62599028bf627c535bcb244f
class TestMigrationStat(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testMigrationStat(self): <NEW_LINE> <INDENT> pass
MigrationStat unit test stubs
62599028287bf620b6272b89
class TestCall(mox.MoxTestBase): <NEW_LINE> <INDENT> def test(self): <NEW_LINE> <INDENT> self.mox.StubOutWithMock(dbus, 'SystemBus', use_mock_anything=True) <NEW_LINE> bus = self.mox.CreateMock(dbus.bus.BusConnection) <NEW_LINE> proxy = self.mox.CreateMockAnything() <NEW_LINE> method = self.mox.CreateMockAnything() <NEW_LINE> dbus.SystemBus().AndReturn(bus) <NEW_LINE> bus.get_object(privops.DBUS_BUS_NAME, privops.DBUS_OBJECT_PATH).AndReturn( proxy) <NEW_LINE> proxy.get_dbus_method( 'createNetwork', dbus_interface=privops.DBUS_INTERFACE).AndReturn( method) <NEW_LINE> method(['www.company.com', 'login.company.com']) <NEW_LINE> self.mox.ReplayAll() <NEW_LINE> privops.call('createNetwork', ['www.company.com', 'login.company.com'])
Test dispatching privileged operations.
62599028796e427e5384f716
class FeedbackSubjectOneOffJob(jobs.BaseMapReduceOneOffJobManager): <NEW_LINE> <INDENT> DEFAULT_SUBJECT = u'(Feedback from a learner)' <NEW_LINE> @classmethod <NEW_LINE> def entity_classes_to_map_over(cls): <NEW_LINE> <INDENT> return [feedback_models.FeedbackThreadModel] <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def map(item): <NEW_LINE> <INDENT> if item.subject != FeedbackSubjectOneOffJob.DEFAULT_SUBJECT: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> first_message = feedback_services.get_message( item.exploration_id, item.thread_id, 0) <NEW_LINE> if not first_message.text: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if len(first_message.text) > constants.FEEDBACK_SUBJECT_MAX_CHAR_LIMIT: <NEW_LINE> <INDENT> updated_subject = first_message.text[ :constants.FEEDBACK_SUBJECT_MAX_CHAR_LIMIT] <NEW_LINE> if ' ' in updated_subject: <NEW_LINE> <INDENT> updated_subject = ' '.join(updated_subject.split(' ')[:-1]) <NEW_LINE> <DEDENT> item.subject = updated_subject + '...' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> item.subject = first_message.text <NEW_LINE> <DEDENT> item.put(update_last_updated_time=False) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def reduce(key, value): <NEW_LINE> <INDENT> pass
One-off job for updating the feedback subject.
6259902863f4b57ef008653f
class SymConf(object): <NEW_LINE> <INDENT> dimension=1 <NEW_LINE> base =2 <NEW_LINE> basemax =2 <NEW_LINE> def __init__(self,val=0): <NEW_LINE> <INDENT> tmp=val <NEW_LINE> if isinstance(val,str): <NEW_LINE> <INDENT> tmp = int(val,SymConf.base) <NEW_LINE> <DEDENT> self.label,self.label_int,self.repetition = SymConf.get_representative(tmp) <NEW_LINE> <DEDENT> def get(self): <NEW_LINE> <INDENT> return self.label <NEW_LINE> <DEDENT> def get_state(self,k): <NEW_LINE> <INDENT> return self.label[k] <NEW_LINE> <DEDENT> def get_dimension(self): <NEW_LINE> <INDENT> return SymConf.dimension <NEW_LINE> <DEDENT> def set(self,conf): <NEW_LINE> <INDENT> self.label = conf <NEW_LINE> return None <NEW_LINE> <DEDENT> def set_state(self,k,new_state): <NEW_LINE> <INDENT> self.label[k] = new_state <NEW_LINE> return self.label <NEW_LINE> <DEDENT> def get_integer(self): <NEW_LINE> <INDENT> return int(self.label,base) <NEW_LINE> <DEDENT> def get_repetition(self): <NEW_LINE> <INDENT> return self.repetition <NEW_LINE> <DEDENT> def get_count(self,state): <NEW_LINE> <INDENT> return self.label.count(state) <NEW_LINE> <DEDENT> def init_globals(base,N): <NEW_LINE> <INDENT> SymConf.base = base <NEW_LINE> SymConf.dimension = N <NEW_LINE> SymConf.basemax = base**N <NEW_LINE> <DEDENT> @Memoize <NEW_LINE> def get_representative(val,p=0): <NEW_LINE> <INDENT> if isinstance(val,str): <NEW_LINE> <INDENT> val = int(val,SymConf.base) <NEW_LINE> <DEDENT> rep=val <NEW_LINE> current = val <NEW_LINE> repetition=1 <NEW_LINE> for k in range(SymConf.dimension-1): <NEW_LINE> <INDENT> new = current * SymConf.base <NEW_LINE> shift,current = divmod(new,SymConf.basemax) <NEW_LINE> current = current + shift <NEW_LINE> if not (current > rep): <NEW_LINE> <INDENT> repetition = repetition + max(0, 1-abs(rep-current)) <NEW_LINE> rep = current <NEW_LINE> <DEDENT> <DEDENT> return gmpy2.digits(rep,SymConf.base).zfill(SymConf.dimension),rep,repetition <NEW_LINE> <DEDENT> def get_basis(p=0): <NEW_LINE> <INDENT> lookup = [] 
<NEW_LINE> for k in range(SymConf.basemax): <NEW_LINE> <INDENT> lookup.append( SymConf.get_representative(k,p)[1]) <NEW_LINE> <DEDENT> return [SymConf(x) for x in set(lookup)] <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<SymConf %r>' % (self.label)
represents an unique network configuration. * Important: in this representation, states are limited up to maximum of 10 states.
625990289b70327d1c57fd1c
@admin.register(models.Room) <NEW_LINE> class RoomAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> inlines = (PhotoInline,) <NEW_LINE> fieldsets = ( ( "Basic Info", { "fields": ( "name", "description", "country", "city", "address", "price", "room_type", ) }, ), ("Times", {"fields": ("check_in", "check_out", "instant_book",)}), ("Spaces", {"fields": ("guests", "beds", "bedrooms", "baths",)}), ( "More About the Spaces", { "classes": ("collapse",), "fields": ("amenties", "facilities", "house_rules",), }, ), ("Last Detail", {"fields": ("host",)}), ) <NEW_LINE> list_display = ( "name", "country", "city", "price", "guests", "beds", "bedrooms", "baths", "check_in", "check_out", "instant_book", "count_amenties", "count_photos", "total_rating", ) <NEW_LINE> list_filter = ( "instant_book", "host__superhost", "room_type", "amenties", "facilities", "house_rules", "city", "country", ) <NEW_LINE> raw_id_fields = ("host",) <NEW_LINE> search_fields = ( "=city", "^host__username", ) <NEW_LINE> filter_horizontal = ( "amenties", "facilities", "house_rules", ) <NEW_LINE> def count_amenties(self, obj): <NEW_LINE> <INDENT> return obj.amenties.count() <NEW_LINE> <DEDENT> count_amenties.short_description = "Amenity Count" <NEW_LINE> def count_photos(self, obj): <NEW_LINE> <INDENT> return obj.photos.count() <NEW_LINE> <DEDENT> count_photos.short_description = "Photo Count"
Room Admin Definition
625990288a349b6b436871d2
class RiderViewSet(mixins.CreateModelMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin, mixins.ListModelMixin, viewsets.GenericViewSet): <NEW_LINE> <INDENT> serializer_class = RiderModelSerializer <NEW_LINE> lookup_field = 'slugname' <NEW_LINE> search_fields = ( 'first_name', 'last_slugname', 'vehicle_made', 'vehicle_model', 'licence_plate', 'rider_address' ) <NEW_LINE> filter_fields = ('is_active', 'is_available') <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> queryset = Rider.objects.all() <NEW_LINE> if self.action == 'list': <NEW_LINE> <INDENT> queryset = ( queryset .filter(is_available=True) .filter(is_active=True) ) <NEW_LINE> return queryset <NEW_LINE> <DEDENT> return queryset <NEW_LINE> <DEDENT> @action(detail=True, methods=['GET']) <NEW_LINE> def orders(self, request, *args, **kwargs): <NEW_LINE> <INDENT> rider = get_object_or_404(Rider, id=kwargs['slugname']) <NEW_LINE> orders_assigned = ( rider.order_set.all() .filter(picked_up=False) .filter(deliveried=False) ) <NEW_LINE> n = len(orders_assigned) <NEW_LINE> for i in range(n): <NEW_LINE> <INDENT> data = OrderModelSerializer(orders_assigned[i]).data <NEW_LINE> <DEDENT> return Response(data, status=status.HTTP_200_OK)
Riders view set. ################################################################################# Http methods and the URLs: GET /riders/ (list Riders) POST /riders/ (create Rider) PUT /riders/ (update Rider info) PATCH /riders/ (partial Rider Store info) GET /riders/<id>/orders/ (show orders to pickup) ######################################################################################
6259902891af0d3eaad3adc3
class RepoHandler(object): <NEW_LINE> <INDENT> def __init__(self, repo_url=None, temp_dir_path=None): <NEW_LINE> <INDENT> if repo_url is None and temp_dir_path is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if repo_url is None and temp_dir_path is not None: <NEW_LINE> <INDENT> self.set_repo(temp_dir_path) <NEW_LINE> return <NEW_LINE> <DEDENT> self.repo_url = repo_url <NEW_LINE> if temp_dir_path is None: <NEW_LINE> <INDENT> temp_dir_path = repo_url.rsplit('/', 1)[-1] <NEW_LINE> if temp_dir_path.endswith('.git'): <NEW_LINE> <INDENT> temp_dir_path = temp_dir_path.replace('.git', '') <NEW_LINE> <DEDENT> <DEDENT> if os.path.exists(temp_dir_path): <NEW_LINE> <INDENT> shutil.rmtree(temp_dir_path) <NEW_LINE> self.repo_path = temp_dir_path <NEW_LINE> <DEDENT> elif os.access(os.path.dirname(temp_dir_path), os.W_OK): <NEW_LINE> <INDENT> self.repo_path = temp_dir_path <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_path = '/'.join([os.getcwd(), temp_dir_path]) <NEW_LINE> if os.access(os.path.dirname(new_path), os.W_OK): <NEW_LINE> <INDENT> self.repo_path = new_path <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise IOError('No permissions for:' + temp_dir_path) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_path(self): <NEW_LINE> <INDENT> return self.repo_path <NEW_LINE> <DEDENT> def set_repo(self, path): <NEW_LINE> <INDENT> self.repo_path = path <NEW_LINE> self.repo = Repo(self.repo_path) <NEW_LINE> <DEDENT> def clone_repo(self): <NEW_LINE> <INDENT> Repo.clone_from(self.repo_url, self.repo_path) <NEW_LINE> self.repo = Repo(self.repo_path) <NEW_LINE> assert not self.repo.bare <NEW_LINE> <DEDENT> def get_modified_files(self, prev_commits=1): <NEW_LINE> <INDENT> hcommit = self.repo.head.commit <NEW_LINE> results = hcommit.diff('HEAD~' + str(prev_commits)) <NEW_LINE> mod_added_files = [] <NEW_LINE> for result in results.iter_change_type('M'): <NEW_LINE> <INDENT> full_path = '/'.join([self.repo_path, result.a_path]) <NEW_LINE> file_descriptor = 
self.get_file_descriptor(full_path=full_path, change_type='M') <NEW_LINE> mod_added_files += [file_descriptor] <NEW_LINE> <DEDENT> for result in results.iter_change_type('A'): <NEW_LINE> <INDENT> full_path = '/'.join([self.repo_path, result.a_path]) <NEW_LINE> file_descriptor = self.get_file_descriptor(full_path=full_path, change_type='A') <NEW_LINE> mod_added_files += [file_descriptor] <NEW_LINE> <DEDENT> return mod_added_files <NEW_LINE> <DEDENT> def get_file_descriptor(self, full_path, change_type): <NEW_LINE> <INDENT> file_ext = os.path.splitext(full_path)[1] <NEW_LINE> file_descriptor = FileDesc(path=full_path, ext=file_ext, change_type=change_type) <NEW_LINE> return file_descriptor <NEW_LINE> <DEDENT> def delete_temp_dir(self): <NEW_LINE> <INDENT> shutil.rmtree(self.repo_path) <NEW_LINE> <DEDENT> def run_cmd(self, cmd): <NEW_LINE> <INDENT> pass
The main Repository Handler class. 1. Initializes the folder in which to store the cloned repo 2. Clones the repo 3. Gets the list of modified files in the repo_url as a list of file descriptor objects
62599028e76e3b2f99fd99a8
class Test_sqlite(B3TestCase, StorageAPITest): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> B3TestCase.setUp(self) <NEW_LINE> self.storage = self.console.storage = DatabaseStorage('sqlite://'+SQLITE_DB, self.console) <NEW_LINE> self.storage.executeSql("@b3/sql/sqlite/b3.sql") <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> B3TestCase.tearDown(self) <NEW_LINE> self.storage.shutdown()
NOTE: to work properly you must be running a MySQL database on localhost which must have a user named 'b3test' with password 'test' which has all privileges over a table (already created or not) named 'b3_test'
62599028796e427e5384f718
class GetUsersInfo(Resource): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.parser = reqparse.RequestParser() <NEW_LINE> self.parser.add_argument("nickname", type=str, default="") <NEW_LINE> pass <NEW_LINE> <DEDENT> @swagger.operation( notes="根据用户昵称获取用户信息", nickname="get", parameters=[ { "name": "nickname", "description": "用户昵称", "required": False, "dataType": "str", "paramType": "query", "defaultValue": None } ], responseMessages=[ { "code": 200, "message": "success" }, { "code": 404, "message": "No Such Articles" } ] ) <NEW_LINE> def get(self): <NEW_LINE> <INDENT> _args = self.parser.parse_args() <NEW_LINE> if not _args["nickname"]: <NEW_LINE> <INDENT> return {"code": 400, "msg": "请输入用户昵称!"} <NEW_LINE> <DEDENT> access_token = get_access_token() <NEW_LINE> info_url = WeChatConfig.Urls["user_info_url"] <NEW_LINE> user_url = WeChatConfig.Urls["openid_list_url"] <NEW_LINE> payload = { "access_token": access_token["access_token"], "next_openid": None } <NEW_LINE> response = requests.get(user_url, params=payload) <NEW_LINE> result = dict() <NEW_LINE> try: <NEW_LINE> <INDENT> users = response.json().get("data").get("openid") <NEW_LINE> data = {} <NEW_LINE> i = 1 <NEW_LINE> for item in users: <NEW_LINE> <INDENT> i += 1 <NEW_LINE> payload = { "access_token": access_token["access_token"], "openid": item, "lang": "zh_CN" } <NEW_LINE> response = requests.get(info_url, params=payload) <NEW_LINE> user_info = response.json() <NEW_LINE> if user_info["nickname"] == _args["nickname"]: <NEW_LINE> <INDENT> data = user_info <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> result.update({"code": 200, "data": data}) <NEW_LINE> <DEDENT> except Exception as ex: <NEW_LINE> <INDENT> result.update({"code": 400, "data": str(ex)}) <NEW_LINE> <DEDENT> if not result["data"]: <NEW_LINE> <INDENT> result["data"] = "不存在此用户" <NEW_LINE> <DEDENT> return result
# 获取关注用户的openid列表
62599028a8ecb033258721b9
class WebParserTest(UnittestPythonCompatibility): <NEW_LINE> <INDENT> web_file = os.path.join(FILEPATH, 'graph.web') <NEW_LINE> tempfiles = [] <NEW_LINE> def tearDown(self): <NEW_LINE> <INDENT> for tmp in self.tempfiles: <NEW_LINE> <INDENT> if os.path.exists(tmp): <NEW_LINE> <INDENT> os.remove(tmp) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def test_format_import(self): <NEW_LINE> <INDENT> graph = read_web(self.web_file, auto_parse_format=False) <NEW_LINE> self.assertEqual(len(graph), 694) <NEW_LINE> self.assertEqual(len(graph.edges), 1386) <NEW_LINE> self.assertEqual(graph.directed, False) <NEW_LINE> self.assertEqual(graph_directionality(graph), 'undirectional') <NEW_LINE> self.assertEqual(graph.root, 1) <NEW_LINE> self.assertTrue(isinstance(graph, GraphAxis)) <NEW_LINE> self.assertTrue(isinstance(graph.query_nodes({'key': 'ntrials'}).value, PY_STRING)) <NEW_LINE> self.assertTrue(isinstance(graph.query_nodes({'key': 'rotate180_0'}).value, PY_STRING)) <NEW_LINE> for node in graph.query_nodes({'key': 'activereslist'}): <NEW_LINE> <INDENT> self.assertTrue(isinstance(node.value, list)) <NEW_LINE> <DEDENT> for node in graph.query_nodes({'type': 'FloatArray'}): <NEW_LINE> <INDENT> self.assertTrue(all([isinstance(n, str) for n in node.get()])) <NEW_LINE> <DEDENT> <DEDENT> def test_format_import_autoformatparse(self): <NEW_LINE> <INDENT> graph = read_web(self.web_file) <NEW_LINE> self.assertTrue(isinstance(graph.query_nodes({'key': 'ntrials'}).value, int)) <NEW_LINE> self.assertTrue(isinstance(graph.query_nodes({'key': 'rotate180_0'}).value, bool)) <NEW_LINE> for node in graph.query_nodes({'key': 'activereslist'}): <NEW_LINE> <INDENT> self.assertTrue(isinstance(node.value, list)) <NEW_LINE> <DEDENT> for node in graph.query_nodes({'type': 'FloatArray'}): <NEW_LINE> <INDENT> self.assertTrue(all([isinstance(n, float) for n in node.get()])) <NEW_LINE> <DEDENT> <DEDENT> def test_format_import_orm(self): <NEW_LINE> <INDENT> web = GraphAxis() <NEW_LINE> 
web.orm.node_mapping.add(FloatArray, lambda x: x.get('type') == 'FloatArray') <NEW_LINE> web = read_web(self.web_file, graph=web) <NEW_LINE> for node in web.query_nodes({'type': 'FloatArray'}): <NEW_LINE> <INDENT> self.assertTrue(all([isinstance(n, str) for n in node.get()])) <NEW_LINE> <DEDENT> <DEDENT> def test_format_export(self): <NEW_LINE> <INDENT> graph = read_web(self.web_file) <NEW_LINE> web = write_web(graph) <NEW_LINE> outfile = os.path.join(FILEPATH, 'test_export.web') <NEW_LINE> with open(outfile, 'w') as otf: <NEW_LINE> <INDENT> otf.write(web) <NEW_LINE> self.tempfiles.append(outfile) <NEW_LINE> <DEDENT> self.assertTrue(os.path.isfile(outfile)) <NEW_LINE> graph1 = read_web(outfile) <NEW_LINE> self.assertEqual(len(graph), len(graph1)) <NEW_LINE> self.assertEqual(len(graph.edges), len(graph1.edges))
Unit tests for parsing Spider serialized data structures (.web format)
625990280a366e3fb87dd984
class SpecMeta(abc.ABCMeta): <NEW_LINE> <INDENT> def __init__(cls, name, bases, dct): <NEW_LINE> <INDENT> cls.validate_handler(dct) <NEW_LINE> SpecRegistry.register(cls) <NEW_LINE> super().__init__(name, bases, dct) <NEW_LINE> <DEDENT> def validate_handler(self, dct): <NEW_LINE> <INDENT> self.validate_handler_schema(dct) <NEW_LINE> <DEDENT> def validate_handler_schema(self, dct): <NEW_LINE> <INDENT> schema = dct.get('schema', None) <NEW_LINE> if isinstance(schema, str) and len(schema) > 0: <NEW_LINE> <INDENT> self._schema = schema <NEW_LINE> <DEDENT> elif isinstance(schema, list) and len(schema) > 0: <NEW_LINE> <INDENT> for v in schema: <NEW_LINE> <INDENT> if not (isinstance(v, str) and len(v) > 0): <NEW_LINE> <INDENT> raise SpecInvalidSchema() <NEW_LINE> <DEDENT> <DEDENT> self._schema = schema <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise SpecInvalidSchema("Schema type or value error")
Meta class for all elements with schemas. This allows for registration, validation, and tracking of the schema implementors.
62599028a4f1c619b294f591
class _MandatoryValue: <NEW_LINE> <INDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return '<mandatory argument value>'
Use this as a default value for an Argument to indicate that the argument is mandatory: requests without the argument are not accepted.
62599028be8e80087fbc0016
class GitResponse(HttpResponse): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.service = Services(kwargs.pop('service', None)) <NEW_LINE> self.action = kwargs.pop('action', None) <NEW_LINE> self.repository = kwargs.pop('repository', None) <NEW_LINE> self.data = kwargs.pop('data', None) <NEW_LINE> super(GitResponse, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def set_response_header(self): <NEW_LINE> <INDENT> self.__setitem__('Expires', 'Fri, 01 Jan 1980 00:00:00 GMT') <NEW_LINE> self.__setitem__('Pragma', 'no-cache') <NEW_LINE> self.__setitem__('Cache-Control', 'no-cache, max-age=0, must-revalidate') <NEW_LINE> self.__setitem__('Content-Type', 'application/x-{0}-{1}'.format(self.service.value, self.action)) <NEW_LINE> <DEDENT> def set_response_first_line(self): <NEW_LINE> <INDENT> service = '# service={}\n'.format(self.service.value) <NEW_LINE> length = len(service) + 4 <NEW_LINE> prefix = "{:04x}".format(length & 0xFFFF) <NEW_LINE> self.write('{0}{1}0000'.format(prefix, service)) <NEW_LINE> <DEDENT> def set_response_payload(self, payload_type): <NEW_LINE> <INDENT> if payload_type == plumbing.git_info_refs: <NEW_LINE> <INDENT> process = subprocess.Popen([self.service.value, '--stateless-rpc', '--advertise-refs', self.repository], stdout=subprocess.PIPE) <NEW_LINE> self.write(process.stdout.read()) <NEW_LINE> <DEDENT> elif payload_type == plumbing.git_receive_pack: <NEW_LINE> <INDENT> process = subprocess.Popen(['git-receive-pack', '--stateless-rpc', self.repository], stdin=subprocess.PIPE, stdout=subprocess.PIPE) <NEW_LINE> self.write(process.communicate(input=self.data)[0]) <NEW_LINE> <DEDENT> elif payload_type == plumbing.git_upload_pack: <NEW_LINE> <INDENT> process = subprocess.Popen(['git-upload-pack', '--stateless-rpc', self.repository], stdin=subprocess.PIPE, stdout=subprocess.PIPE) <NEW_LINE> self.write(process.communicate(input=self.data)[0]) <NEW_LINE> <DEDENT> <DEDENT> def get_http_info_refs(self): <NEW_LINE> 
<INDENT> try: <NEW_LINE> <INDENT> self.set_response_header() <NEW_LINE> self.set_response_first_line() <NEW_LINE> self.set_response_payload(plumbing.git_info_refs) <NEW_LINE> return self <NEW_LINE> <DEDENT> except BaseException as e: <NEW_LINE> <INDENT> exc_type, exc_obj, exc_tb = sys.exc_info() <NEW_LINE> fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] <NEW_LINE> logger.info(exc_type, fname, exc_tb.tb_lineno) <NEW_LINE> return get_http_error(e) <NEW_LINE> <DEDENT> <DEDENT> def get_http_service_rpc(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.set_response_header() <NEW_LINE> if self.service == Services.git_receive_pack: <NEW_LINE> <INDENT> self.set_response_payload(plumbing.git_receive_pack) <NEW_LINE> <DEDENT> elif self.service == Services.git_upload_pack: <NEW_LINE> <INDENT> self.set_response_payload(plumbing.git_upload_pack) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> except BaseException as e: <NEW_LINE> <INDENT> return get_http_error(e)
An extension of Django's HttpResponse that meets Git's smart HTTP specs The responses to Git's requests must follow a protocol, and this class is meant to build properly formed responses. Attributes: service (str): the initiated git plumbing command action (str): the action initiated by the service repository (str): target repository of the request data (str): uploaded data
6259902830c21e258be997ab
class PlaceView():
    """CRUD view over Place records.

    get/post operate on the collection; delete/put address a single
    record by its primary key.
    """

    def get(self):
        """Return every place as a list of {'id', 'name'} dicts."""
        places = Place.query.all()
        return [{'id': place.id, 'name': place.name} for place in places]

    def post(self):
        """Create a place from the submitted form and return it with 201."""
        created = Place(name=request.form.get('name'))
        created.insert()
        return {'id': created.id, 'name': created.name}, 201

    def delete(self, id_self):
        """Remove the place with the given id; respond with 204 No Content."""
        target = Place.query.filter_by(id=id_self).first()
        target.remove()
        return "", 204

    def put(self, id_self):
        """Rename the place with the given id; a missing 'name' keeps the old one."""
        target = Place.query.filter_by(id=id_self).first()
        submitted = request.form.get('name')
        # Only an absent field (None) preserves the current name; an empty
        # string is treated as an explicit new value.
        new_name = target.name if submitted is None else submitted
        target.update(new_name)
        return {'id': target.id, 'name': target.name}
CRUD operations for Place resources.
625990288c3a8732951f74f5
class ApiTest(unittest.TestCase):
    """Automated HTTP tests driving the API's GET and POST endpoints."""

    def setUp(self):
        # All endpoint/fixture data comes from the shared test configuration.
        self.url = apitestconfig.URL
        self.headers = apitestconfig.HEADERS
        self.get_params = apitestconfig.GET_PARAMS
        self.post_data = apitestconfig.POST_DATA

    def test_get(self):
        """Every configured query-string variant must answer 200."""
        for query in self.get_params:
            response = requests.get(self.url, params=query)
            status = response.status_code
            body = response.json()
            print("GET", body)
            self.assertEqual(status, 200)

    def test_post(self):
        """Every configured JSON payload must be accepted with 200."""
        for payload in self.post_data:
            response = requests.post(
                url=self.url,
                data=json.dumps(payload),
                headers=self.headers,
            )
            body = response.json()
            status = response.status_code
            print("POST:", body)
            self.assertEqual(status, 200)

    def tearDown(self):
        pass
Automated tests for the XX API interface.
625990281d351010ab8f4ab4
class DescribeAuditResultRequest(JDCloudRequest):
    """Request object for querying the ClickHouse audit result list."""

    def __init__(self, parameters, header=None, version="v1"):
        # The URI template is filled in from `parameters` by the base request class.
        uri = '/regions/{regionId}/instances/{instanceId}/audit:describeAuditResult'
        super(DescribeAuditResultRequest, self).__init__(uri, 'GET', header, version)
        self.parameters = parameters
Query the ClickHouse audit result list.
62599028bf627c535bcb2453
class Docfield:
    """A single field (docfield) belonging to a Frappe DocType."""

    def __init__(self, name, type_name):
        # Field identifier and its declared field type, e.g. "status" / "Select".
        self.name = name
        self.type_name = type_name

    def __str__(self):
        # Render as "name : type", matching the original %-format output.
        return f"{self.name} : {self.type_name}"
Python class representing a docfield in a Frappe DocType.
62599028c432627299fa3f8f
class User(MetaData):
    """MongoDB document for a user profile (mongoengine-style field declarations)."""

    # Login identity: both email and username must be unique across the collection.
    email = EmailField(required=True, unique=True)
    # Username restricted to letters, digits, underscore and hyphen.
    username = StringField(regex=r'[a-zA-Z0-9_-]+$', max_length=120, required=True, unique=True)
    # Stored password value; max_length=64 suggests a hex digest (e.g. SHA-256) -- TODO confirm.
    password = StringField(max_length=64, required=True)
    is_superuser = BooleanField(default=False)
    is_disabled = BooleanField(default=False)
    # Collection/alias routing: tests write to a separate database alias.
    meta = {
        'collection': 'users',
        'db_alias': 'makechat_test' if TEST_MODE else 'makechat',
        'indexes': ['email', 'username', 'password']
    }

    def __str__(self):
        return self.username
Collection of user profiles.
625990298a349b6b436871d6