code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
|---|---|---|
class RunningAverageMeter(object): <NEW_LINE> <INDENT> def __init__(self, momentum=0.99): <NEW_LINE> <INDENT> self.momentum = momentum <NEW_LINE> self.reset() <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.val = None <NEW_LINE> self.avg = 0 <NEW_LINE> <DEDENT> def update(self, val): <NEW_LINE> <INDENT> if(val is None): return <NEW_LINE> if self.val is None: <NEW_LINE> <INDENT> self.avg = val <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.avg = self.avg * self.momentum + val * (1 - self.momentum) <NEW_LINE> <DEDENT> self.val = val
|
Computes and stores the average and current value
|
62599026d164cc6175821ebb
|
class OpenPosition(CommonType): <NEW_LINE> <INDENT> def __init__(self, symbol: str, side: str, quantity: float, initial_price: float, unrealized_pl: float): <NEW_LINE> <INDENT> self.symbol = str(symbol) <NEW_LINE> self.side = str(side) <NEW_LINE> self.quantity = float(quantity) <NEW_LINE> self.initial_price = float(initial_price) <NEW_LINE> self.unrealized_pl = float(unrealized_pl)
|
OpenPosition is a glorified immutable dict for easy storage and lookup.
It is based on the "OpenPosition" struct in:
https://github.com/fund3/communication-protocol/blob/master/TradeMessage.capnp
|
62599026bf627c535bcb23fc
|
class CommsPlan: <NEW_LINE> <INDENT> def __init__(self, bot): <NEW_LINE> <INDENT> self.bot = bot <NEW_LINE> self.session = aiohttp.ClientSession() <NEW_LINE> self.comms_plan_url ="https://docs.google.com/spreadsheets/d/1a63VD2WXmShIwpiTTfHuK-5yWKw-3AtXLbv1LBjMyCE" <NEW_LINE> self.comms_plan_export = self.comms_plan_url + "/export?exportFormat=csv" <NEW_LINE> <DEDENT> async def fetch_comms_plan(self): <NEW_LINE> <INDENT> resp = await self.session.get(self.comms_plan_export) <NEW_LINE> if (resp.status != 200): <NEW_LINE> <INDENT> await self.bot.say("Error getting comms plan. Check logs") <NEW_LINE> raise Exception("Could not get status") <NEW_LINE> <DEDENT> return await resp.text() <NEW_LINE> <DEDENT> def is_number(self, number): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> float(number) <NEW_LINE> return True <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def parse_comms_plan(self, plan): <NEW_LINE> <INDENT> data = io.StringIO(plan) <NEW_LINE> reader = csv.reader(data) <NEW_LINE> comms_info = [] <NEW_LINE> for row in reader: <NEW_LINE> <INDENT> if row[1] and self.is_number(row[1]): <NEW_LINE> <INDENT> radio = {} <NEW_LINE> radio['freq'] = row[1] <NEW_LINE> radio['use'] = row[3] <NEW_LINE> comms_info.append(radio) <NEW_LINE> <DEDENT> <DEDENT> return comms_info <NEW_LINE> <DEDENT> async def respond_with_plan(self, plan): <NEW_LINE> <INDENT> message_template = "```Address: dcs.hoggitworld.com\n%s```%s" <NEW_LINE> plan_message = "" <NEW_LINE> for radio in plan: <NEW_LINE> <INDENT> plan_message += "%s: %s\n" % (radio['use'], radio['freq']) <NEW_LINE> <DEDENT> await self.bot.say(message_template % (plan_message, "Details: <%s>" % self.comms_plan_url)) <NEW_LINE> <DEDENT> @commands.group(pass_context=True, aliases=["srs"]) <NEW_LINE> async def print_comms_plan(self, ctx): <NEW_LINE> <INDENT> comms_plan = await self.fetch_comms_plan() <NEW_LINE> comms_plan = self.parse_comms_plan(comms_plan) <NEW_LINE> await 
self.respond_with_plan(comms_plan)
|
Fetches the comms plan from discord
|
6259902621bff66bcd723ba9
|
class AutomaticTuningOptionModeActual(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): <NEW_LINE> <INDENT> OFF = "Off" <NEW_LINE> ON = "On"
|
Automatic tuning option actual state.
|
62599026d99f1b3c44d065e9
|
class _ProfileLRPool(object): <NEW_LINE> <INDENT> def __init__(self, max_size: int): <NEW_LINE> <INDENT> self._parameter_list: List[float] = [] <NEW_LINE> self._likelihood_list: List[float] = [] <NEW_LINE> self._model_list: List[modeling.Dataset] = [] <NEW_LINE> self.max_size: int = max_size <NEW_LINE> <DEDENT> @property <NEW_LINE> def full(self) -> bool: <NEW_LINE> <INDENT> return (len(self) >= self.max_size) <NEW_LINE> <DEDENT> def __len__(self) -> int: <NEW_LINE> <INDENT> return len(self._parameter_list) <NEW_LINE> <DEDENT> def append(self, parameter: float, dataset: modeling.Dataset) -> None: <NEW_LINE> <INDENT> if parameter in self.parameter_list: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self._parameter_list.append(parameter) <NEW_LINE> self._likelihood_list.append(dataset.stat_sum()) <NEW_LINE> self._model_list.append(dataset.models) <NEW_LINE> <DEDENT> @property <NEW_LINE> def _index_list(self) -> List[int]: <NEW_LINE> <INDENT> return np.argsort(self._parameter_list) <NEW_LINE> <DEDENT> @property <NEW_LINE> def parameter_list(self) -> List[str]: <NEW_LINE> <INDENT> return np.array(self._parameter_list)[self._index_list] <NEW_LINE> <DEDENT> @property <NEW_LINE> def likelihood_list(self) -> List[float]: <NEW_LINE> <INDENT> return np.array(self._likelihood_list)[self._index_list] <NEW_LINE> <DEDENT> def ml_model(self) -> modeling.models.Models: <NEW_LINE> <INDENT> ml_index = np.argmin(self._likelihood_list) <NEW_LINE> ml_model = self._model_list[ml_index] <NEW_LINE> temp_model = [] <NEW_LINE> letters = string.ascii_uppercase + string.digits <NEW_LINE> random_string = "".join(random.choice(letters) for _ in range(10)) <NEW_LINE> for model in ml_model: <NEW_LINE> <INDENT> name = model.name.split("_A_")[0] <NEW_LINE> temp_model.append(model.copy(name=name + "_A_" + random_string)) <NEW_LINE> <DEDENT> return temp_model <NEW_LINE> <DEDENT> def closest_model(self, parameter: float) -> modeling.models.Models: <NEW_LINE> <INDENT> best_index = np.argmin( 
np.abs(np.array(self._parameter_list) - parameter)) <NEW_LINE> closest_model = self._model_list[best_index] <NEW_LINE> letters = string.ascii_uppercase + string.digits <NEW_LINE> random_string = "".join(random.choice(letters) for _ in range(10)) <NEW_LINE> temp_model = [] <NEW_LINE> for model in closest_model: <NEW_LINE> <INDENT> name = model.name.split("_A_")[0] <NEW_LINE> temp_model.append(model.copy(name=name + "_A_" + random_string)) <NEW_LINE> <DEDENT> closest_model = temp_model <NEW_LINE> return closest_model
|
Helper class to contain the profile likelihood values
and their corresponding models.
Attributes:
max_size: Maximal size of the pool. Limited to limit used memory.
full: True if the maximum size of the pool is reached.
parameter_list: List of parameters for the profile.
likelihood_list: List of maximum profile likelihood values.
Todo:
This class is useful to find a good start model for fits and
to perform plots of the profile likelihood function. However,
it is very memory inefficient. One way to improve it would be
to not save the complete list of models but only their parameters.
|
6259902656b00c62f0fb3806
|
class testMultiException_result(TBase): <NEW_LINE> <INDENT> def __init__(self, success=None, err1=None, err2=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> self.err1 = err1 <NEW_LINE> self.err2 = err2 <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
|
Attributes:
- success
- err1
- err2
|
62599026925a0f43d25e8f8d
|
class InternalError(DatabaseError): <NEW_LINE> <INDENT> pass
|
The database encountered an internal error
|
62599026796e427e5384f6c3
|
class Approval(aff4.AFF4Object): <NEW_LINE> <INDENT> class SchemaCls(aff4.AFF4Object.SchemaCls): <NEW_LINE> <INDENT> REQUESTOR = aff4.Attribute("aff4:approval/requestor", rdfvalue.RDFString, "Requestor of the approval.") <NEW_LINE> APPROVER = aff4.Attribute("aff4:approval/approver", rdfvalue.RDFString, "An approver for the request.", "approver") <NEW_LINE> SUBJECT = aff4.Attribute("aff4:approval/subject", rdfvalue.RDFURN, "Subject of the approval. I.e. the resource that " "requires approved access.") <NEW_LINE> REASON = aff4.Attribute("aff4:approval/reason", rdfvalue.RDFString, "The reason for requesting access to this client.") <NEW_LINE> EMAIL_MSG_ID = aff4.Attribute("aff4:approval/email_msg_id", rdfvalue.RDFString, "The email thread message ID for this" "approval. Storing this allows for " "conversation threading.") <NEW_LINE> EMAIL_CC = aff4.Attribute("aff4:approval/email_cc", rdfvalue.RDFString, "Comma separated list of email addresses to " "CC on approval emails.") <NEW_LINE> NOTIFIED_USERS = aff4.Attribute("aff4:approval/notified_users", rdfvalue.RDFString, "Comma-separated list of GRR users " "notified about this approval.") <NEW_LINE> <DEDENT> def CheckAccess(self, token): <NEW_LINE> <INDENT> _ = token <NEW_LINE> raise NotImplementedError() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def GetApprovalForObject(object_urn, token=None, username=""): <NEW_LINE> <INDENT> if token is None: <NEW_LINE> <INDENT> raise access_control.UnauthorizedAccess( "No token given, cannot authenticate.") <NEW_LINE> <DEDENT> if not username: <NEW_LINE> <INDENT> username = token.username <NEW_LINE> <DEDENT> approvals_root_urn = aff4.ROOT_URN.Add("ACL").Add(object_urn.Path()).Add( username) <NEW_LINE> children_urns = list( aff4.FACTORY.ListChildren( approvals_root_urn, token=token)) <NEW_LINE> if not children_urns: <NEW_LINE> <INDENT> raise access_control.UnauthorizedAccess( "No approval found for user %s" % utils.SmartStr(username), subject=object_urn) <NEW_LINE> <DEDENT> 
last_error = None <NEW_LINE> approvals = aff4.FACTORY.MultiOpen( children_urns, mode="r", aff4_type=Approval, age=aff4.ALL_TIMES, token=token) <NEW_LINE> for approval in approvals: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> test_token = access_control.ACLToken( username=username, reason=approval.Get(approval.Schema.REASON)) <NEW_LINE> approval.CheckAccess(test_token) <NEW_LINE> return test_token <NEW_LINE> <DEDENT> except access_control.UnauthorizedAccess as e: <NEW_LINE> <INDENT> last_error = e <NEW_LINE> <DEDENT> <DEDENT> if last_error: <NEW_LINE> <INDENT> raise access_control.UnauthorizedAccess(last_error, subject=object_urn) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise access_control.UnauthorizedAccess( "Couldn't open any of %d approvals " "for user %s" % (len(children_urns), utils.SmartStr(username)), subject=object_urn)
|
An abstract approval request object.
This object normally lives within the namespace:
aff4:/ACL/...
The aff4:/ACL namespace is not writable by users, hence all manipulation of
this object must be done via dedicated flows. These flows use the server's
access credentials for manipulating this object.
|
62599026d164cc6175821ebe
|
class UserViewSet(ModelViewSet): <NEW_LINE> <INDENT> queryset = User.objects.all() <NEW_LINE> serializer_class = UserSerializer <NEW_LINE> filter_backends = (DjangoFilterBackend,) <NEW_LINE> filter_fields = ('name', 'discriminator')
|
View providing CRUD access to the users stored.
|
62599026d18da76e235b78f1
|
class NohinDetail(BaseModel): <NEW_LINE> <INDENT> belong_user = models.CharField(verbose_name='所属ユーザ', max_length=50) <NEW_LINE> kataban = models.CharField(verbose_name='会社名', max_length=50) <NEW_LINE> price = models.IntegerField(verbose_name='単価') <NEW_LINE> amount = models.IntegerField(verbose_name='数量') <NEW_LINE> nohin = models.ForeignKey(Nohin, verbose_name='納品', on_delete=models.CASCADE) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return '{}, {}, {}, {}'.format(self.belong_user, self.kataban, self.price, self.amount)
|
納品詳細
|
62599026be8e80087fbbffc0
|
class LevelAPI(_abstract_json_versioned_api.VersionedAPI): <NEW_LINE> <INDENT> api_name: str = 'level' <NEW_LINE> api_version: str <NEW_LINE> game: Game <NEW_LINE> Level: type <NEW_LINE> Area: type <NEW_LINE> Course: type <NEW_LINE> @classmethod <NEW_LINE> def build(cls, game: Game, api_version: Optional[str]) -> 'LevelAPI': <NEW_LINE> <INDENT> if not isinstance(game, Game): <NEW_LINE> <INDENT> raise ValueError(f'`game` argument must be a Game enum member (got "{game!r}" of type {type(game)})') <NEW_LINE> <DEDENT> game_name = { Game.NEW_SUPER_MARIO_BROS: 'NSMB', Game.NEW_SUPER_MARIO_BROS_WII: 'NSMBW', Game.NEW_SUPER_MARIO_BROS_2: 'NSMB2', Game.NEW_SUPER_MARIO_BROS_U: 'NSMBU', Game.NEW_SUPER_LUIGI_U: 'NSMBU', Game.NEW_SUPER_MARIO_BROS_U_DELUXE: 'NSMBUDX', }.get(game) <NEW_LINE> if game_name is None: <NEW_LINE> <INDENT> raise ValueError(f'Unsupported game: {game}') <NEW_LINE> <DEDENT> if api_version is None: <NEW_LINE> <INDENT> for available_version in reversed(cls._get_available_api_versions()): <NEW_LINE> <INDENT> if available_version.startswith(f'{game_name}_'): <NEW_LINE> <INDENT> full_api_version = available_version <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise RuntimeError(f'No level API versions found for {game_name}') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> full_api_version = f'{game_name}_{api_version}' <NEW_LINE> <DEDENT> self = super().build(full_api_version) <NEW_LINE> self.game = game <NEW_LINE> self.api_version = full_api_version[full_api_version.index('_')+1:] <NEW_LINE> course_superclass, level_superclass = { 'NSMB': (Course, Level), 'NSMBW': (NSMBWCourse, NSMBWLevel), 'NSMB2': (Course, Level), 'NSMBU': (Course, Level), 'NSMBUDX': (Course, Level), }[game_name] <NEW_LINE> self.Course = type('Course', (course_superclass,), {'_api': self}) <NEW_LINE> self.Area = type('Area', (Area,), {'_api': self}) <NEW_LINE> self.Level = type('Level', (level_superclass,), {'_api': self}) <NEW_LINE> return self <NEW_LINE> 
<DEDENT> def _get_mixins_for_struct(self, name: str) -> List[type]: <NEW_LINE> <INDENT> if name in MixinsPerClassName: <NEW_LINE> <INDENT> return MixinsPerClassName[name] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super()._get_mixins_for_struct(name)
|
Class representing the entire API -- a set of classes you can use to
represent a level
|
6259902630c21e258be9975e
|
class TestCleanContent(BaseTest): <NEW_LINE> <INDENT> def test_empty_text_must_return_empty_list(self): <NEW_LINE> <INDENT> actual = clean_content('') <NEW_LINE> self.assertEqual(actual, []) <NEW_LINE> <DEDENT> def test_one_item_in_text_must_return_list_with_item(self): <NEW_LINE> <INDENT> actual = clean_content(' it em ') <NEW_LINE> self.assertEqual(actual, ['it em']) <NEW_LINE> <DEDENT> def test_one_item_starts_with_star_in_text_must_return_list_with_item(self): <NEW_LINE> <INDENT> actual = clean_content(' * it em ') <NEW_LINE> self.assertEqual(actual, ['it em']) <NEW_LINE> <DEDENT> def test_one_item_starts_with_hyphen_in_text_must_return_list_with_item(self): <NEW_LINE> <INDENT> actual = clean_content(' - it em ') <NEW_LINE> self.assertEqual(actual, ['it em']) <NEW_LINE> <DEDENT> def test_one_item_starts_with_stars_and_hyphens_in_text_must_return_list_with_item(self): <NEW_LINE> <INDENT> actual = clean_content(' * - it em ') <NEW_LINE> self.assertEqual(actual, ['it em']) <NEW_LINE> <DEDENT> def test_one_item_starts_with_stars_in_text_must_return_list_with_item(self): <NEW_LINE> <INDENT> actual = clean_content(' ** * * it em ') <NEW_LINE> self.assertEqual(actual, ['it em']) <NEW_LINE> <DEDENT> def test_one_item_starts_with_hyphens_in_text_must_return_list_with_item(self): <NEW_LINE> <INDENT> actual = clean_content(' -- - - it em ') <NEW_LINE> self.assertEqual(actual, ['it em']) <NEW_LINE> <DEDENT> def test_one_item_starts_with_user_ref_text_must_return_empty_list(self): <NEW_LINE> <INDENT> actual = clean_content(' -[]@user.us-er ') <NEW_LINE> self.assertEqual(actual, []) <NEW_LINE> <DEDENT> def test_one_item_starts_with_user_ref_checked_text_must_return_empty_list(self): <NEW_LINE> <INDENT> actual = clean_content(' - [x] @ user.us-er ') <NEW_LINE> self.assertEqual(actual, []) <NEW_LINE> <DEDENT> def test_multiple_items_must_return_list_with_valid_changes(self): <NEW_LINE> <INDENT> actual = clean_content(' - item1\n\n\n - [X] @user \n* item2 \n item3\n\n \n- [ ] 
@user-test \n') <NEW_LINE> self.assertEqual(actual, ['item1', 'item2', 'item3'])
|
This class tests the clean_content method
|
62599026d164cc6175821ebf
|
class Terminal(VteTerminal): <NEW_LINE> <INDENT> def __init__(self, CONF): <NEW_LINE> <INDENT> VteTerminal.__init__(self) <NEW_LINE> self.set_scrollback_lines(-1) <NEW_LINE> self.set_audible_bell(0) <NEW_LINE> self.connect("key_press_event", self.copy_or_paste) <NEW_LINE> self.host, self.port = CONF.getApiRestfulConInfo() <NEW_LINE> self.faraday_directory = os.path.dirname(os.path.realpath('faraday.py')) <NEW_LINE> self.faraday_exec = self.faraday_directory + "/faraday-terminal.zsh" <NEW_LINE> self.start_faraday() <NEW_LINE> <DEDENT> @scrollable(overlay_scrolling=True) <NEW_LINE> def create_scrollable_terminal(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def start_faraday(self): <NEW_LINE> <INDENT> home_dir = os.path.expanduser('~') <NEW_LINE> self.spawn_sync(Vte.PtyFlags.DEFAULT, home_dir, [self.faraday_exec, str(self.host), str(self.port)], ['FARADAY_PATH=%s' % self.faraday_directory], GLib.SpawnFlags.DO_NOT_REAP_CHILD, None, None, None) <NEW_LINE> <DEDENT> def copy_or_paste(self, widget, event): <NEW_LINE> <INDENT> control_key = 'control-mask' <NEW_LINE> shift_key = 'shift-mask' <NEW_LINE> last_pressed_key = Gdk.keyval_name(event.get_keyval()[1]) <NEW_LINE> set_pressed_special_keys = set(event.state.value_nicks) <NEW_LINE> if event.type == Gdk.EventType.KEY_PRESS: <NEW_LINE> <INDENT> if {control_key, shift_key} <= set_pressed_special_keys: <NEW_LINE> <INDENT> if last_pressed_key == 'C': <NEW_LINE> <INDENT> self.copy_clipboard() <NEW_LINE> <DEDENT> elif last_pressed_key == 'V': <NEW_LINE> <INDENT> self.paste_clipboard() <NEW_LINE> <DEDENT> return True
|
Defines a simple terminal that will execute faraday-terminal with the
corresponding host and port as specified by the CONF.
Inherits from Compatibility.Vte, which is just Vte.Terminal with
spawn_sync overrode to function with API 2.90 and 2.91
|
625990261d351010ab8f4a5f
|
class PersonDetail(TemplateView): <NEW_LINE> <INDENT> template_name = 'personDetail.html' <NEW_LINE> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> class CardNumberAngularForm(NgModelFormMixin, NgModelForm, Bootstrap3FormMixin): <NEW_LINE> <INDENT> form_name = 'cardNumberForm' <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Person <NEW_LINE> fields = ('card_number', ) <NEW_LINE> <DEDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> kwargs.update(scope_prefix='person') <NEW_LINE> super(CardNumberAngularForm, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> class DataAngularForm(NgModelFormMixin, NgModelForm, Bootstrap3FormMixin): <NEW_LINE> <INDENT> form_name = 'personDataForm' <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Person <NEW_LINE> fields = ('first_name', 'last_name', 'rank', 'group') <NEW_LINE> <DEDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> kwargs.update(scope_prefix='person') <NEW_LINE> super(DataAngularForm, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> class PhotoAngularForm(NgModelFormMixin, NgModelForm, Bootstrap3FormMixin): <NEW_LINE> <INDENT> form_name = 'personPhotoForm' <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Person <NEW_LINE> fields = ('photo', ) <NEW_LINE> <DEDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> kwargs.update(scope_prefix='person') <NEW_LINE> super(PhotoAngularForm, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> context = super(PersonDetail, self).get_context_data(**kwargs) <NEW_LINE> context.update(cardNumberForm=CardNumberAngularForm()) <NEW_LINE> context.update(dataForm=DataAngularForm()) <NEW_LINE> context.update(photoForm=PhotoAngularForm()) <NEW_LINE> return context
|
Person detail view.
Display persons detail info. Class based on TemplateView.
:return: generated personDetail.html
|
62599026c432627299fa3f3c
|
class ObjectKey(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def initFromObject(self, obj): <NEW_LINE> <INDENT> self._className = obj.__class__.__name__ <NEW_LINE> self._serialNum = obj.serialNum() <NEW_LINE> if self._serialNum is 0: <NEW_LINE> <INDENT> self._serialNum = obj.store().newSerialNum() <NEW_LINE> obj.setSerialNum(self._serialNum) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def initFromClassNameAndSerialNum(self, className, serialNum): <NEW_LINE> <INDENT> assert className is not None <NEW_LINE> assert serialNum > 0 <NEW_LINE> self._className = className <NEW_LINE> self._serialNum = serialNum <NEW_LINE> return self <NEW_LINE> <DEDENT> def serialNum(self): <NEW_LINE> <INDENT> return self._serialNum <NEW_LINE> <DEDENT> def __cmp__(self, other): <NEW_LINE> <INDENT> result = cmp(self._className, other._className) <NEW_LINE> if result == 0: <NEW_LINE> <INDENT> result = cmp(self._serialNum, other._serialNum) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(self._className) ^ hash(self._serialNum) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '<%s, %s>' % (self._className, self._serialNum)
|
An ObjectKey is used by ObjectStore for keeping track of objects in memory.
Currently a key is equal to the class name of the object combined
with the object's serial number, although as a user of object keys,
you don't normally need to know what's inside them.
|
6259902626238365f5fada9b
|
class ICC_082: <NEW_LINE> <INDENT> pass
|
Frozen Clone
|
62599026ac7a0e7691f73433
|
class _CumulativeWorkerStatsTest(TestCase): <NEW_LINE> <INDENT> def test___init__(t): <NEW_LINE> <INDENT> cws = _CumulativeWorkerStats() <NEW_LINE> t.assertEqual(cws.numoccurrences, 0) <NEW_LINE> t.assertEqual(cws.totaltime, 0.0) <NEW_LINE> t.assertEqual(cws.lasttime, 0) <NEW_LINE> <DEDENT> @patch('{src}.time'.format(**PATH), autospec=True) <NEW_LINE> def test_addOccurrence(t, time): <NEW_LINE> <INDENT> time.time.side_effect = [sentinel.t0, sentinel.t1] <NEW_LINE> cws = _CumulativeWorkerStats() <NEW_LINE> cws.addOccurrence(10) <NEW_LINE> t.assertEqual(cws.numoccurrences, 1) <NEW_LINE> t.assertEqual(cws.totaltime, 10.0) <NEW_LINE> t.assertEqual(cws.lasttime, sentinel.t0) <NEW_LINE> cws.addOccurrence(100) <NEW_LINE> t.assertEqual(cws.numoccurrences, 2) <NEW_LINE> t.assertEqual(cws.totaltime, 110.0) <NEW_LINE> t.assertEqual(cws.lasttime, sentinel.t1)
|
Test the _CumulativeWorkerStats class.
|
6259902673bcbd0ca4bcb1da
|
class ParsingError(Exception): <NEW_LINE> <INDENT> pass
|
Base class for parsing error
|
62599026a4f1c619b294f53e
|
class ActiveDirectoryAccountGetIterKeyTd(NetAppObject): <NEW_LINE> <INDENT> _key_0 = None <NEW_LINE> @property <NEW_LINE> def key_0(self): <NEW_LINE> <INDENT> return self._key_0 <NEW_LINE> <DEDENT> @key_0.setter <NEW_LINE> def key_0(self, val): <NEW_LINE> <INDENT> if val != None: <NEW_LINE> <INDENT> self.validate('key_0', val) <NEW_LINE> <DEDENT> self._key_0 = val <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_api_name(): <NEW_LINE> <INDENT> return "active-directory-account-get-iter-key-td" <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_desired_attrs(): <NEW_LINE> <INDENT> return [ 'key-0', ] <NEW_LINE> <DEDENT> def describe_properties(self): <NEW_LINE> <INDENT> return { 'key_0': { 'class': basestring, 'is_list': False, 'required': 'optional' }, }
|
Key typedef for table active_directory
|
62599026bf627c535bcb2400
|
class AltManager(Manager): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> conf = config.TempestConfig() <NEW_LINE> super(AltManager, self).__init__(conf.identity.alt_username, conf.identity.alt_password, conf.identity.alt_tenant_name)
|
Manager object that uses the alt_XXX credentials for its
managed client objects
|
62599026a8ecb03325872168
|
class LoggingContext(object): <NEW_LINE> <INDENT> __slots__ = ["parent_context", "name", "__dict__"] <NEW_LINE> thread_local = threading.local() <NEW_LINE> class Sentinel(object): <NEW_LINE> <INDENT> __slots__ = [] <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "sentinel" <NEW_LINE> <DEDENT> def copy_to(self, record): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> sentinel = Sentinel() <NEW_LINE> def __init__(self, name=None): <NEW_LINE> <INDENT> self.parent_context = None <NEW_LINE> self.name = name <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%s@%x" % (self.name, id(self)) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def current_context(cls): <NEW_LINE> <INDENT> return getattr(cls.thread_local, "current_context", cls.sentinel) <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> if self.parent_context is not None: <NEW_LINE> <INDENT> raise Exception("Attempt to enter logging context multiple times") <NEW_LINE> <DEDENT> self.parent_context = self.current_context() <NEW_LINE> self.thread_local.current_context = self <NEW_LINE> return self <NEW_LINE> <DEDENT> def __exit__(self, type, value, traceback): <NEW_LINE> <INDENT> if self.thread_local.current_context is not self: <NEW_LINE> <INDENT> if self.thread_local.current_context is self.sentinel: <NEW_LINE> <INDENT> logger.debug("Expected logging context %s has been lost", self) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.warn( "Current logging context %s is not expected context %s", self.thread_local.current_context, self ) <NEW_LINE> <DEDENT> <DEDENT> self.thread_local.current_context = self.parent_context <NEW_LINE> self.parent_context = None <NEW_LINE> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> return getattr(self.parent_context, name) <NEW_LINE> <DEDENT> def copy_to(self, record): <NEW_LINE> <INDENT> if self.parent_context is not None: <NEW_LINE> <INDENT> self.parent_context.copy_to(record) <NEW_LINE> <DEDENT> for key, value in 
self.__dict__.items(): <NEW_LINE> <INDENT> setattr(record, key, value)
|
Additional context for log formatting. Contexts are scoped within a
"with" block. Contexts inherit the state of their parent contexts.
Args:
name (str): Name for the context for debugging.
|
62599026925a0f43d25e8f91
|
class _Paragraph(Subshape): <NEW_LINE> <INDENT> def __init__(self, p, parent): <NEW_LINE> <INDENT> super(_Paragraph, self).__init__(parent) <NEW_LINE> self._element = self._p = p <NEW_LINE> <DEDENT> def add_line_break(self): <NEW_LINE> <INDENT> self._p.add_br() <NEW_LINE> <DEDENT> def add_run(self): <NEW_LINE> <INDENT> r = self._p.add_r() <NEW_LINE> return _Run(r, self) <NEW_LINE> <DEDENT> @property <NEW_LINE> def alignment(self): <NEW_LINE> <INDENT> return self._pPr.algn <NEW_LINE> <DEDENT> @alignment.setter <NEW_LINE> def alignment(self, value): <NEW_LINE> <INDENT> self._pPr.algn = value <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> for elm in self._element.content_children: <NEW_LINE> <INDENT> self._element.remove(elm) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> @property <NEW_LINE> def font(self): <NEW_LINE> <INDENT> return Font(self._defRPr) <NEW_LINE> <DEDENT> @property <NEW_LINE> def level(self): <NEW_LINE> <INDENT> return self._pPr.lvl <NEW_LINE> <DEDENT> @level.setter <NEW_LINE> def level(self, level): <NEW_LINE> <INDENT> self._pPr.lvl = level <NEW_LINE> <DEDENT> @property <NEW_LINE> def line_spacing(self): <NEW_LINE> <INDENT> pPr = self._p.pPr <NEW_LINE> if pPr is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return pPr.line_spacing <NEW_LINE> <DEDENT> @line_spacing.setter <NEW_LINE> def line_spacing(self, value): <NEW_LINE> <INDENT> pPr = self._p.get_or_add_pPr() <NEW_LINE> pPr.line_spacing = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def runs(self): <NEW_LINE> <INDENT> return tuple(_Run(r, self) for r in self._element.r_lst) <NEW_LINE> <DEDENT> @property <NEW_LINE> def space_after(self): <NEW_LINE> <INDENT> pPr = self._p.pPr <NEW_LINE> if pPr is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return pPr.space_after <NEW_LINE> <DEDENT> @space_after.setter <NEW_LINE> def space_after(self, value): <NEW_LINE> <INDENT> pPr = self._p.get_or_add_pPr() <NEW_LINE> pPr.space_after = value <NEW_LINE> <DEDENT> @property 
<NEW_LINE> def space_before(self): <NEW_LINE> <INDENT> pPr = self._p.pPr <NEW_LINE> if pPr is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return pPr.space_before <NEW_LINE> <DEDENT> @space_before.setter <NEW_LINE> def space_before(self, value): <NEW_LINE> <INDENT> pPr = self._p.get_or_add_pPr() <NEW_LINE> pPr.space_before = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def text(self): <NEW_LINE> <INDENT> return "".join(elm.text for elm in self._element.content_children) <NEW_LINE> <DEDENT> @text.setter <NEW_LINE> def text(self, text): <NEW_LINE> <INDENT> self.clear() <NEW_LINE> self._element.append_text(to_unicode(text)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def _defRPr(self): <NEW_LINE> <INDENT> return self._pPr.get_or_add_defRPr() <NEW_LINE> <DEDENT> @property <NEW_LINE> def _pPr(self): <NEW_LINE> <INDENT> return self._p.get_or_add_pPr()
|
Paragraph object. Not intended to be constructed directly.
|
625990268e05c05ec3f6f601
|
class ProductsJsonHandler(api): <NEW_LINE> <INDENT> def get(self, p=1): <NEW_LINE> <INDENT> data = self.db.product(state=1)[int(p): 10] <NEW_LINE> list = data.object_list <NEW_LINE> for item in list: <NEW_LINE> <INDENT> item['imgs'] = item['imgs'].split('|') <NEW_LINE> <DEDENT> self.write({'data': list, 'prev': data.prevpage, 'next': data.nextpage})
|
yf: 根据页数P获取商品列表
|
625990266e29344779b0159b
|
@add_metaclass(MetaInstructionCase) <NEW_LINE> class AdcImmTest(unittest.TestCase): <NEW_LINE> <INDENT> asm = 'ADC #$10' <NEW_LINE> lex = [('T_INSTRUCTION', 'ADC'), ('T_HEX_NUMBER', '#$10')] <NEW_LINE> syn = ['S_IMMEDIATE'] <NEW_LINE> code = [0x69, 0x10]
|
Test the arithmetic operation ADC between decimal 16
and the content of the accumulator.
|
6259902626238365f5fada9d
|
class DataShape(TableModule): <NEW_LINE> <INDENT> inputs = [ SlotDescriptor("table", type=Table, required=True), ] <NEW_LINE> def __init__(self, **kwds: Any) -> None: <NEW_LINE> <INDENT> super().__init__(**kwds) <NEW_LINE> pass <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @process_slot("table", reset_cb="reset") <NEW_LINE> @run_if_any <NEW_LINE> def run_step( self, run_number: int, step_size: int, howlong: float ) -> ReturnRunStep: <NEW_LINE> <INDENT> assert self.context <NEW_LINE> with self.context as ctx: <NEW_LINE> <INDENT> slot = ctx.table <NEW_LINE> data = slot.data() <NEW_LINE> if not data: <NEW_LINE> <INDENT> return self._return_run_step(self.state_blocked, steps_run=0) <NEW_LINE> <DEDENT> if slot.has_buffered(): <NEW_LINE> <INDENT> slot.clear_buffers() <NEW_LINE> <DEDENT> self.result = PsDict({k: str(v) for (k, v) in dshape_fields(data.dshape)}) <NEW_LINE> return self._return_run_step(self.state_zombie, steps_run=0)
|
Adds statistics on input data
|
62599026ac7a0e7691f73435
|
class SubscriptionFormPlugin(CMSPlugin): <NEW_LINE> <INDENT> title = models.CharField(_('title'), max_length=100, blank=True) <NEW_LINE> show_description = models.BooleanField(_('show description'), default=True, help_text=_('Show the mailing list\'s description.')) <NEW_LINE> mailing_list = models.ForeignKey(MailingList, verbose_name=_('mailing list'), help_text=_('Mailing List to subscribe to.')) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.mailing_list.name
|
CMS Plugin for susbcribing to a mailing list
|
625990268c3a8732951f74a4
|
class ProxyOnlyResource(Model): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, } <NEW_LINE> def __init__(self, kind=None): <NEW_LINE> <INDENT> self.id = None <NEW_LINE> self.name = None <NEW_LINE> self.kind = kind <NEW_LINE> self.type = None
|
Azure proxy only resource. This resource is not tracked by Azure Resource
Manager.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
|
625990263eb6a72ae038b5b1
|
class OperationList(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'value': {'required': True}, } <NEW_LINE> _attribute_map = { 'count': {'key': 'count', 'type': 'long'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, 'value': {'key': 'value', 'type': '[Operation]'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(OperationList, self).__init__(**kwargs) <NEW_LINE> self.count = kwargs.get('count', None) <NEW_LINE> self.next_link = kwargs.get('next_link', None) <NEW_LINE> self.value = kwargs['value']
|
Paged list of operation resources.
All required parameters must be populated in order to send to Azure.
:param count: Total item count.
:type count: long
:param next_link: The Url of next result page.
:type next_link: str
:param value: Required. Collection of items of type results.
:type value: list[~azure.mgmt.purview.models.Operation]
|
62599026ac7a0e7691f73437
|
class PhysicalMixin(models.Model): <NEW_LINE> <INDENT> UNIT_SQUAREFOOT = 1 <NEW_LINE> UNIT_SQUAREMETER = 2 <NEW_LINE> UNIT_ACRE = 3 <NEW_LINE> UNIT_HECTARE = 4 <NEW_LINE> UNIT_CHOICES = ( (UNIT_SQUAREFOOT, 'square feet'), (UNIT_SQUAREMETER, 'square meters'), (UNIT_ACRE, 'acres'), (UNIT_HECTARE, 'hectares'), ) <NEW_LINE> UNIT_CONVERSIONS = { (UNIT_SQUAREFOOT, UNIT_SQUAREMETER): Decimal(.0929), (UNIT_SQUAREFOOT, UNIT_ACRE): Decimal(.000023), (UNIT_SQUAREFOOT, UNIT_HECTARE): Decimal(.0000093), (UNIT_SQUAREMETER, UNIT_SQUAREFOOT): Decimal(10.76), (UNIT_SQUAREMETER, UNIT_ACRE): Decimal(.00025), (UNIT_SQUAREMETER, UNIT_HECTARE): Decimal(.0001), (UNIT_ACRE, UNIT_SQUAREFOOT): Decimal(43560), (UNIT_ACRE, UNIT_SQUAREMETER): Decimal(4047), (UNIT_ACRE, UNIT_HECTARE): Decimal(.4047), (UNIT_HECTARE, UNIT_SQUAREFOOT): Decimal(107639.10), (UNIT_HECTARE, UNIT_SQUAREMETER): Decimal(10000.00), (UNIT_HECTARE, UNIT_ACRE): Decimal(2.47), } <NEW_LINE> area = models.DecimalField(max_digits=12, decimal_places=2) <NEW_LINE> unit = models.PositiveSmallIntegerField(choices=UNIT_CHOICES, default=UNIT_SQUAREFOOT, help_text="Unit of measurement.") <NEW_LINE> area_normalized = models.DecimalField(max_digits=12, decimal_places=2, editable=False) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> abstract = True <NEW_LINE> <DEDENT> def convert(self, someUnit): <NEW_LINE> <INDENT> if someUnit == self.unit: <NEW_LINE> <INDENT> return self.area <NEW_LINE> <DEDENT> elif (self.unit, someUnit) in self.UNIT_CONVERSIONS: <NEW_LINE> <INDENT> return self.area * self.UNIT_CONVERSIONS[(self.unit, someUnit)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception("Can't convert") <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def square_feet(self): <NEW_LINE> <INDENT> return self.convert(self.UNIT_SQUAREFOOT) <NEW_LINE> <DEDENT> @property <NEW_LINE> def square_meters(self): <NEW_LINE> <INDENT> return self.convert(self.UNIT_SQUAREMETER) <NEW_LINE> <DEDENT> @property <NEW_LINE> def acres(self): <NEW_LINE> 
<INDENT> return self.convert(self.UNIT_ACRE) <NEW_LINE> <DEDENT> @property <NEW_LINE> def hectares(self): <NEW_LINE> <INDENT> return self.convert(self.UNIT_HECTARE) <NEW_LINE> <DEDENT> def save(self, force_insert=False, force_update=False): <NEW_LINE> <INDENT> self.area_normalized = self.convert(self.UNIT_SQUAREFOOT) <NEW_LINE> super(PhysicalMixin, self).save(force_insert, force_update)
|
Fields and methods for a
physically constructable design project.
|
62599026d99f1b3c44d065f1
|
@actions.register('stop') <NEW_LINE> class Stop(BaseAction): <NEW_LINE> <INDENT> valid_origin_states = ('running',) <NEW_LINE> schema = type_schema( 'stop', **{'terminate-ephemeral': {'type': 'boolean'}, 'hibernate': {'type': 'boolean'}}) <NEW_LINE> has_hibernate = jmespath.compile('[].HibernationOptions.Configured') <NEW_LINE> def get_permissions(self): <NEW_LINE> <INDENT> perms = ('ec2:StopInstances',) <NEW_LINE> if self.data.get('terminate-ephemeral', False): <NEW_LINE> <INDENT> perms += ('ec2:TerminateInstances',) <NEW_LINE> <DEDENT> return perms <NEW_LINE> <DEDENT> def split_on_storage(self, instances): <NEW_LINE> <INDENT> ephemeral = [] <NEW_LINE> persistent = [] <NEW_LINE> for i in instances: <NEW_LINE> <INDENT> if EphemeralInstanceFilter.is_ephemeral(i): <NEW_LINE> <INDENT> ephemeral.append(i) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> persistent.append(i) <NEW_LINE> <DEDENT> <DEDENT> return ephemeral, persistent <NEW_LINE> <DEDENT> def split_on_hibernate(self, instances): <NEW_LINE> <INDENT> enabled, disabled = [], [] <NEW_LINE> for status, i in zip(self.has_hibernate.search(instances), instances): <NEW_LINE> <INDENT> if status is True: <NEW_LINE> <INDENT> enabled.append(i) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> disabled.append(i) <NEW_LINE> <DEDENT> <DEDENT> return enabled, disabled <NEW_LINE> <DEDENT> def process(self, instances): <NEW_LINE> <INDENT> instances = self.filter_resources(instances, 'State.Name', self.valid_origin_states) <NEW_LINE> if not len(instances): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> client = utils.local_session( self.manager.session_factory).client('ec2') <NEW_LINE> ephemeral, persistent = self.split_on_storage(instances) <NEW_LINE> if self.data.get('terminate-ephemeral', False) and ephemeral: <NEW_LINE> <INDENT> self._run_instances_op( client.terminate_instances, [i['InstanceId'] for i in ephemeral]) <NEW_LINE> <DEDENT> if persistent: <NEW_LINE> <INDENT> if self.data.get('hibernate', False): <NEW_LINE> <INDENT> 
enabled, persistent = self.split_on_hibernate(persistent) <NEW_LINE> if enabled: <NEW_LINE> <INDENT> self._run_instances_op( client.stop_instances, [i['InstanceId'] for i in enabled], Hibernate=True) <NEW_LINE> <DEDENT> <DEDENT> self._run_instances_op( client.stop_instances, [i['InstanceId'] for i in persistent]) <NEW_LINE> <DEDENT> return instances <NEW_LINE> <DEDENT> def _run_instances_op(self, op, instance_ids, **kwargs): <NEW_LINE> <INDENT> while instance_ids: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.manager.retry(op, InstanceIds=instance_ids, **kwargs) <NEW_LINE> <DEDENT> except ClientError as e: <NEW_LINE> <INDENT> if e.response['Error']['Code'] == 'IncorrectInstanceState': <NEW_LINE> <INDENT> instance_ids.remove(extract_instance_id(e)) <NEW_LINE> <DEDENT> raise
|
Stops or hibernates a running EC2 instances
:Example:
.. code-block:: yaml
policies:
- name: ec2-stop-running-instances
resource: ec2
query:
- instance-state-name: running
actions:
- stop
- name: ec2-hibernate-instances
resources: ec2
query:
- instance-state-name: running
actions:
- type: stop
hibernate: true
Note when using hiberate, instances not configured for hiberation
will just be stopped.
|
625990265e10d32532ce40ab
|
class Tramites(models.Model): <NEW_LINE> <INDENT> folio = models.CharField(primary_key=True, max_length=13) <NEW_LINE> distrito = models.PositiveSmallIntegerField(editable=False) <NEW_LINE> mac = models.CharField(max_length=6, editable=False) <NEW_LINE> tramo_exitoso = models.DurationField(editable=False, blank=True, null=True) <NEW_LINE> tramo_disponible = models.DurationField(editable=False, blank=True, null=True) <NEW_LINE> tramo_entrega = models.DurationField(editable=False, blank=True, null=True) <NEW_LINE> estatus = models.TextField(blank=True, null=True) <NEW_LINE> causa_rechazo = models.TextField(blank=True, null=True) <NEW_LINE> movimiento_solicitado = models.TextField(blank=True, null=True) <NEW_LINE> movimiento_definitivo = models.TextField(blank=True, null=True) <NEW_LINE> fecha_tramite = models.DateTimeField(blank=True, null=True) <NEW_LINE> fecha_recibido_cecyrd = models.DateTimeField(blank=True, null=True) <NEW_LINE> fecha_registrado_cecyrd = models.DateTimeField(blank=True, null=True) <NEW_LINE> fecha_rechazado = models.DateTimeField(blank=True, null=True) <NEW_LINE> fecha_cancelado_movimiento_posterior = models.DateTimeField(blank=True, null=True) <NEW_LINE> fecha_alta_pe = models.DateTimeField(blank=True, null=True) <NEW_LINE> fecha_afectacion_padron = models.DateTimeField(blank=True, null=True) <NEW_LINE> fecha_actualizacion_pe = models.DateTimeField(blank=True, null=True) <NEW_LINE> fecha_reincorporacion_pe = models.DateTimeField(blank=True, null=True) <NEW_LINE> fecha_exitoso = models.DateTimeField(blank=True, null=True) <NEW_LINE> fecha_lote_produccion = models.DateTimeField(blank=True, null=True) <NEW_LINE> fecha_listo_reimpresion = models.DateTimeField(blank=True, null=True) <NEW_LINE> fecha_cpv_creada = models.DateTimeField(blank=True, null=True) <NEW_LINE> fecha_cpv_registrada_mac = models.DateTimeField(blank=True, null=True) <NEW_LINE> fecha_cpv_disponible = models.DateTimeField(blank=True, null=True) <NEW_LINE> fecha_cpv_entregada = 
models.DateTimeField(blank=True, null=True) <NEW_LINE> fecha_afectacion_ln = models.DateTimeField(blank=True, null=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> db_table = 'cecyrd_tramites' <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.folio <NEW_LINE> <DEDENT> def save(self, force_insert=False, force_update=False, using=None, update_fields=None): <NEW_LINE> <INDENT> self.distrito = int(self.folio[5]) <NEW_LINE> self.mac = self.folio[2:8] <NEW_LINE> self.tramo_entrega = get_tramo(self.fecha_cpv_entregada, self.fecha_cpv_disponible) <NEW_LINE> self.tramo_disponible = get_tramo(self.fecha_cpv_disponible, self.fecha_tramite) <NEW_LINE> self.tramo_exitoso = get_tramo(self.fecha_exitoso, self.fecha_tramite) <NEW_LINE> super(Tramites, self).save( force_insert=False, force_update=False, using=None, update_fields=None )
|
Modelo para evaluar al proveedor
|
62599026d164cc6175821ec5
|
class CertificateOrderContact(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'email': {'key': 'email', 'type': 'str'}, 'name_first': {'key': 'nameFirst', 'type': 'str'}, 'name_last': {'key': 'nameLast', 'type': 'str'}, 'phone': {'key': 'phone', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, email: Optional[str] = None, name_first: Optional[str] = None, name_last: Optional[str] = None, phone: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(CertificateOrderContact, self).__init__(**kwargs) <NEW_LINE> self.email = email <NEW_LINE> self.name_first = name_first <NEW_LINE> self.name_last = name_last <NEW_LINE> self.phone = phone
|
CertificateOrderContact.
:ivar email:
:vartype email: str
:ivar name_first:
:vartype name_first: str
:ivar name_last:
:vartype name_last: str
:ivar phone:
:vartype phone: str
|
625990268e05c05ec3f6f603
|
class Settings(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.screen_width = 1200 <NEW_LINE> self.screen_height = 800 <NEW_LINE> self.bg_color = (230, 230, 230) <NEW_LINE> self.ship_speed_factor = 1.5 <NEW_LINE> self.bullet_speed_factor = 1 <NEW_LINE> self.bullet_width = 3 <NEW_LINE> self.bullet_height = 15 <NEW_LINE> self.bullet_color = 60, 60, 60 <NEW_LINE> self.bullets_allowed = 3
|
存储配置类
|
625990263eb6a72ae038b5b3
|
class HasSourceOf(HasSourceOfBase): <NEW_LINE> <INDENT> labels = [ _('Source ID:') ] <NEW_LINE> name = _('People with the <source>') <NEW_LINE> category = _('Citation/source filters') <NEW_LINE> description = _('Matches people who have a particular source')
|
Rule that checks people that have a particular source.
|
62599026507cdc57c63a5cf6
|
class YadisResourceDescriptor: <NEW_LINE> <INDENT> resources_list = [] <NEW_LINE> def __init__(self, resources_list): <NEW_LINE> <INDENT> self.resources_list = resources_list <NEW_LINE> <DEDENT> def get_resources_list(self): <NEW_LINE> <INDENT> return self.resources_list <NEW_LINE> <DEDENT> def set_resources_list(self, resources_list): <NEW_LINE> <INDENT> self.resources_list = resources_list
|
The yadis resource descriptor class.
|
625990261f5feb6acb163b40
|
class Trait(Mapping): <NEW_LINE> <INDENT> def __init__(self, name=None, ratings: dict = None, normalize=False): <NEW_LINE> <INDENT> if name is None: <NEW_LINE> <INDENT> self.name = f"temp trait {str(random())[2:]}" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> if normalize: <NEW_LINE> <INDENT> self.ratings = {rating: ratings[rating] / 20 for rating in ratings} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.ratings = ratings <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, item) -> Union[str, float]: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.ratings[item] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> if item == "name": <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 0.0 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __iter__(self) -> iter: <NEW_LINE> <INDENT> return iter(self.ratings) <NEW_LINE> <DEDENT> def __len__(self) -> int: <NEW_LINE> <INDENT> return len(self.ratings) <NEW_LINE> <DEDENT> def __contains__(self, item): <NEW_LINE> <INDENT> if item == "name": <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return item in self.ratings <NEW_LINE> <DEDENT> def __add__(self, other: 'Trait') -> 'Trait': <NEW_LINE> <INDENT> new_ratings = self.ratings.copy() <NEW_LINE> for rating in other: <NEW_LINE> <INDENT> if rating in new_ratings: <NEW_LINE> <INDENT> new_ratings[rating] = self[rating] + other[rating] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_ratings[rating] = other[rating] <NEW_LINE> <DEDENT> <DEDENT> return Trait(ratings=new_ratings) <NEW_LINE> <DEDENT> def __iadd__(self, other): <NEW_LINE> <INDENT> trait = self + other <NEW_LINE> self.ratings = trait.ratings <NEW_LINE> return self <NEW_LINE> <DEDENT> def nice_string(self): <NEW_LINE> <INDENT> rate_str = " ".join([f"{short[rating]} {self.ratings[rating]*20:+.1f}" for rating in self.ratings]) <NEW_LINE> return f"{self.name.title()} ({rate_str})" <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> 
<INDENT> rate_str = " ".join([f"{short[rating]}:{self.ratings[rating]:.2f}" for rating in self.ratings]) <NEW_LINE> return f"Trait '{self.name}': {rate_str}" <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f"Trait {self.name} at {hex(id(self))}"
|
A representation of a single trait.
A trait is a modifier for a player - they affect stats, can have additional affects, and
can be lost / gained / expire.
Note that trait values are stored from 0-20, so this normalizes them as well.
|
6259902663f4b57ef008651a
|
class LoginViews(Resource): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.db = db.init_db() <NEW_LINE> self.parser = reqparse.RequestParser() <NEW_LINE> self.validator = Validator() <NEW_LINE> self.user_models = UserModels() <NEW_LINE> <DEDENT> def post(self): <NEW_LINE> <INDENT> self.parser.add_argument( 'email', required=True, type=self.validator.validate_string_fields, help='Enter a valid email') <NEW_LINE> self.parser.add_argument( 'password', required=True, type=self.validator.validate_string_fields, help='Password cannot be empty') <NEW_LINE> user = self.parser.parse_args() <NEW_LINE> response = self.user_models.sign_in(user['email'], user['password']) <NEW_LINE> return {"message": response}
|
Controls methods related to user login
|
62599026ac7a0e7691f73439
|
class coord_trans(coord): <NEW_LINE> <INDENT> def __init__(self, x='identity', y='identity', xlim=None, ylim=None): <NEW_LINE> <INDENT> self.trans = Bunch(x=gettrans(x), y=gettrans(y)) <NEW_LINE> self.limits = Bunch(xlim=xlim, ylim=ylim) <NEW_LINE> <DEDENT> def transform(self, data, panel_params, munch=False): <NEW_LINE> <INDENT> if not self.is_linear and munch: <NEW_LINE> <INDENT> data = self.munch(data, panel_params) <NEW_LINE> <DEDENT> def trans_x(data): <NEW_LINE> <INDENT> result = transform_value(self.trans.x, data, panel_params['x_range']) <NEW_LINE> if any(result.isnull()): <NEW_LINE> <INDENT> warn("Coordinate transform of x aesthetic " "created one or more NaN values.") <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def trans_y(data): <NEW_LINE> <INDENT> result = transform_value(self.trans.y, data, panel_params['y_range']) <NEW_LINE> if any(result.isnull()): <NEW_LINE> <INDENT> warn("Coordinate transform of y aesthetic " "created one or more NaN values.") <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> data = transform_position(data, trans_x, trans_y) <NEW_LINE> return transform_position(data, squish_infinite, squish_infinite) <NEW_LINE> <DEDENT> def setup_panel_params(self, scale_x, scale_y): <NEW_LINE> <INDENT> def train(scale, limits, trans, name): <NEW_LINE> <INDENT> if limits is None: <NEW_LINE> <INDENT> rangee = scale.dimension() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> rangee = scale.transform(limits) <NEW_LINE> <DEDENT> out = scale.break_info(rangee) <NEW_LINE> out['range'] = trans.transform(out['range']) <NEW_LINE> if limits is None: <NEW_LINE> <INDENT> expand = self.expand_default(scale) <NEW_LINE> out['range'] = expand_range_distinct(out['range'], expand) <NEW_LINE> <DEDENT> out['major'] = transform_value(trans, out['major'], out['range']) <NEW_LINE> out['minor'] = transform_value(trans, out['minor'], out['range']) <NEW_LINE> for key in list(out.keys()): <NEW_LINE> <INDENT> new_key = '{}_{}'.format(name, key) <NEW_LINE> 
out[new_key] = out.pop(key) <NEW_LINE> <DEDENT> return out <NEW_LINE> <DEDENT> out = train(scale_x, self.limits.xlim, self.trans.x, 'x') <NEW_LINE> out.update(train(scale_y, self.limits.xlim, self.trans.y, 'y')) <NEW_LINE> return out <NEW_LINE> <DEDENT> def distance(self, x, y, panel_params): <NEW_LINE> <INDENT> max_dist = dist_euclidean(panel_params['x_range'], panel_params['y_range'])[0] <NEW_LINE> return dist_euclidean(self.trans.x.transform(x), self.trans.y.transform(y)) / max_dist
|
Transformed cartesian coordinate system
Parameters
----------
x : str | trans
Name of transform or `trans` class to
transform the x axis
y : str | trans
Name of transform or `trans` class to
transform the y axis
xlim : None | (float, float)
Limits for x axis. If None, then they are
automatically computed.
ylim : None | (float, float)
Limits for y axis. If None, then they are
automatically computed.
|
6259902656b00c62f0fb3810
|
class StandardWriter(Writer): <NEW_LINE> <INDENT> _started = False <NEW_LINE> _finalized = False <NEW_LINE> _worker = None <NEW_LINE> def __init__(self, savefun=npz.save_npz, **kwds): <NEW_LINE> <INDENT> super(StandardWriter, self).__init__() <NEW_LINE> self._savefun = savefun <NEW_LINE> self._kwds = kwds <NEW_LINE> self._started = False <NEW_LINE> self._finalized = False <NEW_LINE> <DEDENT> def __call__(self, filename, outdir, target): <NEW_LINE> <INDENT> if self._started: <NEW_LINE> <INDENT> self._worker.join() <NEW_LINE> self._started = False <NEW_LINE> <DEDENT> self._filename = filename <NEW_LINE> self._worker = self.create_worker(filename, outdir, target, **self._kwds) <NEW_LINE> self._worker.start() <NEW_LINE> self._started = True <NEW_LINE> <DEDENT> def create_worker(self, filename, outdir, target, **kwds): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def finalize(self): <NEW_LINE> <INDENT> if self._started: <NEW_LINE> <INDENT> if not self._finalized: <NEW_LINE> <INDENT> self._worker.join() <NEW_LINE> <DEDENT> self._started = False <NEW_LINE> <DEDENT> self._finalized = True
|
Base class of snapshot writers which use thread or process.
This class creates a new thread or a process every time when ``__call__``
is invoked.
Args:
savefun: Callable object. It takes three arguments: the output file
path, the serialized dictionary object, and the optional keyword
arguments.
kwds: Keyword arguments for the ``savefun``.
.. seealso::
- :meth:`chainer.training.extensions.snapshot`
|
62599026796e427e5384f6cd
|
class ImportFinder(ast.NodeVisitor): <NEW_LINE> <INDENT> def __init__(self) -> None: <NEW_LINE> <INDENT> self.imports = set() <NEW_LINE> <DEDENT> def visit_Import(self, node: ast.AST) -> None: <NEW_LINE> <INDENT> for alias in node.names: <NEW_LINE> <INDENT> self.imports.add(alias.name.split(".")[0]) <NEW_LINE> <DEDENT> <DEDENT> def visit_ImportFrom(self, node: ast.AST) -> None: <NEW_LINE> <INDENT> self.imports.add(node.module)
|
An AST walker for collecting imported modules.
|
6259902666673b3332c3133f
|
class HashGetSchema(ma.Schema): <NEW_LINE> <INDENT> id = ma.Integer(required=False) <NEW_LINE> password = ma.String(required=False) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> strict = True
|
Schema for fetching a hash, it can use any one of the three shown below
|
625990266fece00bbaccc90a
|
class SideEffect(Dao): <NEW_LINE> <INDENT> def __init__(self, dao): <NEW_LINE> <INDENT> self.dao = dao <NEW_LINE> self.redis = redis.Redis(**app.config.get('REDIS_CLIENT')) <NEW_LINE> self.rds_monitor_finish = 'lll_monitor_finish' <NEW_LINE> self.rds_monitor_start = 'lll_monitor_start' <NEW_LINE> <DEDENT> def insert(self, test): <NEW_LINE> <INDENT> test.id = self.dao.insert(test) <NEW_LINE> if test.env in ['luna-tank-api', 'luna-tank-api-force'] and test.status != 'finished': <NEW_LINE> <INDENT> self.schedule_to_monitor_finish(test) <NEW_LINE> <DEDENT> if test.env == 'luna-tank-api-force': <NEW_LINE> <INDENT> self.schedule_to_monitor_start(test) <NEW_LINE> <DEDENT> elif test.env == 'yandex-tank': <NEW_LINE> <INDENT> reduce_arts.apply_async(args=[test.id, test.files]) <NEW_LINE> <DEDENT> return test.id <NEW_LINE> <DEDENT> def update_by_id(self, test_id, test_diff): <NEW_LINE> <INDENT> return self.dao.update_by_id(test_id, test_diff) <NEW_LINE> <DEDENT> def get_by_id(self, **kw): <NEW_LINE> <INDENT> return self.dao.get_by_id(**kw) <NEW_LINE> <DEDENT> def get_many(self, **kw): <NEW_LINE> <INDENT> return self.dao.get_many(**kw) <NEW_LINE> <DEDENT> def schedule_to_monitor(self, key, test): <NEW_LINE> <INDENT> self.redis.hset(key, test.id, test.to_monitor_dct()) <NEW_LINE> <DEDENT> def schedule_to_monitor_finish(self, test): <NEW_LINE> <INDENT> self.schedule_to_monitor(self.rds_monitor_finish, test) <NEW_LINE> <DEDENT> def schedule_to_monitor_start(self, test): <NEW_LINE> <INDENT> self.schedule_to_monitor(self.rds_monitor_start, test)
|
Side effect wrapper, implementing test.dao interface.
|
625990266e29344779b015a1
|
class Lart(plugins.ChannelIdDatabasePlugin): <NEW_LINE> <INDENT> _meRe = re.compile(r'\bme\b', re.I) <NEW_LINE> _myRe = re.compile(r'\bmy\b', re.I) <NEW_LINE> def _replaceFirstPerson(self, s, nick): <NEW_LINE> <INDENT> s = self._meRe.sub(nick, s) <NEW_LINE> s = self._myRe.sub('%s\'s' % nick, s) <NEW_LINE> return s <NEW_LINE> <DEDENT> def addValidator(self, irc, text): <NEW_LINE> <INDENT> if '$who' not in text: <NEW_LINE> <INDENT> irc.error(_('Larts must contain $who.'), Raise=True) <NEW_LINE> <DEDENT> <DEDENT> @internationalizeDocstring <NEW_LINE> def lart(self, irc, msg, args, channel, id, text): <NEW_LINE> <INDENT> if ' for ' in text: <NEW_LINE> <INDENT> (target, reason) = list(map(str.strip, text.split(' for ', 1))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> (target, reason) = (text, '') <NEW_LINE> <DEDENT> if id is not None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> lart = self.db.get(channel, id) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> irc.error(format(_('There is no lart with id #%i.'), id)) <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> lart = self.db.random(channel) <NEW_LINE> if not lart: <NEW_LINE> <INDENT> irc.error(format(_('There are no larts in my database ' 'for %s.'), channel)) <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> text = lart.text <NEW_LINE> if ircutils.strEqual(target, irc.nick): <NEW_LINE> <INDENT> target = msg.nick <NEW_LINE> reason = self._replaceFirstPerson(_('trying to dis me'), irc.nick) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> target = self._replaceFirstPerson(target, msg.nick) <NEW_LINE> reason = self._replaceFirstPerson(reason, msg.nick) <NEW_LINE> <DEDENT> if target.endswith('.'): <NEW_LINE> <INDENT> target = target.rstrip('.') <NEW_LINE> <DEDENT> text = text.replace('$who', target) <NEW_LINE> if reason: <NEW_LINE> <INDENT> text += _(' for ') + reason <NEW_LINE> <DEDENT> if self.registryValue('showIds', channel, irc.network): <NEW_LINE> <INDENT> text += format(' (#%i)', 
lart.id) <NEW_LINE> <DEDENT> irc.reply(text, action=True) <NEW_LINE> <DEDENT> lart = wrap(lart, ['channeldb', optional('id'), 'text'])
|
Provides an implementation of the Luser Attitude Readjustment Tool
for users.
Example:
* If you add ``slaps $who``.
* And Someone says ``@lart ChanServ``.
* ``* bot slaps ChanServ``.
|
6259902673bcbd0ca4bcb1e2
|
class BXIError(Exception): <NEW_LINE> <INDENT> def __init__(self, msg, cause=None): <NEW_LINE> <INDENT> super(BXIError, self).__init__(msg) <NEW_LINE> self.msg = msg <NEW_LINE> self._cause = cause <NEW_LINE> tb = sys.exc_info() <NEW_LINE> if tb == (None, None, None): <NEW_LINE> <INDENT> self.traceback_str = "" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.traceback_str = bxibase.traceback2str(tb[2]) <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.msg + ("" if self.cause is None else "\n caused by: " + str(self.cause)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def cause(self): <NEW_LINE> <INDENT> return self._cause
|
The root class of all BXI exceptions
|
625990265166f23b2e244327
|
class KeywordsTagField(with_metaclass(models.SubfieldBase, BaseTagField)): <NEW_LINE> <INDENT> description = "Field for Storing <meta name='keywords' /> tag" <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> kwargs['db_index'] = False <NEW_LINE> kwargs['max_length'] = 255 <NEW_LINE> self.name = 'keywords' <NEW_LINE> super(KeywordsTagField, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def to_python(self, value): <NEW_LINE> <INDENT> if isinstance(value, KeywordsTag): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> keywords = super(KeywordsTagField, self).to_python(value) <NEW_LINE> keyword_tag = KeywordsTag( meta_name=_clean_i18_name(self.name), **{ 'name': _clean_i18_name(self.name), 'value': keywords } ) <NEW_LINE> return keyword_tag
|
Creates a field for Keywords Meta Tag
* Max-length 255
|
62599026d99f1b3c44d065f5
|
class NotImplemented(web.webapi.HTTPError): <NEW_LINE> <INDENT> def __init__(self, message="not implemented"): <NEW_LINE> <INDENT> self.message = message <NEW_LINE> status = "501 Not Implemented" <NEW_LINE> headers = {"Content-Type": "text/html"} <NEW_LINE> web.webapi.HTTPError.__init__(self, status, headers, message)
|
`501 Not Implemented` error.
|
62599026bf627c535bcb2408
|
class TestResponseContainerEvent(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testResponseContainerEvent(self): <NEW_LINE> <INDENT> pass
|
ResponseContainerEvent unit test stubs
|
6259902656b00c62f0fb3812
|
class Command(BaseCommand): <NEW_LINE> <INDENT> help = ( "Update the signature information related to News items which do not" " have any related signatures yet." ) <NEW_LINE> def write(self, text): <NEW_LINE> <INDENT> if self.verbose: <NEW_LINE> <INDENT> self.stdout.write(text) <NEW_LINE> <DEDENT> <DEDENT> def handle(self, *args, **kwargs): <NEW_LINE> <INDENT> self.verbose = int(kwargs['verbosity']) > 1 <NEW_LINE> self.write("Retrieving list of news to update...") <NEW_LINE> no_signature_news = EmailNews.objects.annotate( cnt=models.Count('signed_by')) <NEW_LINE> no_signature_news = no_signature_news.filter(cnt=0) <NEW_LINE> self.write("Processing news...") <NEW_LINE> self.write("{ID}: {TITLE}") <NEW_LINE> for news in no_signature_news: <NEW_LINE> <INDENT> self.write("{}: {}".format(news.id, news)) <NEW_LINE> news.save()
|
A Django management command which tries to update the signature information
for :class:`News <pts.core.models.News>` instances which do not have
any associated signatures.
|
625990265e10d32532ce40ad
|
class TestPack(PackTests): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> require_git_version((1, 5, 0)) <NEW_LINE> super(TestPack, self).setUp() <NEW_LINE> self._tempdir = tempfile.mkdtemp() <NEW_LINE> self.addCleanup(shutil.rmtree, self._tempdir) <NEW_LINE> <DEDENT> def test_copy(self): <NEW_LINE> <INDENT> origpack = self.get_pack(pack1_sha) <NEW_LINE> self.assertSucceeds(origpack.index.check) <NEW_LINE> pack_path = os.path.join(self._tempdir, "Elch") <NEW_LINE> write_pack(pack_path, origpack.pack_tuples()) <NEW_LINE> output = run_git_or_fail(['verify-pack', '-v', pack_path]) <NEW_LINE> pack_shas = set() <NEW_LINE> for line in output.splitlines(): <NEW_LINE> <INDENT> sha = line[:40] <NEW_LINE> try: <NEW_LINE> <INDENT> binascii.unhexlify(sha) <NEW_LINE> <DEDENT> except (TypeError, binascii.Error): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> pack_shas.add(sha) <NEW_LINE> <DEDENT> orig_shas = set(o.id for o in origpack.iterobjects()) <NEW_LINE> self.assertEqual(orig_shas, pack_shas)
|
Compatibility tests for reading and writing pack files.
|
6259902621a7993f00c66ecf
|
class any_obj: <NEW_LINE> <INDENT> pass
|
Used to create objects for spawning arbitrary attributes by assignment
|
625990263eb6a72ae038b5b7
|
class MacroRecorder(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "wm.record_macro" <NEW_LINE> bl_label = "Toggle macro recording" <NEW_LINE> v3d = None <NEW_LINE> @classmethod <NEW_LINE> def poll(cls, context): <NEW_LINE> <INDENT> return context.space_data.type in {'TEXT_EDITOR', 'VIEW_3D'} <NEW_LINE> <DEDENT> def invoke(self, context, event): <NEW_LINE> <INDENT> global is_macro_recording <NEW_LINE> global macro_window <NEW_LINE> global macro_recorder <NEW_LINE> if not is_macro_recording: <NEW_LINE> <INDENT> macro_recorder = SceneDiff(context) <NEW_LINE> for scene in bpy.data.scenes: <NEW_LINE> <INDENT> scene.macros.clear() <NEW_LINE> <DEDENT> is_macro_recording = True <NEW_LINE> macro_window = context.window <NEW_LINE> bpy.ops.ed.undo_push(message="Record Macro") <NEW_LINE> if context.space_data.type == 'VIEW_3D': <NEW_LINE> <INDENT> MacroRecorder.v3d = context.space_data <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> MacroRecorder.v3d = None <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> text_block = bpy.data.texts.new("macro") <NEW_LINE> context.scene.macros.write_macro_text(text_block) <NEW_LINE> if context.space_data.type == 'TEXT_EDITOR': <NEW_LINE> <INDENT> context.space_data.text = text_block <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.report({'INFO'}, "Created %s" % text_block.name) <NEW_LINE> <DEDENT> is_macro_recording = False <NEW_LINE> macro_window = None <NEW_LINE> macro_text_block = None <NEW_LINE> MacroRecorder.v3d = None <NEW_LINE> bpy.ops.ed.undo_push(message="End Recording") <NEW_LINE> macro_recorder = None <NEW_LINE> <DEDENT> return {'FINISHED'}
|
Record operators to a text block
|
6259902691af0d3eaad3ad7b
|
class InstructionDesc: <NEW_LINE> <INDENT> def __init__(self, func, n_args): <NEW_LINE> <INDENT> self.func = func <NEW_LINE> self.n_args = n_args <NEW_LINE> <DEDENT> def execute(self, args): <NEW_LINE> <INDENT> self.func(args)
|
data structure for storing instruction function and number of arguments needed.
|
62599026a4f1c619b294f548
|
class infrange(): <NEW_LINE> <INDENT> def __init__(self, min, max, step=0): <NEW_LINE> <INDENT> self.min = min <NEW_LINE> self.max = max <NEW_LINE> self.step = step <NEW_LINE> <DEDENT> @property <NEW_LINE> def range(self): <NEW_LINE> <INDENT> return abs(self.max-self.min) <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> return other > self.max <NEW_LINE> <DEDENT> def __le__(self, other): <NEW_LINE> <INDENT> return other > self.min <NEW_LINE> <DEDENT> def __gt__(self, other): <NEW_LINE> <INDENT> return self.min > other <NEW_LINE> <DEDENT> def __ge__(self, other): <NEW_LINE> <INDENT> return self.max > other <NEW_LINE> <DEDENT> def __contains__(self, item): <NEW_LINE> <INDENT> if self.step == 0: <NEW_LINE> <INDENT> return self.min < item < self.max <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return item in np.linspace(self.min, self.max, int(self.range/self.step)+1) <NEW_LINE> <DEDENT> <DEDENT> def __eq__(self, item): <NEW_LINE> <INDENT> if isinstance(item, self.__class__): <NEW_LINE> <INDENT> return all(( self.min == item.min, self.max == item.max, self.step == item.step )) <NEW_LINE> <DEDENT> return item in self <NEW_LINE> <DEDENT> def __add__(self, other): <NEW_LINE> <INDENT> return self.__class__(self.min+other, self.max+other, self.step) <NEW_LINE> <DEDENT> def __sub__(self, other): <NEW_LINE> <INDENT> return self.__class__(self.min - other, self.max - other, self.step) <NEW_LINE> <DEDENT> def __mul__(self, other): <NEW_LINE> <INDENT> return self.__class__(self.min * other, self.max * other, self.step * other) <NEW_LINE> <DEDENT> def __truedic__(self, other): <NEW_LINE> <INDENT> return self.__class__(self.min / other, self.max / other, self.step / other)
|
Similar to base Python `range`, but allowing the step to be a float or even
0, useful for specifying ranges for logical comparisons.
|
625990268c3a8732951f74ac
|
class ClassResult: <NEW_LINE> <INDENT> def __init__(self, benchclass): <NEW_LINE> <INDENT> self.benchclass = benchclass <NEW_LINE> self.instresults = [] <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for instresult in self.instresults: <NEW_LINE> <INDENT> yield instresult
|
Represents the results of all instances of a benchmark class.
|
62599026d99f1b3c44d065f7
|
class Session(object): <NEW_LINE> <INDENT> engine = create_engine("mysql+pymysql://{user}:{password}@{host}:{port}/{database}".format(**settings.mysql_config), encoding='utf8', connect_args=dict(connect_timeout=2), max_overflow=10, poolclass=sqlalchemy.pool.QueuePool, pool_size=32, pool_recycle=60*10, pool_timeout=1, echo=False) <NEW_LINE> session_template = sessionmaker(bind=engine, autocommit=True, autoflush=False) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> session = scoped_session(type(self).session_template) <NEW_LINE> self.session = session <NEW_LINE> pass <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def new(cls): <NEW_LINE> <INDENT> session = cls() <NEW_LINE> return session <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self.session <NEW_LINE> <DEDENT> def __exit__(self, _type, value, _traceback): <NEW_LINE> <INDENT> if _type is None and value is None and _traceback is None: <NEW_LINE> <INDENT> logger.debug("orm session exit success") <NEW_LINE> self.session.flush() <NEW_LINE> self.session.remove() <NEW_LINE> self.session.close() <NEW_LINE> del self.session <NEW_LINE> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not isinstance(_type, pymysql.err.IntegrityError): <NEW_LINE> <INDENT> logger.exception('') <NEW_LINE> <DEDENT> self.session.rollback() <NEW_LINE> self.session.remove() <NEW_LINE> self.session.close() <NEW_LINE> del self.session <NEW_LINE> return False <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> pass
|
with Session() as session:
session.query()
|
62599026d164cc6175821ecb
|
class Metric(Component): <NEW_LINE> <INDENT> meta_type = portal_type = 'ZentrospectMetric' <NEW_LINE> metric_name = None <NEW_LINE> _properties = Component._properties + ( {'id': 'metric_name', 'type': 'string', 'mode': 'w'}, ) <NEW_LINE> _relations = Component._relations + ( ('process', ToOne(ToManyCont, MODULE_NAME['Process'], 'metrics')), ) <NEW_LINE> def getRRDTemplates(self): <NEW_LINE> <INDENT> basename = self.getRRDTemplateName() <NEW_LINE> process = self.process() <NEW_LINE> powerset = sorted_powerset(( process.system().system_name, process.process_name, self.metric_name)) <NEW_LINE> for parts in powerset: <NEW_LINE> <INDENT> template = self.getRRDTemplateByName('-'.join((basename,) + parts)) <NEW_LINE> if template: <NEW_LINE> <INDENT> return [template] <NEW_LINE> <DEDENT> <DEDENT> return []
|
Model class for Metric.
|
62599026796e427e5384f6d1
|
@implementer(IIntervalTicksDailyEvent) <NEW_LINE> class IntervalTicksDailyEvent(IntervalTicksGenericEvent): <NEW_LINE> <INDENT> pass
|
An Event that will be fired daily from a cronjob
|
625990268e05c05ec3f6f606
|
class ProfessionalForm(forms.ModelForm): <NEW_LINE> <INDENT> phone2 = forms.CharField(label="Phone 2") <NEW_LINE> email2 = forms.EmailField(label="E-mail 2") <NEW_LINE> website2 = forms.URLField(label="Website 2") <NEW_LINE> goals = forms.CharField(label="What are this professionals goals or needs? What opportunities exist to suggest win-win situations?") <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Professional <NEW_LINE> fields = ('name', 'email', 'phone', 'website', 'level', 'neighborhood', 'goals', 'address', 'phone2', 'email2', 'website2', 'company', 'strengths' )
|
renames a variety of model columns for user interaction
|
62599026c432627299fa3f47
|
class OperatorSymbol(QuantumSymbol, Operator): <NEW_LINE> <INDENT> def _pseudo_inverse(self): <NEW_LINE> <INDENT> return PseudoInverse(self)
|
Symbolic operator
See :class:`.QuantumSymbol`.
|
625990266fece00bbaccc90e
|
class VocabularyDetail(RetrieveUpdateDestroyAPIView): <NEW_LINE> <INDENT> serializer_class = VocabularySerializer <NEW_LINE> lookup_field = 'slug' <NEW_LINE> lookup_url_kwarg = 'vocab_slug' <NEW_LINE> permission_classes = ( ViewVocabularyPermission, ManageTaxonomyPermission, IsAuthenticated, ) <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> repo = Repository.objects.get(slug=self.kwargs['repo_slug']) <NEW_LINE> return repo.vocabulary_set.filter( slug=self.kwargs['vocab_slug'] ) <NEW_LINE> <DEDENT> def update(self, request, *args, **kwargs): <NEW_LINE> <INDENT> vocab = self.get_object() <NEW_LINE> new_types = self.request.data.get('learning_resource_types', None) <NEW_LINE> if new_types is not None: <NEW_LINE> <INDENT> old_types = set( t.name for t in vocab.learning_resource_types.all() ) <NEW_LINE> removed_types = old_types - set(new_types) <NEW_LINE> resource_ids_to_reindex = [] <NEW_LINE> with transaction.atomic(): <NEW_LINE> <INDENT> for term in vocab.term_set.all(): <NEW_LINE> <INDENT> for resource in term.learning_resources.all(): <NEW_LINE> <INDENT> if (resource.learning_resource_type.name in removed_types): <NEW_LINE> <INDENT> resource_ids_to_reindex.append(resource.id) <NEW_LINE> term.learning_resources.remove(resource) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> if len(resource_ids_to_reindex) > 0: <NEW_LINE> <INDENT> index_resources.delay(resource_ids_to_reindex) <NEW_LINE> <DEDENT> <DEDENT> return super(VocabularyDetail, self).update( request, *args, **kwargs) <NEW_LINE> <DEDENT> def delete(self, request, *args, **kwargs): <NEW_LINE> <INDENT> vocab = self.get_object() <NEW_LINE> resource_ids = list(LearningResource.objects.filter( terms__vocabulary__id=vocab.id ).values_list("id", flat=True)) <NEW_LINE> ret = super(VocabularyDetail, self).delete(request, *args, **kwargs) <NEW_LINE> index_resources.delay(resource_ids) <NEW_LINE> return ret
|
REST detail view for Vocabulary.
|
6259902691af0d3eaad3ad7d
|
class FakeResponse(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.deferred = defer.fail(err)
|
Fake Response.
|
6259902621a7993f00c66ed3
|
class APITemplateView(HomeAssistantView): <NEW_LINE> <INDENT> url = URL_API_TEMPLATE <NEW_LINE> name = "api:template" <NEW_LINE> def post(self, request): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return template.render(self.hass, request.json['template'], request.json.get('variables')) <NEW_LINE> <DEDENT> except TemplateError as ex: <NEW_LINE> <INDENT> return self.json_message('Error rendering template: {}'.format(ex), HTTP_BAD_REQUEST)
|
View to handle template rendering API requests.
|
625990266e29344779b015a7
|
class Comment(models.Model): <NEW_LINE> <INDENT> id = models.AutoField(primary_key=True) <NEW_LINE> name = models.CharField(max_length=100, help_text='Nom et Prénom') <NEW_LINE> email = models.EmailField(blank=True, null=True, help_text='Adresse email (yyyyyy@monfai.fr)') <NEW_LINE> comment = models.TextField(help_text='Commentaire') <NEW_LINE> pub_date = models.DateTimeField(auto_now_add=True) <NEW_LINE> event = models.ForeignKey(Event,help_text='Évènement') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> ordering = ('pub_date',) <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return "%s on %s - %0*d/%0*d%d" % (self.name, self.event, 2, self.pub_date.day, 2, self.pub_date.month, self.pub_date.year) <NEW_LINE> <DEDENT> def get_absolute_url(self): <NEW_LINE> <INDENT> return "%s#c%d" % (self.event.get_absolute_url(), self.id) <NEW_LINE> <DEDENT> @models.permalink <NEW_LINE> def get_delete_url(self): <NEW_LINE> <INDENT> return (OPENVOLUNTEER_APP_PREFIX + 'views.event_comment_delete', (), {'comment_id': str(self.id)})
|
A very simple comment system for Event details
|
625990266fece00bbaccc910
|
class PrefixKeyFunc: <NEW_LINE> <INDENT> def __init__(self, prefix): <NEW_LINE> <INDENT> self.prefix = prefix <NEW_LINE> <DEDENT> def __call__(self, *a, **kw): <NEW_LINE> <INDENT> return self.prefix + "-" + self.encode_args(a, kw) <NEW_LINE> <DEDENT> def encode_args(self, args, kw={}): <NEW_LINE> <INDENT> a = self.json_encode(list(args))[1:-1] <NEW_LINE> if kw: <NEW_LINE> <INDENT> return a + "-" + self.json_encode(kw) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return a <NEW_LINE> <DEDENT> <DEDENT> def json_encode(self, value): <NEW_LINE> <INDENT> return simplejson.dumps(value, separators=(",", ":"), sort_keys=True)
|
A function to generate cache keys using a prefix and arguments.
|
625990268c3a8732951f74af
|
class Histogram(object): <NEW_LINE> <INDENT> __slots__ = ["_nanosecond_timestamp", "_binStart", "_binSize", "_bins"] <NEW_LINE> def __init__(self, timestamp, binStart, binSize, bins): <NEW_LINE> <INDENT> self._binStart = binStart <NEW_LINE> self._binSize = binSize <NEW_LINE> self._bins = bins <NEW_LINE> if isinstance(timestamp, datetime): <NEW_LINE> <INDENT> self._nanosecond_timestamp = int((timestamp-UNIX_EPOCH).total_seconds()*NANOSECONDS_PER_SECOND) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._nanosecond_timestamp = int(timestamp) <NEW_LINE> <DEDENT> assert self._nanosecond_timestamp >= 0, "timestamp must be greater than 0, or later than Jan 1, 1970" <NEW_LINE> <DEDENT> @property <NEW_LINE> def timestamp(self): <NEW_LINE> <INDENT> return datetime.utcfromtimestamp(self._nanosecond_timestamp / float(NANOSECONDS_PER_SECOND)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def nanseconds(self): <NEW_LINE> <INDENT> return self._nanosecond_timestamp%NANOSECONDS_PER_SECOND <NEW_LINE> <DEDENT> @property <NEW_LINE> def timestamp_nanoseconds(self): <NEW_LINE> <INDENT> return self._nanosecond_timestamp <NEW_LINE> <DEDENT> @property <NEW_LINE> def bin_start(self): <NEW_LINE> <INDENT> return self._binStart <NEW_LINE> <DEDENT> @property <NEW_LINE> def bin_size(self): <NEW_LINE> <INDENT> return self._binSize <NEW_LINE> <DEDENT> @property <NEW_LINE> def bins(self): <NEW_LINE> <INDENT> return self._bins <NEW_LINE> <DEDENT> def descriptor(self, sample_rate): <NEW_LINE> <INDENT> return descriptor(sample_rate, self.bin_start, self.bin_size, len(self.bins)) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Histogram(Bin Start:%s,Bin Size:%s, %s, %s)"%(self.bin_start, self.bin_size, self.timestamp, self.bins) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> for i, binValue in enumerate(other.bins): <NEW_LINE> <INDENT> if binvalue != self.bins[i]: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return self.bin_start == other.bin_start 
and self.bin_size == other.bin_size and self.timestamp_nanoseconds == other.timestamp_nanoseconds <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self.__eq__(other)
|
Histogram represents a timestamped histogram — bin start, bin size, and bin counts — in a timeseries dataset.
|
6259902630c21e258be9976d
|
class ChannelList(QListView): <NEW_LINE> <INDENT> def __init__(self, parent=None): <NEW_LINE> <INDENT> super(ChannelList, self).__init__(parent) <NEW_LINE> self.slider = None <NEW_LINE> self._nxt = 0.0 <NEW_LINE> self.start = False <NEW_LINE> self.residual = 0.0 <NEW_LINE> <DEDENT> def slideStart(self): <NEW_LINE> <INDENT> p = self.mapFromGlobal(QCursor.pos()) <NEW_LINE> item = self.indexAt(p).internalPointer() <NEW_LINE> if isinstance(item, Slider): <NEW_LINE> <INDENT> self.slider = item <NEW_LINE> self.start = True <NEW_LINE> <DEDENT> <DEDENT> def slideStop(self): <NEW_LINE> <INDENT> self.slider = None <NEW_LINE> <DEDENT> def slideTick(self, val, offset, mul): <NEW_LINE> <INDENT> if self.slider is not None: <NEW_LINE> <INDENT> mx = self.slider.maxValue <NEW_LINE> mn = self.slider.minValue <NEW_LINE> tick = 20.0 / mul <NEW_LINE> if self.start or mul == 1.0: <NEW_LINE> <INDENT> self.start = False <NEW_LINE> val = (val * (mx - mn)) + mn <NEW_LINE> rn = round(val * tick) / tick <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> val = offset * (mx - mn) <NEW_LINE> val = self.slider.value + (val * mul) <NEW_LINE> val += self.residual <NEW_LINE> rn = round(val * tick) / tick <NEW_LINE> self.residual = val - rn <NEW_LINE> <DEDENT> rn = min(max(rn, mn), mx) <NEW_LINE> self._nxt = rn <NEW_LINE> QTimer.singleShot(0, self.setval) <NEW_LINE> <DEDENT> <DEDENT> def setval(self): <NEW_LINE> <INDENT> if self.slider is not None and self._nxt is not None: <NEW_LINE> <INDENT> self.slider.value = self._nxt <NEW_LINE> <DEDENT> self._nxt = None
|
A list to display the chosen channels
|
62599026a8ecb03325872176
|
class OnstarDeviceTracker: <NEW_LINE> <INDENT> def __init__(self, see, data): <NEW_LINE> <INDENT> self._see = see <NEW_LINE> self._data = data <NEW_LINE> <DEDENT> def setup(self, hass): <NEW_LINE> <INDENT> self.update() <NEW_LINE> track_utc_time_change( hass, lambda now: self.update(), second=range(0, 60, 30) ) <NEW_LINE> <DEDENT> def update(self) -> None: <NEW_LINE> <INDENT> dev_id = slugify(self._data.status['onstar.plate']) <NEW_LINE> if self._data._pin is None: <NEW_LINE> <INDENT> _LOGGER.debug("Tracking is disabled for vehicle %s", dev_id) <NEW_LINE> return <NEW_LINE> <DEDENT> _LOGGER.info("Updating %s", dev_id) <NEW_LINE> attrs = {"vin": self._data.status['onstar.vin']} <NEW_LINE> self._see( dev_id=dev_id, host_name=self._data.status['onstar.plate'], gps=self._data.gps_position, attributes=attrs, icon="mdi:car", )
|
OnStar device tracker.
|
62599026796e427e5384f6d5
|
class FargateTaskDefinitionBase(Base): <NEW_LINE> <INDENT> fargate_task_definition: FargateTaskDefinition <NEW_LINE> fargate_container = FargateContainer
|
Fargate基底class
|
625990263eb6a72ae038b5bd
|
@orientation_helper(axis_forward='Y', axis_up='Z') <NEW_LINE> class ExportB3D(bpy.types.Operator, ExportHelper): <NEW_LINE> <INDENT> bl_idname = "export_scene.blitz3d_b3d" <NEW_LINE> bl_label = 'Export B3D' <NEW_LINE> filename_ext = ".b3d" <NEW_LINE> filter_glob: StringProperty( default="*.b3d", options={'HIDDEN'}, ) <NEW_LINE> use_selection: BoolProperty( name="Selection Only", description="Export selected objects only", default=False, ) <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> from . import export_b3d <NEW_LINE> keywords = self.as_keywords(ignore=("axis_forward", "axis_up", "filter_glob", "check_existing", )) <NEW_LINE> global_matrix = axis_conversion(to_forward=self.axis_forward, to_up=self.axis_up, ).to_4x4() <NEW_LINE> keywords["global_matrix"] = global_matrix <NEW_LINE> return export_b3d.save(self, context, **keywords)
|
Export to B3D file format (.b3d)
|
62599026d18da76e235b78fa
|
class Sip(TwiML): <NEW_LINE> <INDENT> def __init__(self, uri, **kwargs): <NEW_LINE> <INDENT> super(Sip, self).__init__(**kwargs) <NEW_LINE> self.value = uri
|
<Sip> element
|
625990266e29344779b015a9
|
class ForestParam(TimestampedModel): <NEW_LINE> <INDENT> default = models.NullBooleanField(unique=True) <NEW_LINE> notes = models.TextField(blank=True) <NEW_LINE> name = models.TextField(blank=True) <NEW_LINE> jasmine_json_string = models.TextField() <NEW_LINE> willow_json_string = models.TextField() <NEW_LINE> def params_for_tree(self, tree_name): <NEW_LINE> <INDENT> if tree_name not in ForestTree.values(): <NEW_LINE> <INDENT> raise KeyError(f"Invalid tree \"{tree_name}\". Must be one of {ForestTree.values()}.") <NEW_LINE> <DEDENT> json_string_field_name = f"{tree_name}_json_string" <NEW_LINE> return json.loads(getattr(self, json_string_field_name))
|
Model for tracking params used in Forest analyses. There is one object for all trees.
When adding support for a new tree, make sure to add a migration to populate existing
ForestMetadata objects with the default metadata for the new tree. This way, all existing
ForestTasks are still associated to the same ForestMetadata object and we don't have to give a
warning to users that the metadata have changed.
|
625990266fece00bbaccc912
|
class Param(Element): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Element.__init__(self) <NEW_LINE> <DEDENT> def get_input(self, *args, **kwargs): <NEW_LINE> <INDENT> if self.get_type() in ('file_open', 'file_save'): <NEW_LINE> <INDENT> input_widget = FileParam(self, *args, **kwargs) <NEW_LINE> <DEDENT> elif self.is_enum(): <NEW_LINE> <INDENT> input_widget = EnumParam(self, *args, **kwargs) <NEW_LINE> <DEDENT> elif self.get_options(): <NEW_LINE> <INDENT> input_widget = EnumEntryParam(self, *args, **kwargs) <NEW_LINE> <DEDENT> elif self.get_type() == '_multiline': <NEW_LINE> <INDENT> input_widget = MultiLineEntryParam(self, *args, **kwargs) <NEW_LINE> <DEDENT> elif self.get_type() == '_multiline_python_external': <NEW_LINE> <INDENT> input_widget = PythonEditorParam(self, *args, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> input_widget = EntryParam(self, *args, **kwargs) <NEW_LINE> <DEDENT> return input_widget <NEW_LINE> <DEDENT> def get_markup(self): <NEW_LINE> <INDENT> return Utils.parse_template(PARAM_MARKUP_TMPL, param=self, font=Constants.PARAM_FONT)
|
The graphical parameter.
|
625990265166f23b2e24432f
|
class TestMLen(TestCase): <NEW_LINE> <INDENT> def test_non_mxp_string(self): <NEW_LINE> <INDENT> self.assertEqual(utils.m_len('Test_string'), 11) <NEW_LINE> <DEDENT> def test_mxp_string(self): <NEW_LINE> <INDENT> self.assertEqual(utils.m_len('|lclook|ltat|le'), 2) <NEW_LINE> <DEDENT> def test_mxp_ansi_string(self): <NEW_LINE> <INDENT> self.assertEqual(utils.m_len(ANSIString('|lcl|gook|ltat|le|n')), 2) <NEW_LINE> <DEDENT> def test_non_mxp_ansi_string(self): <NEW_LINE> <INDENT> self.assertEqual(utils.m_len(ANSIString('{gHello{n')), 5) <NEW_LINE> self.assertEqual(utils.m_len(ANSIString('|gHello|n')), 5) <NEW_LINE> <DEDENT> def test_list(self): <NEW_LINE> <INDENT> self.assertEqual(utils.m_len([None, None]), 2) <NEW_LINE> <DEDENT> def test_dict(self): <NEW_LINE> <INDENT> self.assertEqual(utils.m_len({'hello': True, 'Goodbye': False}), 2)
|
Verifies that m_len behaves like len in all situations except those
where MXP may be involved.
|
62599026287bf620b6272b49
|
class L1Regularizer(Regularizer): <NEW_LINE> <INDENT> def __init__(self, reg): <NEW_LINE> <INDENT> super().__init__(reg) <NEW_LINE> <DEDENT> def loss(self, w): <NEW_LINE> <INDENT> return self._lambda * np.linalg.norm(w[:-1], 1) <NEW_LINE> <DEDENT> def gradient(self, w): <NEW_LINE> <INDENT> gradient = np.zeros_like(w) <NEW_LINE> gradient[:-1] = self._lambda * np.sign(w[:-1]) <NEW_LINE> return gradient
|
docstring for L1Regularizer
|
62599026a4f1c619b294f54e
|
class INT(GPIDefaultType): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(INT, self).__init__() <NEW_LINE> self._type = int <NEW_LINE> self._range = None <NEW_LINE> <DEDENT> def edgeTip(self, data): <NEW_LINE> <INDENT> if data is None: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> return str(data) <NEW_LINE> <DEDENT> def toolTip_Data(self, data): <NEW_LINE> <INDENT> msg = str(self._type) + '\n' <NEW_LINE> msg += "val: "+str(data) <NEW_LINE> return msg <NEW_LINE> <DEDENT> def toolTip_Port(self): <NEW_LINE> <INDENT> msg = str(self._type) + '\n' <NEW_LINE> if self._range is not None: <NEW_LINE> <INDENT> msg += "range: "+str(self._range) <NEW_LINE> <DEDENT> return msg <NEW_LINE> <DEDENT> def setDataAttr(self, data): <NEW_LINE> <INDENT> return osuper(INT, self).setDataAttr(data) <NEW_LINE> <DEDENT> def matchesType(self, type_cls): <NEW_LINE> <INDENT> if self.isFreeType(type_cls): <NEW_LINE> <INDENT> self.log.info(str(self.__class__)+"matchesType(): upstream port is free.") <NEW_LINE> return True <NEW_LINE> <DEDENT> if type(type_cls) != type(self): <NEW_LINE> <INDENT> self.log.info(str(self.__class__)+"matchesType(): port class cannot be compared.") <NEW_LINE> return False <NEW_LINE> <DEDENT> if self._range is not None: <NEW_LINE> <INDENT> if type_cls._range is not None: <NEW_LINE> <INDENT> if self._range[0] > type_cls._range[0]: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if self._range[1] < type_cls._range[1]: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def matchesData(self, data): <NEW_LINE> <INDENT> if self._type != type(data): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if self._range is not None: <NEW_LINE> <INDENT> if self._range[0] > data: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if self._range[1] < data: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def set_range(self, val): <NEW_LINE> <INDENT> if 
type(val) != tuple: <NEW_LINE> <INDENT> raise Exception("ERROR: \'range\' requires an \'tuple\'!") <NEW_LINE> <DEDENT> if len(val) != 2: <NEW_LINE> <INDENT> raise Exception( "ERROR: \'range\' requires an \'tuple\' of len = 2!") <NEW_LINE> <DEDENT> for dim in val: <NEW_LINE> <INDENT> if type(dim) != self._type: <NEW_LINE> <INDENT> raise Exception("ERROR: \'range\' requires a \'tuple\' of \'" + str(self._type)+"\' types!") <NEW_LINE> <DEDENT> <DEDENT> self._range = val
|
Enforcement for the standard python-int.
|
62599026925a0f43d25e8fa1
|
class DeltaHVMetric(Metric): <NEW_LINE> <INDENT> def __init__(self, pf: np.ndarray): <NEW_LINE> <INDENT> super(DeltaHVMetric, self).__init__() <NEW_LINE> self._hv = hv = Hypervolume(pf=pf, normalize=True) <NEW_LINE> self.hv_true = hv.calc(pf) <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self) -> str: <NEW_LINE> <INDENT> return 'delta_hv' <NEW_LINE> <DEDENT> @property <NEW_LINE> def value_names(self) -> List[str]: <NEW_LINE> <INDENT> return ['delta_hv', 'hv', 'true_hv'] <NEW_LINE> <DEDENT> def _calculate_values(self, algorithm: Algorithm) -> List[float]: <NEW_LINE> <INDENT> f = self._get_pop_f(algorithm) <NEW_LINE> hv = self._hv.calc(f) <NEW_LINE> delta_hv = (self.hv_true-hv)/self.hv_true <NEW_LINE> return [delta_hv, hv, self.hv_true]
|
Metric measuring the difference to the pre-known hypervolume. It has a value between 0 and 1, where 0 means the
hypervolume is exactly the same, meaning the true Pareto front has been found.
Implementation based on:
Palar, P.S., "On Multi-Objective Efficient Global Optimization Via Universal Kriging Surrogate Model", 2017,
10.1109/CEC.2017.7969368
|
62599026d164cc6175821ed2
|
class FTPServer: <NEW_LINE> <INDENT> def __init__(self, src, sport): <NEW_LINE> <INDENT> self.src = src <NEW_LINE> self.sport = sport <NEW_LINE> self.verbose = False <NEW_LINE> self.tcp_flags = { 'TCP_FIN': 0x01, 'TCP_SYN': 0x02, 'TCP_RST': 0x04, 'TCP_PSH': 0x08, 'TCP_ACK': 0x10, 'TCP_URG': 0x20, 'TCP_ECE': 0x40, 'TCP_CWR': 0x80 } <NEW_LINE> <DEDENT> def handshake(self, pkt): <NEW_LINE> <INDENT> dst = pkt[IP].src <NEW_LINE> src = pkt[IP].dst <NEW_LINE> sport = pkt[TCP].dport <NEW_LINE> dport = pkt[TCP].sport <NEW_LINE> ackno = pkt[TCP].seq + 1 <NEW_LINE> seqno = 0 <NEW_LINE> synack = IP(src=src, dst=dst)/TCP(sport=self.sport, dport=dport, flags='SA', seq=seqno, ack=ackno) <NEW_LINE> reply = None <NEW_LINE> while not reply: <NEW_LINE> <INDENT> reply = sr1(synack, timeout=1, verbose=self.verbose) <NEW_LINE> <DEDENT> seqno += 1 <NEW_LINE> serv = FTPServerConnection(src, dst, sport, dport, seqno, ackno) <NEW_LINE> serv_thread = Thread(target=serv.run) <NEW_LINE> serv_thread.start() <NEW_LINE> print('New connection created') <NEW_LINE> <DEDENT> def sniff_filter(self, pkt): <NEW_LINE> <INDENT> return pkt.haslayer(IP) and (not self.src or pkt[IP].dst == self.src) and pkt.haslayer(TCP) and pkt[TCP].dport == self.sport and pkt[TCP].flags == self.tcp_flags['TCP_SYN'] <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> sniff(prn=self.handshake, lfilter=self.sniff_filter)
|
Wrapper class on the FTPServerConnection. Listens and creates a new connection
for each new SYN.
|
625990261d351010ab8f4a71
|
class CacheWrapper(BaseCache): <NEW_LINE> <INDENT> def __init__(self, server, params: dict): <NEW_LINE> <INDENT> super().__init__(server, params) <NEW_LINE> self._cache = {} <NEW_LINE> self._threshold = self._options.get("THRESHOLD", 0) <NEW_LINE> <DEDENT> def _prune(self): <NEW_LINE> <INDENT> if self._threshold == 0: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if len(self._cache) > self._threshold: <NEW_LINE> <INDENT> now = time() <NEW_LINE> for idx, (key, (expires, _)) in enumerate(self._cache.items()): <NEW_LINE> <INDENT> if expires is not None and (expires <= now or idx % 3 == 0): <NEW_LINE> <INDENT> self._cache.pop(key, None) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def get(self, key): <NEW_LINE> <INDENT> key = self.make_key(key) <NEW_LINE> expires, value = self._cache.get(key, (0, None)) <NEW_LINE> if expires is None or expires > time(): <NEW_LINE> <INDENT> return self.decode(value) <NEW_LINE> <DEDENT> <DEDENT> def set(self, key, value, timeout=DEFAULT_TIMEOUT): <NEW_LINE> <INDENT> key = self.make_key(key) <NEW_LINE> timeout = self.get_backend_timeout(timeout) <NEW_LINE> self._prune() <NEW_LINE> self._cache[key] = ((time() + timeout) if timeout else timeout, self.encode(value)) <NEW_LINE> <DEDENT> async def add(self, key, value, timeout=DEFAULT_TIMEOUT): <NEW_LINE> <INDENT> key = self.make_key(key) <NEW_LINE> timeout = self.get_backend_timeout(timeout) <NEW_LINE> if len(self._cache) > self._threshold: <NEW_LINE> <INDENT> self._prune() <NEW_LINE> <DEDENT> item = ((time() + timeout) if timeout else timeout, self.encode(value)) <NEW_LINE> self._cache.setdefault(key, item) <NEW_LINE> <DEDENT> async def delete(self, key): <NEW_LINE> <INDENT> key = self.make_key(key) <NEW_LINE> self._cache.pop(key, None) <NEW_LINE> <DEDENT> async def clear(self): <NEW_LINE> <INDENT> self._cache.clear() <NEW_LINE> <DEDENT> async def clear_keys(self, key_prefix): <NEW_LINE> <INDENT> key = self.make_key(key_prefix) <NEW_LINE> del_keys = [k for k in self._cache.keys() if 
k.startswith(key)] <NEW_LINE> for k in del_keys: <NEW_LINE> <INDENT> self._cache.pop(k, None) <NEW_LINE> <DEDENT> return len(del_keys)
|
简单的内存缓存;
适用于单个进程环境,主要用于开发服务器;
非线程安全的
|
62599026c432627299fa3f4d
|
class XmlMeasure(): <NEW_LINE> <INDENT> def __init__(self, number, time): <NEW_LINE> <INDENT> self.number = number <NEW_LINE> self.composition = [] <NEW_LINE> self.currentTime = 0 <NEW_LINE> self.timelast = time <NEW_LINE> <DEDENT> def appendNote(self, note): <NEW_LINE> <INDENT> if isinstance(note, XmlNote): <NEW_LINE> <INDENT> if note.step != REST: <NEW_LINE> <INDENT> if self.currentTime <= self.timelast : <NEW_LINE> <INDENT> self.composition.append(note) <NEW_LINE> self.currentTime += note.duration <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.composition.append(note) <NEW_LINE> <DEDENT> <DEDENT> if isinstance(note, XmlHarmony): <NEW_LINE> <INDENT> self.composition.append(note) <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> res = [] <NEW_LINE> res.append("\t<measure number=\"{number}\">\n".format(number=self.number)) <NEW_LINE> for note in self.composition: <NEW_LINE> <INDENT> res.append(str(note)) <NEW_LINE> <DEDENT> res.append("\t</measure>\n") <NEW_LINE> return "".join(res)
|
Define what a measure is in MusicXML format
|
625990261f5feb6acb163b4c
|
class GSFMeta(ABCMeta): <NEW_LINE> <INDENT> def __repr__(self): <NEW_LINE> <INDENT> return '{0}.{1}'.format(__package__, self.__name__)
|
GSF Metaclass for overriding string representations.
|
6259902773bcbd0ca4bcb1ec
|
class LineItemViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = models.LineItem.objects.all() <NEW_LINE> serializer_class = serializers.LineItemSerializer
|
API endpoint that allows Line Items to be viewed or edited.
@samphillips1879
|
62599027d99f1b3c44d065fe
|
class AS3Element(object): <NEW_LINE> <INDENT> folderPattern = re.compile('.*/$') <NEW_LINE> def __init__(self, key, etag, cont_type, metadata): <NEW_LINE> <INDENT> self.key = key.encode('utf-8') <NEW_LINE> self.etag = etag.encode('utf-8') <NEW_LINE> self.content_type = cont_type.encode('utf-8') <NEW_LINE> self.metadata = metadata <NEW_LINE> <DEDENT> def is_file(self): <NEW_LINE> <INDENT> return self.folderPattern.match(self.key) is None <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> ret_val = [] <NEW_LINE> ret_val.append("AS3Element : [key=") <NEW_LINE> ret_val.append(self.key) <NEW_LINE> ret_val.append(", etag=") <NEW_LINE> ret_val.append(self.etag) <NEW_LINE> ret_val.append(", content_type=") <NEW_LINE> ret_val.append(self.content_type) <NEW_LINE> ret_val.append(", metadata=") <NEW_LINE> ret_val.append(str(self.metadata)) <NEW_LINE> ret_val.append(", is_file=") <NEW_LINE> ret_val.append(str(self.is_file())) <NEW_LINE> ret_val.append("]") <NEW_LINE> return "".join(ret_val) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def json_decode(cls, obj): <NEW_LINE> <INDENT> cls_name = '__{}__'.format(cls.__name__) <NEW_LINE> if cls_name in obj: <NEW_LINE> <INDENT> as3_ele = obj[cls_name] <NEW_LINE> return cls(CorpusItem.json_decode(as3_ele['key']), as3_ele['etag'], as3_ele['content_type'], as3_ele['metadata']) <NEW_LINE> <DEDENT> return obj <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_s3_object(cls, s3_obj, sha1=ByteSequence.EMPTY_SHA1): <NEW_LINE> <INDENT> etag = s3_obj.e_tag[1:-1].encode('utf-8') <NEW_LINE> corpus_item = CorpusItem(sha1, s3_obj.content_length, s3_obj.last_modified, s3_obj.key.encode('utf-8')) <NEW_LINE> s3_ele = AS3Element(s3_obj.key.encode('utf-8'), etag, s3_obj.content_type.encode('utf-8'), s3_obj.metadata) <NEW_LINE> return corpus_item, s3_ele
|
Encapsulates the attributes of an Amazon S3 Storage element.
These are analogous to files and folders on a regular file system.
|
62599027ac7a0e7691f73445
|
class Cluster: <NEW_LINE> <INDENT> cluster_id = int() <NEW_LINE> oids = [] <NEW_LINE> def __init__(self, cluster_id): <NEW_LINE> <INDENT> self.cluster_id = cluster_id <NEW_LINE> self.oids = [] <NEW_LINE> <DEDENT> def getCluster_id(self): <NEW_LINE> <INDENT> return self.cluster_id
|
generated source for class Cluster
|
625990278c3a8732951f74b4
|
class Mount(mount.Mount): <NEW_LINE> <INDENT> mode = 'nbd' <NEW_LINE> device_id_string = mode <NEW_LINE> _DEVICES = ['/dev/nbd%s' % i for i in range(FLAGS.max_nbd_devices)] <NEW_LINE> def _allocate_nbd(self): <NEW_LINE> <INDENT> if not os.path.exists("/sys/block/nbd0"): <NEW_LINE> <INDENT> self.error = _('nbd unavailable: module not loaded') <NEW_LINE> return None <NEW_LINE> <DEDENT> while True: <NEW_LINE> <INDENT> if not self._DEVICES: <NEW_LINE> <INDENT> self.error = _('No free nbd devices') <NEW_LINE> return None <NEW_LINE> <DEDENT> device = self._DEVICES.pop() <NEW_LINE> if not os.path.exists("/sys/block/%s/pid" % os.path.basename(device)): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> return device <NEW_LINE> <DEDENT> def _free_nbd(self, device): <NEW_LINE> <INDENT> if not device in self._DEVICES: <NEW_LINE> <INDENT> self._DEVICES.append(device) <NEW_LINE> <DEDENT> <DEDENT> def get_dev(self): <NEW_LINE> <INDENT> device = self._allocate_nbd() <NEW_LINE> if not device: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> _out, err = utils.trycmd('qemu-nbd', '-c', device, self.image, run_as_root=True) <NEW_LINE> if err: <NEW_LINE> <INDENT> self.error = _('qemu-nbd error: %s') % err <NEW_LINE> self._free_nbd(device) <NEW_LINE> return False <NEW_LINE> <DEDENT> for _i in range(FLAGS.timeout_nbd): <NEW_LINE> <INDENT> if os.path.exists("/sys/block/%s/pid" % os.path.basename(device)): <NEW_LINE> <INDENT> self.device = device <NEW_LINE> break <NEW_LINE> <DEDENT> time.sleep(1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.error = _('nbd device %s did not show up') % device <NEW_LINE> self._free_nbd(device) <NEW_LINE> return False <NEW_LINE> <DEDENT> self.linked = True <NEW_LINE> return True <NEW_LINE> <DEDENT> def unget_dev(self): <NEW_LINE> <INDENT> if not self.linked: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> utils.execute('qemu-nbd', '-d', self.device, run_as_root=True) <NEW_LINE> self._free_nbd(self.device) <NEW_LINE> self.linked = False <NEW_LINE> 
self.device = None
|
qemu-nbd support disk images.
|
62599027925a0f43d25e8fa3
|
class MarkovChain: <NEW_LINE> <INDENT> def __init__(self, proposal, constraints, accept, initial_state, total_steps=1000): <NEW_LINE> <INDENT> if callable(constraints): <NEW_LINE> <INDENT> is_valid = constraints <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> is_valid = Validator(constraints) <NEW_LINE> <DEDENT> if not is_valid(initial_state): <NEW_LINE> <INDENT> failed = [ constraint for constraint in is_valid.constraints if not constraint(initial_state) ] <NEW_LINE> message = ( "The given initial_state is not valid according is_valid. " "The failed constraints were: " + ",".join([f.__name__ for f in failed]) ) <NEW_LINE> raise ValueError(message) <NEW_LINE> <DEDENT> self.proposal = proposal <NEW_LINE> self.is_valid = is_valid <NEW_LINE> self.accept = accept <NEW_LINE> self.total_steps = total_steps <NEW_LINE> self.initial_state = initial_state <NEW_LINE> self.state = initial_state <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> self.counter = 0 <NEW_LINE> self.state = self.initial_state <NEW_LINE> return self <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> if self.counter == 0: <NEW_LINE> <INDENT> self.counter += 1 <NEW_LINE> return self.state <NEW_LINE> <DEDENT> while self.counter < self.total_steps: <NEW_LINE> <INDENT> proposed_next_state = self.proposal(self.state) <NEW_LINE> self.state.parent = None <NEW_LINE> if self.is_valid(proposed_next_state): <NEW_LINE> <INDENT> proposed_next_state.accepted = self.accept(proposed_next_state) <NEW_LINE> if proposed_next_state.accepted: <NEW_LINE> <INDENT> self.state = proposed_next_state <NEW_LINE> <DEDENT> self.counter += 1 <NEW_LINE> return proposed_next_state <NEW_LINE> <DEDENT> <DEDENT> raise StopIteration <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self.total_steps <NEW_LINE> <DEDENT> def with_progress_bar(self): <NEW_LINE> <INDENT> from tqdm.auto import tqdm <NEW_LINE> return tqdm(self)
|
MarkovChain is an iterator that allows the user to iterate over the states
of a Markov chain run.
Example usage:
.. code-block:: python
chain = MarkovChain(proposal, is_valid, accept, initial_state)
for state in chain:
# Do whatever you want - print output, compute scores, ...
|
62599027a8ecb0332587217a
|
class EmptyTileFilter: <NEW_LINE> <INDENT> def filter(self, tiles): <NEW_LINE> <INDENT> return [tile for tile in tiles if tile.piece is None]
|
Filter that only returns Empty Tiles
|
625990273eb6a72ae038b5c1
|
class TestSplitJobActionDto(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testSplitJobActionDto(self): <NEW_LINE> <INDENT> pass
|
SplitJobActionDto unit test stubs
|
62599027d99f1b3c44d06600
|
class UnitLengthScaler(BaseScaler): <NEW_LINE> <INDENT> def _calculate_constant_reduction(self, column): <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> def _calculate_factor_divisor(self, column): <NEW_LINE> <INDENT> return np.linalg.norm(column)
|
Will normalize all provided columns to be unit length.
Or, ||x|| = 1
Calculation is:
x' = x / ||x||
|
62599027287bf620b6272b4d
|
class DeleteResult(object): <NEW_LINE> <INDENT> def __init__(self, deleted_count): <NEW_LINE> <INDENT> self.deleted_count = deleted_count
|
The return type for delete methods.
|
62599027925a0f43d25e8fa5
|
class Qgroup(models.Model): <NEW_LINE> <INDENT> uuid = models.CharField(max_length=4096, unique=True)
|
uuid of the qgroup
|
625990275e10d32532ce40b3
|
class Resource(Model): <NEW_LINE> <INDENT> def __init__(self, resource_id, project_id, first_sample_timestamp, last_sample_timestamp, source, user_id, metadata): <NEW_LINE> <INDENT> Model.__init__(self, resource_id=resource_id, first_sample_timestamp=first_sample_timestamp, last_sample_timestamp=last_sample_timestamp, project_id=project_id, source=source, user_id=user_id, metadata=metadata, )
|
Something for which sample data has been collected.
|
62599027a8ecb0332587217c
|
class BaseStaffDashboardView(UserDataMixin, TemplateView): <NEW_LINE> <INDENT> def get_filter_args(self): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(BaseStaffDashboardView, self).get_context_data(**kwargs) <NEW_LINE> project_dict = OrderedDict() <NEW_LINE> project_dict[ProjectGroup('Proposed Projects', "proposed")] = Project.objects.get_proposed(*self.get_filter_args()) <NEW_LINE> project_dict[ProjectGroup('Staged projects', "staged")] = Project.objects.get_staged(*self.get_filter_args()) <NEW_LINE> project_dict[ProjectGroup('Active Projects', "active")] = Project.objects.get_active(*self.get_filter_args()) <NEW_LINE> project_dict[ProjectGroup('Completed Projects', "completed")] = Project.objects.get_completed(*self.get_filter_args()) <NEW_LINE> context["project_dict"] = project_dict <NEW_LINE> context["role"] = self.role or "donor" <NEW_LINE> context['donated_projects'] = Project.objects.donated_projects(self.user_profile) <NEW_LINE> statistics_dictionary = aggregate_stats(self.user_profile) <NEW_LINE> statistics_dictionary['total_donated'] = total_donations() <NEW_LINE> statistics_dictionary['people_served'] = Project.objects.aggregate(n=Sum('people_affected'))['n'] <NEW_LINE> humanize_integers(statistics_dictionary) <NEW_LINE> context['statistics'] = statistics_dictionary <NEW_LINE> return context
|
Base view for the administrator and ambassador dashboard views. The
specific views in administrator/views.py and ambassador/views.py
will inherit from this view.
|
625990271d351010ab8f4a76
|
class system_usermanager(j.code.classGetBase()): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> self._te={} <NEW_LINE> self.actorname="usermanager" <NEW_LINE> self.appname="system" <NEW_LINE> <DEDENT> def authenticate(self, name, secret, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError ("not implemented method authenticate") <NEW_LINE> <DEDENT> def create(self, username, password, groups, emails, domain, provider, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError ("not implemented method create") <NEW_LINE> <DEDENT> def createGroup(self, name, domain, description, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError ("not implemented method createGroup") <NEW_LINE> <DEDENT> def delete(self, username, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError ("not implemented method delete") <NEW_LINE> <DEDENT> def deleteGroup(self, id, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError ("not implemented method deleteGroup") <NEW_LINE> <DEDENT> def editGroup(self, name, domain, description, users, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError ("not implemented method editGroup") <NEW_LINE> <DEDENT> def editUser(self, username, groups, password, emails, domain, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError ("not implemented method editUser") <NEW_LINE> <DEDENT> def userexists(self, name, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError ("not implemented method userexists") <NEW_LINE> <DEDENT> def userget(self, name, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError ("not implemented method userget") <NEW_LINE> <DEDENT> def whoami(self, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError ("not implemented method whoami")
|
get a user
|
6259902721bff66bcd723bc3
|
class CachedHistoryIterator(HistoryIterator): <NEW_LINE> <INDENT> def __init__(self, messageable, limit, before=None, after=None, around=None, oldest_first=None): <NEW_LINE> <INDENT> super().__init__(messageable, limit, before, after, around, oldest_first) <NEW_LINE> self.prefill = self.reverse is False and around is None <NEW_LINE> <DEDENT> async def next(self): <NEW_LINE> <INDENT> if self.prefill: <NEW_LINE> <INDENT> await self.prefill_from_cache() <NEW_LINE> self.prefill = False <NEW_LINE> <DEDENT> return await super().next() <NEW_LINE> <DEDENT> async def prefill_from_cache(self): <NEW_LINE> <INDENT> if not hasattr(self, 'channel'): <NEW_LINE> <INDENT> channel = await self.messageable._get_channel() <NEW_LINE> self.channel = channel <NEW_LINE> <DEDENT> for msg in reversed(self.channel._state._messages): <NEW_LINE> <INDENT> if msg.channel.id == self.channel.id and self.limit > 0 and (not self.before or msg.id < self.before.id): <NEW_LINE> <INDENT> self.limit -= 1 <NEW_LINE> self.before = discord.Object(id=msg.id) <NEW_LINE> await self.messages.put(msg)
|
HistoryIterator, but we hit the cache first.
|
62599027bf627c535bcb2417
|
class MessageType(object): <NEW_LINE> <INDENT> X224_TPDU_CONNECTION_REQUEST = 0xE0 <NEW_LINE> X224_TPDU_CONNECTION_CONFIRM = 0xD0 <NEW_LINE> X224_TPDU_DISCONNECT_REQUEST = 0x80 <NEW_LINE> X224_TPDU_DATA = 0xF0 <NEW_LINE> X224_TPDU_ERROR = 0x70
|
@summary: Message type
|
6259902766673b3332c3134f
|
class LogisticNormal(TransformedDistribution): <NEW_LINE> <INDENT> arg_constraints = {'loc': constraints.real, 'scale': constraints.positive} <NEW_LINE> support = constraints.simplex <NEW_LINE> has_rsample = True <NEW_LINE> def __init__(self, loc, scale, validate_args=None): <NEW_LINE> <INDENT> base_dist = Normal(loc, scale, validate_args=validate_args) <NEW_LINE> if not base_dist.batch_shape: <NEW_LINE> <INDENT> base_dist = base_dist.expand([1]) <NEW_LINE> <DEDENT> super(LogisticNormal, self).__init__(base_dist, StickBreakingTransform(), validate_args=validate_args) <NEW_LINE> <DEDENT> def expand(self, batch_shape, _instance=None): <NEW_LINE> <INDENT> new = self._get_checked_instance(LogisticNormal, _instance) <NEW_LINE> return super(LogisticNormal, self).expand(batch_shape, _instance=new) <NEW_LINE> <DEDENT> @property <NEW_LINE> def loc(self): <NEW_LINE> <INDENT> return self.base_dist.base_dist.loc <NEW_LINE> <DEDENT> @property <NEW_LINE> def scale(self): <NEW_LINE> <INDENT> return self.base_dist.base_dist.scale
|
Creates a logistic-normal distribution parameterized by :attr:`loc` and :attr:`scale`
that define the base `Normal` distribution transformed with the
`StickBreakingTransform` such that::
X ~ LogisticNormal(loc, scale)
Y = log(X / (1 - X.cumsum(-1)))[..., :-1] ~ Normal(loc, scale)
Args:
loc (float or Tensor): mean of the base distribution
scale (float or Tensor): standard deviation of the base distribution
Example::
>>> # logistic-normal distributed with mean=(0, 0, 0) and stddev=(1, 1, 1)
>>> # of the base Normal distribution
>>> m = distributions.LogisticNormal(torch.tensor([0.0] * 3), torch.tensor([1.0] * 3))
>>> m.sample()
tensor([ 0.7653, 0.0341, 0.0579, 0.1427])
|
62599027d18da76e235b78fe
|
class Episode(object): <NEW_LINE> <INDENT> REQUIRED_KEYS = ('title', 'url', 'podcast_title', 'podcast_url', 'description', 'website', 'released', 'mygpo_link') <NEW_LINE> def __init__(self, title, url, podcast_title, podcast_url, description, website, released, mygpo_link): <NEW_LINE> <INDENT> self.title = title <NEW_LINE> self.url = url <NEW_LINE> self.podcast_title = podcast_title <NEW_LINE> self.podcast_url = podcast_url <NEW_LINE> self.description = description <NEW_LINE> self.website = website <NEW_LINE> self.released = released <NEW_LINE> self.mygpo_link = mygpo_link <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, d): <NEW_LINE> <INDENT> for key in cls.REQUIRED_KEYS: <NEW_LINE> <INDENT> if key not in d: <NEW_LINE> <INDENT> raise ValueError('Missing keys for episode') <NEW_LINE> <DEDENT> <DEDENT> return cls(*(d.get(k) for k in cls.REQUIRED_KEYS)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, self.__class__): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return all(getattr(self, k) == getattr(other, k) for k in self.REQUIRED_KEYS)
|
Container Class for Episodes
Attributes:
title -
url -
podcast_title -
podcast_url -
description -
website -
released -
mygpo_link -
|
6259902721a7993f00c66ede
|
class CyclicEF(LinFixedEF): <NEW_LINE> <INDENT> def __init__(self, fid: str, sfid: str = None, name: str = None, desc: str = None, parent: CardDF = None, rec_len={1, None}, **kwargs): <NEW_LINE> <INDENT> super().__init__(fid=fid, sfid=sfid, name=name, desc=desc, parent=parent, rec_len=rec_len, **kwargs)
|
Cyclic EF (Elementary File) in the smart card filesystem
|
62599027d99f1b3c44d06604
|
class PasswordUpdateForm(forms.Form): <NEW_LINE> <INDENT> your_password = forms.CharField(widget=forms.PasswordInput()) <NEW_LINE> new_password = forms.CharField(widget=forms.PasswordInput()) <NEW_LINE> confirm_new_password = forms.CharField(widget=forms.PasswordInput()) <NEW_LINE> def clean(self): <NEW_LINE> <INDENT> cleaned_data = super(PasswordUpdateForm, self).clean() <NEW_LINE> password = cleaned_data.get('new_password') <NEW_LINE> confirm_password = cleaned_data.get('confirm_new_password') <NEW_LINE> if password != confirm_password: <NEW_LINE> <INDENT> self.add_error('confirm_new_password', "The entered passwords do not match.") <NEW_LINE> <DEDENT> return cleaned_data
|
Form to update a user password
|
6259902763f4b57ef0086523
|
@permission_classes((permissions.IsAuthenticated,)) <NEW_LINE> @authentication_classes((authentication.TokenAuthentication,authentication.SessionAuthentication,)) <NEW_LINE> class NERView(APIView): <NEW_LINE> <INDENT> @permission_classes((permissions.AllowAny,)) <NEW_LINE> def get(self, request): <NEW_LINE> <INDENT> data = request.GET <NEW_LINE> ner_obj = NLTKner(data) <NEW_LINE> res = ner_obj.ner() <NEW_LINE> return Response(res)
|
View for Named Entity Recognition
|
62599027ac7a0e7691f7344b
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.