code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
|---|---|---|
class Lista(db.Model):
    """A candidate list ("lista") belonging to a political front ("frente")."""

    __tablename__ = "Lista"
    id = db.Column(db.String(10), primary_key=True)
    descripcion = db.Column(db.String(50))
    frente_id = db.Column(db.Integer, db.ForeignKey('Frente.id'))
    frente = db.relationship('Frente', backref=db.backref('listas', lazy='dynamic'))

    def __init__(self, id=None, descripcion=None, idFrente=None, frente=None):
        # Only assign the primary key when explicitly provided.
        if(id):
            self.id = id
        self.descripcion = descripcion
        # Prefer linking by foreign-key id; otherwise fall back to the
        # relationship object (which may be None).
        if idFrente:
            self.frente_id = idFrente
        else:
            self.frente = frente

    def __repr__(self):
        # Uppercased "FRENTE - LISTA" label; assumes self.frente is set.
        return str.upper(self.frente.descripcion + ' - ' + self.descripcion)
|
docstring for Lista
|
6259904bb5575c28eb7136cb
|
class ApiCallHandler(object):
    """Base class for restful API renderers.

    Subclasses implement Handle() to process a single API call.
    """

    # Auto-registers subclasses with the central registry (Python 2 style).
    __metaclass__ = registry.MetaclassRegistry

    args_type = None
    result_type = None
    max_execution_time = 60  # presumably seconds -- TODO confirm against callers
    strip_json_root_fields_types = True

    def Handle(self, args, token=None):
        """Process the API call; must be overridden by subclasses."""
        raise NotImplementedError()
|
Base class for restful API renderers.
|
6259904b009cb60464d0293a
|
class FrameOperation(HookBaseClass):
    """Hook called to perform a frame operation with the current scene."""

    def get_frame_range(self, **kwargs):
        """Return the scene's (start_frame, stop_frame) tuple.

        Missing values in the DCC's frame-range dict default to 0.
        """
        app = self.parent
        engine = sgtk.platform.current_engine()
        dcc_app = engine.app
        frame_range = dcc_app.get_frame_range()
        start_frame = frame_range.get("start_frame", 0)
        stop_frame = frame_range.get("stop_frame", 0)
        return (start_frame, stop_frame)

    def set_frame_range(self, in_frame=None, out_frame=None, **kwargs):
        """Set the scene's frame range to [in_frame, out_frame]."""
        app = self.parent
        engine = sgtk.platform.current_engine()
        dcc_app = engine.app
        # NOTE(review): computed but never used -- confirm whether it should
        # be passed to set_frame_count() instead of out_frame below.
        target_frame_duration = out_frame - in_frame + 1
        dcc_app.set_frame_count(out_frame)
        dcc_app.set_start_frame(in_frame)
        dcc_app.set_stop_frame(out_frame)
|
Hook called to perform a frame operation with the
current scene
|
6259904bd6c5a102081e3521
|
class ExpressionAttributeLookupSpecial(ExpressionAttributeLookup):
    """Special lookup of an attribute of an object.

    Typically from code like "with source: pass"; these go directly to
    slots, and are performed for with statements of Python2.7 or higher.
    """

    kind = "EXPRESSION_ATTRIBUTE_LOOKUP_SPECIAL"

    def computeExpression(self, constraint_collection):
        # Delegate to the looked-up source so it can optimize the special
        # (slot-based) attribute access.
        return self.getLookupSource().computeExpressionAttributeSpecial(
            lookup_node = self,
            attribute_name = self.getAttributeName(),
            constraint_collection = constraint_collection
        )
|
Special lookup of an attribute of an object.
Typically from code like this: with source: pass
These directly go to slots, and are performed for with statements
of Python2.7 or higher.
|
6259904ba8ecb03325872616
|
class BadParameter (SinonException) :
    """Raised when a given parameter is out of bound or ill formatted.

    :param msg: error message indicating the cause for the exception.
    :param obj: optional object associated with the error.
    """

    def __init__ (self, msg, obj=None) :
        # NOTE(review): 'msg' is accepted but never forwarded to the base
        # class -- confirm whether SinonException expects (msg, obj) here.
        SinonException.__init__(self, obj)
|
:param msg: Error message, indicating the cause for the exception
being raised.
:type msg: string
:raises: --
A given parameter is out of bound or ill formatted.
|
6259904b7d847024c075d7d7
|
class UserPreferencesView(FormView, MailmanClientMixin):
    """Generic view for the logged-in user's various preferences."""

    form_class = UserPreferences

    def get_context_data(self, **kwargs):
        # Expose the Mailman user object to the template.
        data = super(UserPreferencesView, self).get_context_data(**kwargs)
        data['mm_user'] = self.mm_user
        return data

    def get_form_kwargs(self):
        # Hand the current preferences object to the form; _get_preferences()
        # is expected from a subclass/mixin (not visible here).
        kwargs = super(UserPreferencesView, self).get_form_kwargs()
        kwargs['preferences'] = self._get_preferences()
        return kwargs

    def _set_view_attributes(self, request, *args, **kwargs):
        # Look up (or create) the Mailman user matching the Django user.
        self.mm_user = MailmanUser.objects.get_or_create_from_django(
            request.user)

    @method_decorator(login_required)
    def dispatch(self, request, *args, **kwargs):
        self._set_view_attributes(request, *args, **kwargs)
        return super(UserPreferencesView, self).dispatch(
            request, *args, **kwargs)

    def form_valid(self, form):
        try:
            form.save()
        except HTTPError as e:
            # Surface Mailman API errors to the user instead of crashing.
            messages.error(self.request, e.msg)
        if form.has_changed():
            messages.success(
                self.request, _('Your preferences have been updated.'))
        else:
            messages.info(self.request, _('Your preferences did not change.'))
        return super(UserPreferencesView, self).form_valid(form)
|
Generic view for the logged-in user's various preferences.
|
6259904b1f5feb6acb163ffa
|
class AttachmentManagerTestCase(test.TestCase):
    """Attachment related tests for volume.manager.py."""

    def setUp(self):
        """Create the volume manager, an admin context and fake project ids."""
        super(AttachmentManagerTestCase, self).setUp()
        self.manager = importutils.import_object(CONF.volume_manager)
        self.configuration = mock.Mock(conf.Configuration)
        self.context = context.get_admin_context()
        self.context.user_id = fake.USER_ID
        self.project_id = fake.PROJECT3_ID
        self.context.project_id = self.project_id
        self.manager.driver.set_initialized()
        self.manager.stats = {'allocated_capacity_gb': 100, 'pools': {}}

    def test_attachment_update(self):
        """attachment_update returns the expected connection info dict."""
        volume_params = {'status': 'available'}
        connector = {
            "initiator": "iqn.1993-08.org.debian:01:cad181614cec",
            "ip": "192.168.1.20",
            "platform": "x86_64",
            "host": "tempest-1",
            "os_type": "linux2",
            "multipath": False}
        vref = tests_utils.create_volume(self.context, **volume_params)
        self.manager.create_volume(self.context, vref)
        values = {'volume_id': vref.id,
                  'volume_host': vref.host,
                  'attach_status': 'reserved',
                  'instance_uuid': fake.UUID1}
        attachment_ref = db.volume_attach(self.context, values)
        # Suppress usage notifications during the update call.
        with mock.patch.object(self.manager, '_notify_about_volume_usage',
                               return_value=None):
            expected = {
                'encrypted': False,
                'qos_specs': None,
                'access_mode': 'rw',
                'driver_volume_type': 'iscsi',
                'attachment_id': attachment_ref.id}
            self.assertEqual(expected,
                             self.manager.attachment_update(
                                 self.context, vref, connector,
                                 attachment_ref.id))

    def test_attachment_delete(self):
        """attachment_delete removes the attachment record from the DB."""
        volume_params = {'status': 'available'}
        vref = tests_utils.create_volume(self.context, **volume_params)
        self.manager.create_volume(self.context, vref)
        values = {'volume_id': vref.id,
                  'volume_host': vref.host,
                  'attach_status': 'reserved',
                  'instance_uuid': fake.UUID1}
        attachment_ref = db.volume_attach(self.context, values)
        attachment_ref = db.volume_attachment_get(
            self.context, attachment_ref['id'])
        self.manager.attachment_delete(self.context, attachment_ref['id'], vref)
        # The record must be gone afterwards.
        self.assertRaises(exception.VolumeAttachmentNotFound,
                          db.volume_attachment_get,
                          self.context,
                          attachment_ref.id)
|
Attachment related test for volume.manager.py.
|
6259904b004d5f362081f9ea
|
class CheckinsEndpointTestCase(BaseAuthenticatedEndpointTestCase):
    """Smoke tests for the authenticated checkins endpoint."""

    def test_checkin(self):
        # Adding a checkin returns a 'checkin' payload.
        response = self.api.checkins.add(params={'venueId': self.default_venueid})
        assert 'checkin' in response

    def test_recent(self):
        response = self.api.checkins.recent()
        assert 'recent' in response

    def test_recent_location(self):
        # 'll' is a lat,lng string per the Foursquare-style API convention.
        response = self.api.checkins.recent(params={'ll': self.default_geo})
        assert 'recent' in response

    def test_recent_limit(self):
        response = self.api.checkins.recent(params={'limit': 10})
        assert 'recent' in response
|
General tests for the checkins endpoint.
|
6259904b50485f2cf55dc390
|
class ScheduleSerializer(serializers.Serializer):
    """Serializer for sport appointment ("约运动") schedules.

    Hidden user/school fields come from the request context; validate()
    enforces time-window rules; create() deduplicates by (user, sport_time).
    """

    # Filled from the request, never from user input.
    user = serializers.HiddenField(default=serializers.CurrentUserDefault())
    school = serializers.HiddenField(default=CurrentSchoolDefault())
    sport = serializers.PrimaryKeyRelatedField(queryset=Sports.objects.all(), required=True, label='运动')
    sport_time = serializers.DateTimeField(format='%Y-%m-%d %H:%M:%S', label='运动开始时间')
    sport_end_time = serializers.DateTimeField(format='%Y-%m-%d %H:%M:%S', label='运动结束时间')
    add_time = serializers.DateTimeField(read_only=True, format='%Y-%m-%d %H:%M:%S')
    address = serializers.CharField(required=True, max_length=25, error_messages={'max_length': '详细地址长度不能超过25'}, label='详细地址')
    now_people = serializers.IntegerField(read_only=True, default=1)
    people_nums = serializers.IntegerField(min_value=2, required=True, max_value=20, error_messages={'required': '请填写总人数', 'min_value': '人数至少为两人,一起运动更愉快!', 'max_value': '请按照运动适当指定人数,不能超过20人', }, label='约定总人数' )

    def validate(self, attrs):
        """Cross-field validation of the start and end times."""
        now_time = datetime.now()
        sport_time = attrs['sport_time']
        sport_end_time = attrs['sport_end_time']
        # Start time must be in the future.
        if sport_time <= now_time:
            raise serializers.ValidationError('约定运动开始的时间应该大于目前时间')
        # Start time must be within the next ten days.
        if (sport_time - now_time).days >= 10:
            raise serializers.ValidationError('只能发布十天以内的约运动')
        # Duration must be more than half an hour (1800 s).
        if (sport_end_time - sport_time).seconds <= 1800:
            raise serializers.ValidationError('约定运动结束时间应该大于开始时间,且应该在半小时以上')
        # Duration must stay under one day.
        if (sport_end_time - sport_time).days != 0:
            raise serializers.ValidationError('运动得适量哦,结束时间应小于一天')
        return attrs

    def create(self, validated_data):
        """Create the schedule unless one already exists for this user/time."""
        user = self.context['request'].user
        sport_time = validated_data['sport_time']
        validated_data['now_people'] = 1
        validated_data['join_type'] = 1
        validated_data['status'] = 2
        # Reuse an existing schedule at the same start time for the same user.
        existed = Schedule.objects.filter(user=user, sport_time=sport_time)
        if existed:
            pass
        else:
            existed = Schedule.objects.create(**validated_data)
        return existed

    class Meta:
        model = Schedule
        validators = [
            UniqueTogetherValidator(
                queryset=Schedule.objects.all(),
                fields=('user', 'sport_time'),
                message='该时间段已有预约'
            )
        ]
        fields = ( 'user', 'sport', 'address', 'sport_time', 'sport_end_time', 'people_nums', 'add_time', 'now_people', 'school')
|
约运动序列化
|
6259904b24f1403a926862d0
|
class TestCompareXLSXFiles(ExcelComparisonTest):
    """Test file created by XlsxWriter against a file created by Excel."""

    def setUp(self):
        self.set_filename('button10.xlsx')

    def test_create_file(self):
        """Mix cell comments and a button across three worksheets."""
        workbook = Workbook(self.got_filename)
        worksheet1 = workbook.add_worksheet()
        worksheet2 = workbook.add_worksheet()
        worksheet3 = workbook.add_worksheet()
        worksheet1.write_comment('A1', 'Some text')
        worksheet2.insert_button('B2', {})
        worksheet3.write_comment('C2', 'More text')
        worksheet1.set_comments_author('John')
        worksheet3.set_comments_author('John')
        workbook.close()
        self.assertExcelEqual()
|
Test file created by XlsxWriter against a file created by Excel.
|
6259904bb830903b9686ee7d
|
class Thread(threading.Thread):
    """A traced thread wrapper that can be killed cooperatively.

    kill() sets a flag; a sys.settrace hook raises SystemExit in the
    target thread at its next traced "line" event.
    """

    def __init__(self, *args, **kw):
        threading.Thread.__init__(self, *args, **kw)
        self.killed = False

    def start(self):
        # Swap run() for the traced wrapper before the thread launches.
        # The mangled names keep compatibility with threading internals.
        self._Thread__run_backup = self.run
        self.run = self._Thread__run
        threading.Thread.start(self)

    def __run(self):
        # Install the trace hook, execute the real run(), then restore it.
        sys.settrace(self.globaltrace)
        self._Thread__run_backup()
        self.run = self._Thread__run_backup

    def globaltrace(self, frame, event, arg):
        # Only trace inside function calls; everything else is ignored.
        return self.localtrace if event == "call" else None

    def localtrace(self, frame, event, arg):
        # Abort the thread at the next line once kill() has been requested.
        if self.killed and event == "line":
            raise SystemExit
        return self.localtrace

    def kill(self):
        """Request asynchronous termination of this thread."""
        self.killed = True
|
A traced thread wrapper.
|
6259904b462c4b4f79dbce05
|
class Parallel(object):
    """Describe the parallel (pypar/MPI) environment, if any.

    Attributes set: rank, size, node, is_parallel, log_file_tag and (in
    parallel mode) file_tag -- strings appended to file names to identify
    which process wrote them.
    """

    def __init__(self):
        try:
            import pypar
        except ImportError:
            # pypar unavailable: fall back to serial mode.
            self._not_parallel()
        else:
            if pypar.size() >= 2:
                self.rank = pypar.rank()
                self.size = pypar.size()
                self.node = pypar.get_processor_name()
                self.is_parallel = True
                self.file_tag = str(self.rank)
                self.log_file_tag = str(self.rank)
                # Ensure MPI is finalized when the interpreter exits.
                atexit.register(pypar.finalize)
            else:
                # A single process is effectively serial.
                self._not_parallel()

    def _not_parallel(self):
        """Set serial-mode defaults (single process on this host)."""
        self.rank = 0
        self.size = 1
        self.node = socket.gethostname()
        self.is_parallel = False
        # NOTE(review): file_tag is not set in serial mode -- confirm that
        # callers handle its absence.
        self.log_file_tag = str(self.rank)
|
Parallelise to run on a cluster.
:param rank: What is the id of this node in the cluster.
:param size: How many processors are there in the cluster.
:param node: name of the cluster node.
:param is_parallel: True if parallel is operational
:param file_tag: A string that can be added to files to identify who
wrote the file.
|
6259904b63b5f9789fe86573
|
class BookReviewListByUser(APIView):
    """Retrieve all book reviews made by the authenticated user."""

    permission_classes = (permissions.IsAuthenticated, IsOwnerOrReadOnly,)

    def get(self, request, format=None):
        # Filter by the requesting user's id and serialize the whole set.
        reviews = book_review.objects.filter(reviewed_by=self.request.user.id)
        serializer = BookReviewSerializer(reviews, many=True)
        return Response(serializer.data, status=status.HTTP_200_OK)
|
Retrieve all books reviews made by a user - User must be authenticated
|
6259904b45492302aabfd8d9
|
class DatasetServiceFactory(PRecord):
    """A helper for creating most of the pieces of a dataset convergence agent."""

    # Factories kept as fields so tests can substitute them.
    agent_service_factory = field(initial=AgentService.from_configuration)
    configuration_factory = field(initial=get_configuration)

    def get_service(self, reactor, options):
        """Build and return the agent's convergence loop service."""
        configuration = self.configuration_factory(options)
        agent_service = self.agent_service_factory(configuration)
        # PRecord instances are immutable; set() returns an updated copy.
        agent_service = agent_service.set(reactor=reactor)
        api = agent_service.get_api()
        deployer = agent_service.get_deployer(api)
        loop_service = agent_service.get_loop_service(deployer)
        return loop_service
|
A helper for creating most of the pieces that go into a dataset convergence
agent.
|
6259904bcb5e8a47e493cb8a
|
class SmoothClassifier(nn.Module):
    """Randomized smoothing classifier.

    Wraps `base_classifier` and classifies inputs under isotropic Gaussian
    noise of standard deviation `sigma`, enabling certified robustness radii
    via `certify`.
    """

    # Sentinel returned when no class can be selected with confidence.
    ABSTAIN = -1

    def __init__(self, base_classifier: nn.Module, num_classes: int, sigma: float):
        super(SmoothClassifier, self).__init__()
        self.base_classifier = base_classifier
        self.num_classes = num_classes
        self.sigma = sigma

    def device(self):
        # Delegate to the wrapped model (assumes it exposes device()).
        return self.base_classifier.device()

    def certify(self, inputs: torch.Tensor, n0: int, num_samples: int, alpha: float,
                batch_size: int) -> Tuple[int, float]:
        """Return (top_class, certified_radius) or (ABSTAIN, 0.0).

        Uses n0 samples (via predict) to guess the top class, then
        num_samples to lower-bound its probability; the radius is
        sigma * Phi^-1(p_lower).
        """
        self.base_classifier.eval()
        top_class = self.predict(inputs, n0, alpha, batch_size)
        if top_class == SmoothClassifier.ABSTAIN:
            return SmoothClassifier.ABSTAIN, 0.0
        class_counts = self._sample_noise_predictions(inputs, num_samples, batch_size)
        top_class_counts = class_counts[top_class].item()
        p_A_lower_bound = lower_confidence_bound(top_class_counts, num_samples, alpha)
        if p_A_lower_bound < 0.5:
            # Cannot certify when the lower bound does not exceed 1/2.
            return SmoothClassifier.ABSTAIN, 0.0
        else:
            radius = self.sigma * norm.ppf(p_A_lower_bound)
            return top_class, radius

    def predict(self, inputs: torch.tensor, num_samples: int, alpha: float,
                batch_size: int) -> int:
        """Return the majority class under noise, or ABSTAIN.

        Abstains when a two-sided binomial test between the two largest
        counts is not significant at level alpha.
        """
        self.base_classifier.eval()
        class_counts = self._sample_noise_predictions(inputs, num_samples, batch_size).cpu()
        _, indices = torch.sort(class_counts)
        top_class = indices[-1]
        count1 = class_counts[top_class]
        count2 = class_counts[indices[-2]]
        if(binom_test(count1, count1 + count2, p=0.5) > alpha):
            return SmoothClassifier.ABSTAIN
        else:
            return top_class

    def _sample_noise_predictions(self, inputs: torch.tensor,
                                  num_samples: int, batch_size: int) -> torch.Tensor:
        """Classify num_samples noisy copies of `inputs`; return per-class counts."""
        num_remaining = num_samples
        with torch.no_grad():
            class_counts = torch.zeros([self.num_classes], dtype=torch.long,
                                       device=self.device())
            for it in range(ceil(num_samples / batch_size)):
                this_batch_size = min(num_remaining, batch_size)
                # Fixed: decrement by the batch actually processed (the
                # original subtracted batch_size, which only worked by
                # coincidence of the loop bound). Also removed an unused
                # `classes = torch.arange(...)` local.
                num_remaining -= this_batch_size
                input_batch = inputs.repeat(this_batch_size, 1, 1, 1)
                logits_pert = self.forward(input_batch)
                labels_pert = torch.argmax(logits_pert.data, 1)
                class_pert, class_pert_counts = labels_pert.unique(
                    sorted=False, return_counts=True)
                for class_idx, class_count in zip(class_pert, class_pert_counts):
                    class_counts[class_idx] += class_count
        return class_counts

    def forward(self, inputs: torch.Tensor) -> torch.Tensor:
        """Forward pass on Gaussian-perturbed inputs, clamped to [0, 1]."""
        noise = torch.randn_like(inputs) * self.sigma
        return self.base_classifier((inputs + noise).clamp(0, 1))
|
Randomized smoothing classifier.
|
6259904bcad5886f8bdc5a81
|
class CacheableObject(object):
    """Object whose method results can be memoized in a local memory cache."""

    CACHE_NAME = "defaultcache"

    def __init__(self, *args, **kwargs):
        super(CacheableObject, self).__init__(*args, **kwargs)
        self.cache = LocMemCache()

    def _cached(self, method, key=None, *args, **kwargs):
        """Return method(self, *args, **kwargs), memoized under `key`.

        Raises ValueError when no key is given.
        """
        if key is None:
            raise ValueError("Invalid cache key: None")
        if key in self.cache:
            # Hit: return the cached value without calling the method.
            ret = self.cache.get(key)
            log.debug("%s(%s, %s): key=%s, value=%s" % (method.__name__, args, kwargs, key, ret))
        else:
            # Miss: compute, store, then return.
            ret = method(self, *args, **kwargs)
            self.cache.set(key, ret)
            log.debug("%s(%s, %s): key=%s, value=<EMPTY CACHE>" % (method.__name__, args, kwargs, key))
        return ret
|
Object whose methods can be cached
|
6259904b07f4c71912bb083b
|
class AT_081:
    """Eadric the Pure."""

    # On play: apply the AT_081e buff to all enemy minions.
    play = Buff(ENEMY_MINIONS, "AT_081e")
|
Eadric the Pure
|
6259904b498bea3a75a58f27
|
class PluginsDialog(aw.Dialog):
    """Dialog showing information about the installed plugins."""

    def __init__(self, *args, **kwargs):
        aw.Dialog.__init__(self, *args, **kwargs)
        p = PluginsPanel(self)
        self.AddSizedPanel(p)
        # The close button ends the modal dialog.
        self.Bind(wx.EVT_BUTTON, self.OnClose, id=wdr.ID_PLUGINEND)

    def OnClose(self, event):
        self.EndModal(wx.ID_OK)
|
Dialog Informazioni sui plugin installati.
|
6259904b379a373c97d9a431
|
class PopulationTree(AbstractTree):
    """A tree representation of country population data.

    Three levels: a 'World' root, World Bank regions below it, then
    countries; data_size holds the population figure.
    """

    def __init__(self, world, root=None, subtrees=None, data_size=0):
        """Build either the full world tree or a plain node.

        When `world` is truthy, load region data and ignore the other
        arguments; otherwise construct from root/subtrees/data_size.
        """
        if world:
            region_trees = _load_data()
            AbstractTree.__init__(self, 'World', region_trees)
        else:
            # Avoid a shared mutable default for subtrees.
            if subtrees is None:
                subtrees = []
            AbstractTree.__init__(self, root, subtrees, data_size)
|
A tree representation of country population data.
This tree always has three levels:
- The root represents the entire world.
- Each node in the second level is a region (defined by the World Bank).
- Each node in the third level is a country.
The data_size attribute corresponds to the 2014 population of the country,
as reported by the World Bank.
See https://datahelpdesk.worldbank.org/ for details about this API.
|
6259904b63d6d428bbee3bd2
|
class EnabledLink(ItemLink):
    """A menu item that may be disabled.

    Enabled by default; override _enable() to toggle based on a condition.
    Subclasses MUST define self.text, preferably as a class attribute.
    """

    def _enable(self):
        # Default: always enabled.
        return True

    def AppendToMenu(self, menu, window, selection):
        menuItem = super(EnabledLink, self).AppendToMenu(menu, window, selection)
        # Apply the enable/disable condition after the item is created.
        menuItem.Enable(self._enable())
        return menuItem
|
A menu item that may be disabled.
The item is by default enabled. Override _enable() to disable/enable
based on some condition. Subclasses MUST define self.text, preferably as
a class attribute.
|
6259904b3eb6a72ae038ba63
|
class TriangularScheduler(optim.lr_scheduler._LRScheduler):
    """Cyclical learning-rate scheduler with a triangular shape.

    The lr value per step is computed by `_triangular_f` from step_size,
    min_lr and max_lr; each param group may scale it via an 'lr_mult' key.
    """

    def __init__(self, step_size:int, min_lr:float, max_lr:float, optimizer:optim.Optimizer):
        self.step_size = step_size
        self.min_lr = min_lr
        self.max_lr = max_lr
        super().__init__(optimizer)

    def get_lr(self):
        # last_epoch counts scheduler steps (iterations), not epochs.
        it = self.last_epoch
        lr = _triangular_f(it, self.step_size, self.min_lr, self.max_lr)
        return [lr * pg.get('lr_mult', 1) for pg in self.optimizer.param_groups]
|
Cyclical learning-rate scheduler with a triangular shape, oscillating between min_lr and max_lr over step_size iterations.
|
6259904ba79ad1619776b487
|
class MultiRateCyclicSendTask(CyclicSendTask):
    """Exposes more of the full power of the TX_SETUP opcode.

    Transmits a message `count` times at `initial_period`, then continues
    transmitting it at `subsequent_period`.
    """

    def __init__(self, channel, message, count, initial_period, subsequent_period):
        super(MultiRateCyclicSendTask, self).__init__(channel, message, subsequent_period)
        msg_frame = _build_can_frame(message)
        # Each period is split into whole seconds and remaining microseconds,
        # as required by the BCM interval fields.
        frame = _create_bcm_frame(
            opcode=CAN_BCM_TX_SETUP,
            flags=SETTIMER | STARTTIMER,
            count=count,
            ival1_seconds=int(initial_period),
            ival1_usec=int(1e6 * (initial_period - int(initial_period))),
            ival2_seconds=int(subsequent_period),
            ival2_usec=int(1e6 * (subsequent_period - int(subsequent_period))),
            can_id=message.arbitration_id,
            nframes=1,
            msg_frame=msg_frame)
        log.info("Sending BCM TX_SETUP command")
        bytes_sent = libc.send(self.bcm_socket, ctypes.byref(frame), ctypes.sizeof(frame))
        # NOTE(review): failure is only logged, not raised -- confirm this
        # best-effort behavior is intended.
        if bytes_sent == -1:
            log.debug("Error sending frame :-/")
|
Exposes more of the full power of the TX_SETUP opcode.
Transmits a message `count` times at `initial_period` then
continues to transmit message at `subsequent_period`.
|
6259904b4e696a045264e824
|
class Variable(metaclass=_VariableMeta):
    """Class representing a Variable.

    Subclasses must/can implement:
    - `__init__`, calling the parent with a tosh instance
    - a `prefix` attribute to be loadable as a literal (optional)
    - `_load()` to initialize the variable, called from a task
    - `load_in_box()` run when opening an interactive session (optional)
    """

    def __init__(self, tosh):
        self._tosh = tosh
        self._var_name = None

    @classmethod
    def load_task(cls, tosh, argument):
        """Return a task that loads this variable type from `argument`."""
        return LoadVariableTask(tosh, cls, argument)

    @classmethod
    @task("Loading {pos[0].class_name} {pos[1]}")
    async def load(cls, argument, *, task):
        # Delegate to the subclass-provided loader.
        return (await cls._load(argument, task))

    def _token(self, text, style=Token.Task.Result):
        # Build a (style, text) token for display.
        return (style, text)

    async def load_in_box(self, handler):
        # Optional hook; the default is a no-op.
        pass

    def type(self):
        return self.__class__

    @property
    def var_name(self):
        # An explicitly assigned name wins over the subclass default.
        if self._var_name:
            return self._var_name
        else:
            return self.default_var_name

    @var_name.setter
    def var_name(self, varname):
        self._var_name = varname

    async def attribute(self, attrname, task=None):
        """Evaluate a registered attribute, awaiting tasks and auto-boxing."""
        def _is_task_function(func):
            # Task-producing callables are marked with _returns_task.
            try:
                return func._returns_task
            except:
                return False

        def _autobox(result):
            # Wrap plain str/int results in their variable types.
            from .vars import String, Integer
            if isinstance(result, str):
                return String(self._tosh, result)
            elif isinstance(result, int):
                return Integer(self._tosh, result)
            return result

        attribute_task = self.attributes[attrname][1]
        if _is_task_function(attribute_task):
            # Run as a sub-task when a parent task is provided.
            if task:
                result = await task.sub(attribute_task, self)
            else:
                result = await attribute_task(self)
        else:
            result = attribute_task(self)
        return _autobox(result)
|
Class representing a Variable.
Subclasses must/can implement:
- `__init__`, calling the parent with a tosh instance
- `prefix` attribute if they want to be loaded as literals, e.g: u"username" (optional)
- `_load()` to initialize the variable, called from a task
- `tokens()` for screen representation
- `load_in_box()` to be executed when opening an interactive rails session with this variable (optional)
Attributes can be registered like this (they can be plain functions or tasks (@task)):
```
@attributes.register("attribute", Type)
def attribute(self):
pass
```
|
6259904b3cc13d1c6d466b40
|
class SSSDCheck8to9(Actor):
    """Check SSSD configuration for RHEL9-relevant changes and report them.

    The implicit files domain is disabled by default in RHEL9; if there is
    no explicit files domain and smartcard authentication is enabled, the
    administrator is notified.
    """

    name = 'sssd_check_8to9'
    consumes = (SSSDConfig8to9,)
    produces = (Report,)
    tags = (IPUWorkflowTag, ChecksPhaseTag)

    def process(self):
        model = next(self.consume(SSSDConfig8to9), None)
        if not model:
            # No config model was produced; nothing to check.
            return
        if model.enable_files_domain_set:
            # Option set explicitly; the default change does not apply.
            return
        if model.explicit_files_domain:
            # An explicit files domain exists; smartcard auth is unaffected.
            return
        if not model.pam_cert_auth:
            # Smartcard authentication not enabled; nothing to report.
            return
        create_report([
            reporting.Title('SSSD implicit files domain is now disabled by default.'),
            reporting.Summary('Default value of [sssd]/enable_files_domain has '
                              'changed from true to false.'),
            reporting.Tags(COMMON_REPORT_TAGS),
            reporting.Remediation(
                hint='If you use smartcard authentication for local users, '
                     'set this option to true explicitly and call '
                     '"authselect enable-feature with-files-domain".'
            ),
            reporting.Severity(reporting.Severity.MEDIUM)
        ] + related)
|
Check SSSD configuration for changes in RHEL9 and report them in model.
Implicit files domain is disabled by default. This may affect local
smartcard authentication if there is not explicit files domain created.
If there is no files domain and smartcard authentication is enabled,
we will notify the administrator.
|
6259904bdc8b845886d549c5
|
class DataProvider:
    """Data provider interface (contract)."""

    def all(self) -> Collection[str]:
        """Return every available item; concrete providers must override."""
        raise NotImplementedError()
|
Data provider interface (contract)
|
6259904bb830903b9686ee7e
|
class HTTPAuth(object):
    """HTTP authentication helper using a bearer authorization token."""

    def __init__(self):
        def default_auth_error():
            return "Unauthorized Access"
        self.auth_error_callback = None
        self.get_verify_token_callback = None
        # Install the default 401 error handler.
        self.error_handler(default_auth_error)

    def error_handler(self, f):
        """Register `f` as the unauthorized-access handler (decorator)."""
        @wraps(f)
        def decorated(*args, **kwargs):
            res = f(*args, **kwargs)
            # Plain-string results are turned into a 401 response.
            if type(res) == str:
                res = make_response(res)
                res.status_code = 401
            return res
        self.auth_error_callback = decorated
        return decorated

    def login_required(self, f):
        """Decorator enforcing a valid bearer token on the wrapped view."""
        @wraps(f)
        def decorated(*args, **kwargs):
            token = self.get_token()
            if not self.authenticate(token):
                return self.auth_error_callback()
            return f(*args, **kwargs)
        return decorated

    def get_token(self):
        """Extract the token from an 'Authorization: Bearer <t>' header, or None."""
        auth = request.headers.get('authorization')
        if auth:
            auth = auth.split()
            # Require exactly "Bearer <non-empty-token>".
            if len(auth) != 2 or auth[0] != 'Bearer' or not auth[1]:
                token = None
            else:
                token = auth[1]
        else:
            token = None
        return token

    def verify_token(self, f):
        """Register the token verification callback (decorator)."""
        self.get_verify_token_callback = f
        return f

    def authenticate(self, token):
        # Delegates entirely to the registered verify_token callback.
        return self.get_verify_token_callback(token)
|
HTTP Bases authentication using authorization token
|
6259904be76e3b2f99fd9e13
|
class S3OrgMenuLayout(S3NavigationItem):
    """Layout for the organisation-specific menu."""

    @staticmethod
    def layout(item):
        """Return a (name, logo) pair for the current root organisation.

        Falls back to "IFRC" and a dummy flag image when no root
        organisation (or no logo) is available.
        """
        name = "IFRC"
        logo = None
        root_org = current.auth.root_org()
        if root_org:
            s3db = current.s3db
            table = s3db.org_organisation
            record = current.db(table.id == root_org).select(table.name,
                                                             table.acronym,
                                                             table.logo,
                                                             limitby = (0, 1),
                                                             cache = s3db.cache,
                                                             ).first()
            if record:
                if record.acronym:
                    name = _name = record.acronym
                else:
                    # Insert line breaks between words so long names wrap.
                    _name = record.name
                    names = _name.split(" ")
                    names_with_breaks = []
                    nappend = names_with_breaks.append
                    for name in names:
                        nappend(name)
                        nappend(BR())
                    # Drop the trailing BR().
                    names_with_breaks.pop()
                    name = TAG[""](*names_with_breaks)
                if record.logo:
                    size = (60, None)
                    image = s3db.pr_image_represent(record.logo, size=size)
                    url_small = URL(c="default", f="download", args=image)
                    alt = "%s logo" % _name
                    logo = IMG(_src=url_small, _alt=alt, _width=60, )
        if not logo:
            # Default logo when the organisation has none.
            logo = IMG(_src="/eden/static/themes/IFRC/img/dummy_flag.png", _alt=current.T("Red Cross/Red Crescent"), _width=60, )
        return (name, logo)
|
Layout for the organisation-specific menu
|
6259904b009cb60464d0293e
|
class OpticalGaussian(RegriddableModel1D): <NEW_LINE> <INDENT> def __init__(self, name='opticalgaussian'): <NEW_LINE> <INDENT> self.fwhm = Parameter(name, 'fwhm', 100., tinyval, hard_min=tinyval, units="km/s") <NEW_LINE> self.pos = Parameter(name, 'pos', 5000., tinyval, frozen=True, units='angstroms') <NEW_LINE> self.tau = Parameter(name, 'tau', 0.5) <NEW_LINE> self.limit = Parameter(name, 'limit', 4., alwaysfrozen=True, hidden=True ) <NEW_LINE> ArithmeticModel.__init__(self, name, (self.fwhm, self.pos, self.tau, self.limit)) <NEW_LINE> <DEDENT> def calc(self, p, x, xhi=None, **kwargs): <NEW_LINE> <INDENT> x = numpy.asarray(x, dtype=SherpaFloat) <NEW_LINE> if 0.0 == p[0]: <NEW_LINE> <INDENT> raise ValueError('model evaluation failed, ' + '%s fwhm cannot be zero' % self.name) <NEW_LINE> <DEDENT> if 0.0 == p[1]: <NEW_LINE> <INDENT> raise ValueError('model evaluation failed, ' + '%s pos cannot be zero' % self.name) <NEW_LINE> <DEDENT> y = numpy.ones_like(x) <NEW_LINE> sigma = p[1] * p[0] / 705951.5 <NEW_LINE> delta = numpy.abs((x - p[1]) / sigma) <NEW_LINE> idx = (delta < self.limit.val) <NEW_LINE> y[idx] = numpy.exp(-p[2] * numpy.exp(- delta[idx] * delta[idx] / 2.0)) <NEW_LINE> return y
|
Gaussian function for modeling absorption (optical depth).
This model is intended to be used to modify another model (e.g.
by multiplying the two together). It is for use when the
independent axis is in Angstroms.
Attributes
----------
fwhm
The full-width half-maximum of the model in km/s.
pos
The center of the gaussian, in Angstroms.
tau
The optical depth of the model.
limit
The model is only evaluated for points that lie within
limit sigma of pos. It is a hidden parameter, with a
value fixed at 4.
See Also
--------
AbsorptionEdge, AbsorptionGaussian, AbsorptionLorentz,
AbsorptionVoigt, EmissionGaussian
Notes
-----
The functional form of the model for points is::
f(x) = exp(-tau * g(x))
g(x) = exp(-0.5 * ((x - pos) / sigma)^2)
sigma = pos * fwhm / (2.9979e5 * 2.354820044)
and for integrated data sets the low-edge of the grid is used.
The calculation is only done for those points that are in the
range::
|x - pos| < limit * sigma
Outside this range the model is set to 1.
|
6259904b94891a1f408ba0f9
|
class Win32Wrapper: <NEW_LINE> <INDENT> def __init__(self) -> None: <NEW_LINE> <INDENT> pythoncom.CoInitialize() <NEW_LINE> self.wmi = win32com.client.GetObject("winmgmts:") <NEW_LINE> <DEDENT> def _read_cdispatch_fields(self, win32_element: Any, element_fields_list: List[str]) -> dict: <NEW_LINE> <INDENT> if not win32_element: <NEW_LINE> <INDENT> return dict() <NEW_LINE> <DEDENT> return {k: self._read_cdispatch_field(win32_element, k) for k in element_fields_list} <NEW_LINE> <DEDENT> def _read_cdispatch_field(self, win32_element: Any, key: str) -> Any: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return getattr(win32_element, key) <NEW_LINE> <DEDENT> except AttributeError as e: <NEW_LINE> <INDENT> logger.debug(f"Failed getting an attribute value: {e}") <NEW_LINE> return UNKNOWN_VALUE <NEW_LINE> <DEDENT> <DEDENT> def map_element(self, win32_element: Any, to_cls: type) -> ComponentDescriptor: <NEW_LINE> <INDENT> instance = to_cls() <NEW_LINE> instance.set_data_values(self._read_cdispatch_fields(win32_element, instance.field_names)) <NEW_LINE> return cast(ComponentDescriptor, instance) <NEW_LINE> <DEDENT> def _get_list_iterator(self, win32_class_name: str, list_filter: Optional[str]) -> Generator[Any, None, None]: <NEW_LINE> <INDENT> if list_filter: <NEW_LINE> <INDENT> query = f"Select * from {win32_class_name} where {list_filter}" <NEW_LINE> return self.wmi.ExecQuery(query) <NEW_LINE> <DEDENT> return self.wmi.InstancesOf(win32_class_name) <NEW_LINE> <DEDENT> def element_generator( self, to_cls: type, win32_class_name: str, list_filter: Optional[str] ) -> Generator["ComponentDescriptor", None, None]: <NEW_LINE> <INDENT> for element in self._get_list_iterator(win32_class_name, list_filter): <NEW_LINE> <INDENT> yield self.map_element(win32_element=element, to_cls=to_cls)
|
Wraps win32 objects and methods in order to simplify their use.
|
6259904bd53ae8145f91986a
|
class ContinousColorRange(ColorRange): <NEW_LINE> <INDENT> __swig_setmethods__ = {} <NEW_LINE> for _s in [ColorRange]: __swig_setmethods__.update(getattr(_s,'__swig_setmethods__',{})) <NEW_LINE> __setattr__ = lambda self, name, value: _swig_setattr(self, ContinousColorRange, name, value) <NEW_LINE> __swig_getmethods__ = {} <NEW_LINE> for _s in [ColorRange]: __swig_getmethods__.update(getattr(_s,'__swig_getmethods__',{})) <NEW_LINE> __getattr__ = lambda self, name: _swig_getattr(self, ContinousColorRange, name) <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def __init__(self, *args): <NEW_LINE> <INDENT> this = _ilwisobjects.new_ContinousColorRange(*args) <NEW_LINE> try: self.this.append(this) <NEW_LINE> except: self.this = this <NEW_LINE> <DEDENT> def clone(self): <NEW_LINE> <INDENT> return _ilwisobjects.ContinousColorRange_clone(self) <NEW_LINE> <DEDENT> def ensure(self, *args): <NEW_LINE> <INDENT> return _ilwisobjects.ContinousColorRange_ensure(self, *args) <NEW_LINE> <DEDENT> def containsVar(self, *args): <NEW_LINE> <INDENT> return _ilwisobjects.ContinousColorRange_containsVar(self, *args) <NEW_LINE> <DEDENT> def containsColor(self, *args): <NEW_LINE> <INDENT> return _ilwisobjects.ContinousColorRange_containsColor(self, *args) <NEW_LINE> <DEDENT> def containsRange(self, *args): <NEW_LINE> <INDENT> return _ilwisobjects.ContinousColorRange_containsRange(self, *args) <NEW_LINE> <DEDENT> def impliedValue(self, *args): <NEW_LINE> <INDENT> return _ilwisobjects.ContinousColorRange_impliedValue(self, *args) <NEW_LINE> <DEDENT> __swig_destroy__ = _ilwisobjects.delete_ContinousColorRange <NEW_LINE> __del__ = lambda self : None;
|
Proxy of C++ pythonapi::ContinousColorRange class
|
6259904bb57a9660fecd2e85
|
class UserDetailsConfirmForm(forms.ModelForm): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self.fields['first_name'].required = True <NEW_LINE> self.fields['last_name'].required = True <NEW_LINE> self.confirm_button_text = 'Confirm' <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> model = Customer <NEW_LINE> fields = ['first_name', 'last_name', ]
|
These details may be prepopulated with the Oauth
Scope Data
|
6259904b91af0d3eaad3b22d
|
class TransformixCoordinateTransformationWorkflow(WorkflowBase): <NEW_LINE> <INDENT> input_path = luigi.Parameter() <NEW_LINE> input_key = luigi.Parameter() <NEW_LINE> output_path = luigi.Parameter() <NEW_LINE> output_key = luigi.Parameter() <NEW_LINE> transformation_file = luigi.Parameter() <NEW_LINE> elastix_directory = luigi.Parameter() <NEW_LINE> shape = luigi.Parameter() <NEW_LINE> resolution = luigi.Parameter(default=None) <NEW_LINE> scale_factor = luigi.FloatParameter(default=1.e-3) <NEW_LINE> def requires(self): <NEW_LINE> <INDENT> transformix_task = getattr(transformix_coordinate_tasks, self._get_task_name('TransformixCoordinate')) <NEW_LINE> if self.resolution is None: <NEW_LINE> <INDENT> resolution = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> resolution = [res * self.scale_factor for res in self.resolution] <NEW_LINE> <DEDENT> dep = transformix_task(tmp_folder=self.tmp_folder, max_jobs=self.max_jobs, config_dir=self.config_dir, dependency=self.dependency, input_path=self.input_path, input_key=self.input_key, output_path=self.output_path, output_key=self.output_key, transformation_file=self.transformation_file, elastix_directory=self.elastix_directory, resolution=resolution, shape=self.shape) <NEW_LINE> return dep <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_config(): <NEW_LINE> <INDENT> configs = super(TransformixCoordinateTransformationWorkflow, TransformixCoordinateTransformationWorkflow).get_config() <NEW_LINE> configs.update({'transformix_coordinate': transformix_coordinate_tasks.TransformixCoordinateLocal.default_task_config()}) <NEW_LINE> return configs
|
Apply elastix transform via transformix based on transforming coordinates.
|
6259904b07f4c71912bb083d
|
class LoginTestCase(APITestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpTestData(cls): <NEW_LINE> <INDENT> cls.login_url = reverse('api:auth:login') <NEW_LINE> cls.user = UserFactory() <NEW_LINE> cls.user_password = 'Hola.Chau' <NEW_LINE> cls.user.set_password(cls.user_password) <NEW_LINE> cls.user.save(update_fields=['password']) <NEW_LINE> <DEDENT> def test_successful(self): <NEW_LINE> <INDENT> self.client.logout() <NEW_LINE> data = { 'username': self.user.username, 'password': self.user_password } <NEW_LINE> response = self.client.post(self.login_url, data) <NEW_LINE> assert response.status_code == status.HTTP_200_OK <NEW_LINE> assert response.data['user']['id'] == self.user.id <NEW_LINE> assert response.data['token'] is not None <NEW_LINE> assert response.data['token'] != '' <NEW_LINE> <DEDENT> def test_wrong_credentials(self): <NEW_LINE> <INDENT> self.client.logout() <NEW_LINE> data = {'username': self.user.username, 'password': 'asd'} <NEW_LINE> response = self.client.post(self.login_url, data) <NEW_LINE> assert response.status_code == status.HTTP_400_BAD_REQUEST <NEW_LINE> <DEDENT> def test_user_disabled(self): <NEW_LINE> <INDENT> self.client.logout() <NEW_LINE> self.user.is_active = False <NEW_LINE> self.user.save(update_fields=['is_active']) <NEW_LINE> data = { 'username': self.user.username, 'password': self.user_password } <NEW_LINE> response = self.client.post(self.login_url, data) <NEW_LINE> assert response.status_code == status.HTTP_400_BAD_REQUEST <NEW_LINE> self.user.is_active = True <NEW_LINE> self.user.save(update_fields=['is_active'])
|
Test JWT login.
|
6259904bec188e330fdf9ca7
|
class MutationHelper(object): <NEW_LINE> <INDENT> num_trials = 1000 <NEW_LINE> def _always_mutate(self, mutator, expected_percent): <NEW_LINE> <INDENT> num_mutations = 0 <NEW_LINE> for trial in range(self.num_trials): <NEW_LINE> <INDENT> new_org = mutator.mutate(self.organism) <NEW_LINE> if new_org != self.organism: <NEW_LINE> <INDENT> num_mutations += 1 <NEW_LINE> <DEDENT> <DEDENT> percent_mutants = float(num_mutations) / float(self.num_trials) <NEW_LINE> assert percent_mutants > expected_percent, "Did not recieve an acceptable number of mutations." <NEW_LINE> <DEDENT> def _never_mutate(self, mutator): <NEW_LINE> <INDENT> for trial in range(self.num_trials): <NEW_LINE> <INDENT> new_org = mutator.mutate(self.organism) <NEW_LINE> assert new_org == self.organism, "Unexpected mutation found"
|
Mixin class which provides useful functions for testing mutations.
|
6259904b435de62698e9d210
|
class BaconDecorator(DecoratorFood): <NEW_LINE> <INDENT> def __init__(self, food_wrapper): <NEW_LINE> <INDENT> DecoratorFood.__init__(self, food_wrapper) <NEW_LINE> <DEDENT> def description(self): <NEW_LINE> <INDENT> return DecoratorFood.description(self) + ', with bacon' <NEW_LINE> <DEDENT> def price(self): <NEW_LINE> <INDENT> return DecoratorFood.price(self) + 3
|
This class is responsible
to decorate a food with Bacon
|
6259904b4e696a045264e825
|
class Game: <NEW_LINE> <INDENT> def __init__( self, p1_cls: Type[Player], p2_cls: Type[Player], p1_kwargs: Dict[str, object]={}, p2_kwargs: Dict[str, object]={}): <NEW_LINE> <INDENT> self._board: Board = Board() <NEW_LINE> self._pnum2player: Dict[int, Player] = { 1: p1_cls(1, self._board, **p1_kwargs), 2: p2_cls(2, self._board, **p2_kwargs)} <NEW_LINE> self._winner: Optional[int] = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def board(self): <NEW_LINE> <INDENT> return self._board <NEW_LINE> <DEDENT> @property <NEW_LINE> def winner(self) -> Optional[int]: <NEW_LINE> <INDENT> return self._winner <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def is_winner(pnum: int, board: Board) -> bool: <NEW_LINE> <INDENT> pchipidxs = board.chip_idxs[pnum] <NEW_LINE> for r, c in pchipidxs: <NEW_LINE> <INDENT> for dr, dc in ((0, 1), (1, 0), (1, 1), (-1, 1)): <NEW_LINE> <INDENT> consec = consec_direction(pchipidxs, r, c, dr, dc) <NEW_LINE> if consec >= 4: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def play(self, verbose=False) -> None: <NEW_LINE> <INDENT> for pnum in cycle((1, 2)): <NEW_LINE> <INDENT> column = self._pnum2player[pnum].move() <NEW_LINE> last_move = self._board.place_chip(pnum, column) <NEW_LINE> if verbose: <NEW_LINE> <INDENT> print((f'Player {pnum} placed a chip in column {last_move[1]}' + f' -> row {last_move[0]}')) <NEW_LINE> print(self._board) <NEW_LINE> <DEDENT> if Game.is_winner(pnum, self._board): <NEW_LINE> <INDENT> self._winner = pnum <NEW_LINE> break <NEW_LINE> <DEDENT> elif self._board.isfull(): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> if verbose: <NEW_LINE> <INDENT> print('Tie' if self._winner is None else f'Player {self._winner} wins!')
|
Connect 4 game class. Creates and plays connect 4 game instances
|
6259904b8e05c05ec3f6f85f
|
class EyeballTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def check40BitOptions(self): <NEW_LINE> <INDENT> userPass = 'userpass' <NEW_LINE> for canPrint in (0, 1): <NEW_LINE> <INDENT> for canModify in (0, 1): <NEW_LINE> <INDENT> for canCopy in (0, 1): <NEW_LINE> <INDENT> for canAnnotate in (0, 1): <NEW_LINE> <INDENT> for strength in (40, 128): <NEW_LINE> <INDENT> p = m = c = a = 'x' <NEW_LINE> if canPrint: p = 'P' <NEW_LINE> if canModify: m = 'M' <NEW_LINE> if canCopy: c = 'C' <NEW_LINE> if canAnnotate: a = 'A' <NEW_LINE> filename = 'test_crypto_%03dbit_%s_%s%s%s%s.pdf' % ( strength, userPass, p, m, c, a) <NEW_LINE> import os <NEW_LINE> filepath = outputfile(filename) <NEW_LINE> canv = Canvas(filepath) <NEW_LINE> canv.setFont('Helvetica', 24) <NEW_LINE> canv.drawString(100,700, 'PDF Encryption test case') <NEW_LINE> canv.setFont('Helvetica', 16) <NEW_LINE> canv.drawString(100, 675, 'Verify by looking at File - Document Info - Security') <NEW_LINE> canv.drawString(100, 600, 'open password = %s' % userPass) <NEW_LINE> canv.drawString(100, 575, 'strength = %d buts' % strength) <NEW_LINE> canv.drawString(100, 500, 'canPrint = %d' % canPrint) <NEW_LINE> canv.drawString(100, 475, 'canModify = %d' % canModify) <NEW_LINE> canv.drawString(100, 450, 'canCopy = %d' % canCopy) <NEW_LINE> canv.drawString(100, 425, 'canAnnotate = %d' % canAnnotate) <NEW_LINE> encryptCanvas(canv, userPass, canPrint=canPrint, canModify=canModify, canCopy=canCopy, canAnnotate=canAnnotate, strength=strength) <NEW_LINE> canv.save() <NEW_LINE> if VERBOSE: print('saved %s' % filepath)
|
This makes a gaxillion self-explanatory files
|
6259904c097d151d1a2c2478
|
class CreateWhitelistResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Msg = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Msg = params.get("Msg") <NEW_LINE> self.RequestId = params.get("RequestId")
|
CreateWhitelist返回参数结构体
|
6259904c1f037a2d8b9e5271
|
class TransmissionFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): <NEW_LINE> <INDENT> VERSION = 1 <NEW_LINE> CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL <NEW_LINE> @staticmethod <NEW_LINE> @callback <NEW_LINE> def async_get_options_flow(config_entry): <NEW_LINE> <INDENT> return TransmissionOptionsFlowHandler(config_entry) <NEW_LINE> <DEDENT> async def async_step_user(self, user_input=None): <NEW_LINE> <INDENT> errors = {} <NEW_LINE> if user_input is not None: <NEW_LINE> <INDENT> for entry in self.hass.config_entries.async_entries(DOMAIN): <NEW_LINE> <INDENT> if ( entry.data[CONF_HOST] == user_input[CONF_HOST] and entry.data[CONF_PORT] == user_input[CONF_PORT] ): <NEW_LINE> <INDENT> return self.async_abort(reason="already_configured") <NEW_LINE> <DEDENT> if entry.data[CONF_NAME] == user_input[CONF_NAME]: <NEW_LINE> <INDENT> errors[CONF_NAME] = "name_exists" <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> await get_api(self.hass, user_input) <NEW_LINE> <DEDENT> except AuthenticationError: <NEW_LINE> <INDENT> errors[CONF_USERNAME] = "invalid_auth" <NEW_LINE> errors[CONF_PASSWORD] = "invalid_auth" <NEW_LINE> <DEDENT> except (CannotConnect, UnknownError): <NEW_LINE> <INDENT> errors["base"] = "cannot_connect" <NEW_LINE> <DEDENT> if not errors: <NEW_LINE> <INDENT> return self.async_create_entry( title=user_input[CONF_NAME], data=user_input ) <NEW_LINE> <DEDENT> <DEDENT> return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors=errors, ) <NEW_LINE> <DEDENT> async def async_step_import(self, import_config): <NEW_LINE> <INDENT> import_config[CONF_SCAN_INTERVAL] = import_config[ CONF_SCAN_INTERVAL ].total_seconds() <NEW_LINE> return await self.async_step_user(user_input=import_config)
|
Handle Tansmission config flow.
|
6259904c6e29344779b01a4c
|
class DCAMTimeoutError(DCAMError): <NEW_LINE> <INDENT> pass
|
Timeout while waiting.
|
6259904cb57a9660fecd2e87
|
class ConnectionError(Exception): <NEW_LINE> <INDENT> pass
|
Raised when connection to Cloud Datastore Emulator is lost.
|
6259904ccb5e8a47e493cb8c
|
class StatePyDriver(py_driver.PyDriver): <NEW_LINE> <INDENT> def run( self, time_step, policy_state = () ): <NEW_LINE> <INDENT> num_steps = 0 <NEW_LINE> num_episodes = 0 <NEW_LINE> while num_steps < self._max_steps and num_episodes < self._max_episodes: <NEW_LINE> <INDENT> action_step = self.policy.action(time_step, policy_state) <NEW_LINE> next_time_step = self.env.step(action_step.action) <NEW_LINE> if next_time_step.step_type == 0: <NEW_LINE> <INDENT> policy_state = self.policy.get_initial_state(self.env.batch_size or 1) <NEW_LINE> <DEDENT> next_time_step.observation['policy_state'] = ( policy_state['actor_network_state'][0][0].numpy(), policy_state['actor_network_state'][1][0].numpy()) <NEW_LINE> traj = trajectory.from_transition(time_step, action_step, next_time_step) <NEW_LINE> for observer in self._transition_observers: <NEW_LINE> <INDENT> observer((time_step, action_step, next_time_step)) <NEW_LINE> <DEDENT> for observer in self.observers: <NEW_LINE> <INDENT> observer(traj) <NEW_LINE> <DEDENT> num_episodes += np.sum(traj.is_boundary()) <NEW_LINE> num_steps += np.sum(~traj.is_boundary()) <NEW_LINE> time_step = next_time_step <NEW_LINE> policy_state = action_step.state <NEW_LINE> <DEDENT> return time_step, policy_state
|
A PyDriver that adds policy state to observations.
These policy states are used to compute attention weights in the attention
architecture.
|
6259904c8e71fb1e983bced0
|
class ProfileForm(CSRFForm): <NEW_LINE> <INDENT> email = EmailField("Team E-Mail", validators=[required_validator]) <NEW_LINE> old_password = PasswordField( "Old Password", validators=[password_required_and_valid_if_pw_change], description=("This only needs to be entered if you wish to change " "your password, otherwise, it is not required."), ) <NEW_LINE> password = PasswordField("New Password", validators=[password_equal_validator, password_min_length_if_set_validator, password_max_length_if_set_validator, ] ) <NEW_LINE> password_repeat = PasswordField("Repeat New Password") <NEW_LINE> avatar = AvatarField( "Avatar", description=("Upload an avatar image. The File must not be larger " "than %d%s and must have maximum dimensions of 90x25px" % (avatar_size_validator.max_size, avatar_size_validator.unit) ), validators=[avatar_size_validator], ) <NEW_LINE> country = QuerySelectField("Country/State", query_factory=get_all_countries ) <NEW_LINE> timezone = SelectField("Timezone", choices=[(tz, tz) for tz in common_timezones], default=((utc.zone, utc.zone)), ) <NEW_LINE> size = team_size_field() <NEW_LINE> submit = SubmitField("Save") <NEW_LINE> cancel = SubmitField("Cancel") <NEW_LINE> def __init__(self, formdata=None, obj=None, prefix='', csrf_context=None, **kwargs): <NEW_LINE> <INDENT> if obj is None: <NEW_LINE> <INDENT> raise ValueError("Must always provide an existing team") <NEW_LINE> <DEDENT> CSRFForm.__init__(self, formdata, obj, prefix, csrf_context, **kwargs)
|
A form to edit a team's profile.
Attrs:
``email``: The email address. Required
``old_password``: The old password, needed only for a password change.
``password``: The password. Optional, only needed if wanting to change.
``password_repeat``: Repeat the new password.
``avatar``: Display an avatar and upload a new one.
``country``: Change location. Required.
``timezone``: Change timezone. Required.
``submit``: Save changes
``cancel``: Abort
|
6259904c10dbd63aa1c71fe7
|
class MockedGoal(object): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name
|
Mocked Goal for testing
|
6259904cd4950a0f3b111848
|
class _DigitalUnit(_Unit): <NEW_LINE> <INDENT> pass
|
Defines the abstract digital-unit class for tagging.
@since 2018.07.23
@author tsungjung411@gmail.com
@see http://tw.bestconverter.org/unitconverter_number.php
|
6259904cb57a9660fecd2e88
|
class ChangeLogEntry(object): <NEW_LINE> <INDENT> version_class = Version <NEW_LINE> def __init__(self, date=None, version=None, **kwargs): <NEW_LINE> <INDENT> self.__dict__.update(kwargs) <NEW_LINE> if version: <NEW_LINE> <INDENT> self.version = self.version_class(version) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.version = None <NEW_LINE> <DEDENT> self.date = date <NEW_LINE> self.messages = [] <NEW_LINE> <DEDENT> def add_message(self, msg): <NEW_LINE> <INDENT> self.messages.append([msg]) <NEW_LINE> <DEDENT> def complete_latest_message(self, msg_suite): <NEW_LINE> <INDENT> if not self.messages: <NEW_LINE> <INDENT> print('Ignoring %r (unexpected format)' % msg_suite, file=sys.stderr) <NEW_LINE> <DEDENT> self.messages[-1].append(msg_suite) <NEW_LINE> <DEDENT> def write(self, stream=sys.stdout): <NEW_LINE> <INDENT> stream.write('%s -- %s\n' % (self.date or '', self.version or '')) <NEW_LINE> for msg in self.messages: <NEW_LINE> <INDENT> stream.write('%s%s %s\n' % (INDENT, BULLET, msg[0])) <NEW_LINE> stream.write(''.join(msg[1:]))
|
a change log entry, ie a set of messages associated to a version and
its release date
|
6259904c379a373c97d9a435
|
class JSONRPCv1(JSONRPC): <NEW_LINE> <INDENT> allow_batches = False <NEW_LINE> @classmethod <NEW_LINE> def _message_id(cls, message, require_id): <NEW_LINE> <INDENT> if 'id' not in message: <NEW_LINE> <INDENT> raise ProtocolError.invalid_request('request has no "id"') <NEW_LINE> <DEDENT> return message['id'] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _request_args(cls, request): <NEW_LINE> <INDENT> args = request.get('params') <NEW_LINE> if not isinstance(args, list): <NEW_LINE> <INDENT> raise ProtocolError.invalid_args( f'invalid request arguments: {args}') <NEW_LINE> <DEDENT> return args <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _best_effort_error(cls, error): <NEW_LINE> <INDENT> code = cls.ERROR_CODE_UNAVAILABLE <NEW_LINE> message = 'no error message provided' <NEW_LINE> if isinstance(error, str): <NEW_LINE> <INDENT> message = error <NEW_LINE> <DEDENT> elif isinstance(error, int): <NEW_LINE> <INDENT> code = error <NEW_LINE> <DEDENT> elif isinstance(error, dict): <NEW_LINE> <INDENT> if isinstance(error.get('message'), str): <NEW_LINE> <INDENT> message = error['message'] <NEW_LINE> <DEDENT> if isinstance(error.get('code'), int): <NEW_LINE> <INDENT> code = error['code'] <NEW_LINE> <DEDENT> <DEDENT> return RPCError(code, message) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def response_value(cls, payload): <NEW_LINE> <INDENT> if 'result' not in payload or 'error' not in payload: <NEW_LINE> <INDENT> raise ProtocolError.invalid_request( 'response must contain both "result" and "error"') <NEW_LINE> <DEDENT> result = payload['result'] <NEW_LINE> error = payload['error'] <NEW_LINE> if error is None: <NEW_LINE> <INDENT> return result <NEW_LINE> <DEDENT> if result is not None: <NEW_LINE> <INDENT> raise ProtocolError.invalid_request( 'response has a "result" and an "error"') <NEW_LINE> <DEDENT> return cls._best_effort_error(error) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def request_payload(cls, request, request_id): <NEW_LINE> <INDENT> if 
isinstance(request.args, dict): <NEW_LINE> <INDENT> raise ProtocolError.invalid_args( 'JSONRPCv1 does not support named arguments') <NEW_LINE> <DEDENT> return { 'method': request.method, 'params': request.args, 'id': request_id } <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def response_payload(cls, result, request_id): <NEW_LINE> <INDENT> return { 'result': result, 'error': None, 'id': request_id } <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def error_payload(cls, error, request_id): <NEW_LINE> <INDENT> return { 'result': None, 'error': {'code': error.code, 'message': error.message}, 'id': request_id }
|
JSON RPC version 1.0.
|
6259904c50485f2cf55dc396
|
class CloudServiceOsProfile(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'secrets': {'key': 'secrets', 'type': '[CloudServiceVaultSecretGroup]'}, } <NEW_LINE> def __init__( self, *, secrets: Optional[List["CloudServiceVaultSecretGroup"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(CloudServiceOsProfile, self).__init__(**kwargs) <NEW_LINE> self.secrets = secrets
|
Describes the OS profile for the cloud service.
:ivar secrets: Specifies set of certificates that should be installed onto the role instances.
:vartype secrets: list[~azure.mgmt.compute.v2021_03_01.models.CloudServiceVaultSecretGroup]
|
6259904c435de62698e9d212
|
class AlcatelSrosSSH(CiscoSSHConnection): <NEW_LINE> <INDENT> def session_preparation(self): <NEW_LINE> <INDENT> self._test_channel_read() <NEW_LINE> self.set_base_prompt() <NEW_LINE> self.disable_paging(command="environment no more") <NEW_LINE> time.sleep(.3 * self.global_delay_factor) <NEW_LINE> self.clear_buffer() <NEW_LINE> <DEDENT> def set_base_prompt(self, *args, **kwargs): <NEW_LINE> <INDENT> cur_base_prompt = super(AlcatelSrosSSH, self).set_base_prompt(*args, **kwargs) <NEW_LINE> match = re.search(r'(.*)(>.*)*#', cur_base_prompt) <NEW_LINE> if match: <NEW_LINE> <INDENT> self.base_prompt = match.group(1) <NEW_LINE> return self.base_prompt <NEW_LINE> <DEDENT> <DEDENT> def enable(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def config_mode(self, config_command='configure', pattern='#'): <NEW_LINE> <INDENT> return super(AlcatelSrosSSH, self).config_mode(config_command=config_command, pattern=pattern) <NEW_LINE> <DEDENT> def exit_config_mode(self, exit_config='exit all', pattern='#'): <NEW_LINE> <INDENT> return super(AlcatelSrosSSH, self).exit_config_mode(exit_config=exit_config, pattern=pattern) <NEW_LINE> <DEDENT> def check_config_mode(self, check_string='config', pattern='#'): <NEW_LINE> <INDENT> return super(AlcatelSrosSSH, self).check_config_mode(check_string=check_string, pattern=pattern) <NEW_LINE> <DEDENT> def save_config(self, cmd='', confirm=True, confirm_response=''): <NEW_LINE> <INDENT> raise NotImplementedError
|
Alcatel-Lucent SROS support.
|
6259904c8e05c05ec3f6f860
|
class SettingOptions(SettingItem): <NEW_LINE> <INDENT> options = ListProperty([]) <NEW_LINE> popup = ObjectProperty(None, allownone=True) <NEW_LINE> def on_panel(self, instance, value): <NEW_LINE> <INDENT> if value is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.bind(on_release=self._create_popup) <NEW_LINE> <DEDENT> def _set_option(self, instance): <NEW_LINE> <INDENT> self.value = instance.text <NEW_LINE> self.popup.dismiss() <NEW_LINE> <DEDENT> def _create_popup(self, instance): <NEW_LINE> <INDENT> content = BoxLayout(orientation='vertical', spacing='5dp') <NEW_LINE> self.popup = popup = Popup(content=content, title=self.title, size_hint=(None, None), size=('400dp', '400dp')) <NEW_LINE> popup.height = len(self.options) * dp(55) + dp(150) <NEW_LINE> content.add_widget(Widget(size_hint_y=None, height=1)) <NEW_LINE> uid = str(self.uid) <NEW_LINE> for option in self.options: <NEW_LINE> <INDENT> state = 'down' if option == self.value else 'normal' <NEW_LINE> btn = ToggleButton(text=option, state=state, group=uid) <NEW_LINE> btn.bind(on_release=self._set_option) <NEW_LINE> content.add_widget(btn) <NEW_LINE> <DEDENT> content.add_widget(SettingSpacer()) <NEW_LINE> btn = Button(text='Cancel', size_hint_y=None, height=dp(50)) <NEW_LINE> btn.bind(on_release=popup.dismiss) <NEW_LINE> content.add_widget(btn) <NEW_LINE> popup.open()
|
Implementation of an option list on top of :class:`SettingItem`.
It is visualized with a :class:`~kivy.uix.label.Label` widget that, when
clicked, will open a :class:`~kivy.uix.popup.Popup` with a
list of options from which the user can select.
|
6259904c596a897236128fb4
|
class CapsuleNet(nn.Module): <NEW_LINE> <INDENT> def __init__(self, input_size, classes, routings): <NEW_LINE> <INDENT> super(CapsuleNet, self).__init__() <NEW_LINE> self.input_size = input_size <NEW_LINE> self.classes = classes <NEW_LINE> self.routings = routings <NEW_LINE> self.conv1 = nn.Conv2d(input_size[0], 256, kernel_size=5, stride=1, padding=0) <NEW_LINE> self.primarycaps = PrimaryCapsule(256, 256, 8, kernel_size=5, stride=2, padding=0) <NEW_LINE> self.digitcaps = DenseCapsule(in_num_caps=32*6*6, in_dim_caps=8, out_num_caps=classes, out_dim_caps=16, routings=routings) <NEW_LINE> self.decoder = nn.Sequential( nn.Linear(16*classes, 512), nn.ReLU(inplace=True), nn.Linear(512, 1024), nn.ReLU(inplace=True), nn.Linear(1024, input_size[0] * input_size[1] * input_size[2]), nn.Sigmoid() ) <NEW_LINE> self.relu = nn.ReLU() <NEW_LINE> <DEDENT> def forward(self, x, y=None): <NEW_LINE> <INDENT> x = self.relu(self.conv1(x)) <NEW_LINE> x = self.primarycaps(x) <NEW_LINE> x = self.digitcaps(x) <NEW_LINE> length = x.norm(dim=-1) <NEW_LINE> if y is None: <NEW_LINE> <INDENT> index = length.data.max(dim=1)[1] <NEW_LINE> y = Variable(torch.zeros(length.size()).scatter_(1, index.view(-1, 1), 1.)) <NEW_LINE> <DEDENT> reconstruction = self.decoder((x * y[:, :, None]).view(x.size(0), -1)) <NEW_LINE> return length, reconstruction.view(-1, *self.input_size)
|
A Capsule Network on peptide.
:param input_size: data size = [channels, width, height]
:param classes: number of classes
:param routings: number of routing iterations
Shape:
- Input: (batch, channels, width, height), optional (batch, classes) .
- Output:((batch, classes), (batch, channels, width, height))
|
6259904c76d4e153a661dc7d
|
class worker_test(unittest.TestCase): <NEW_LINE> <INDENT> def NOtest_notutf8(self): <NEW_LINE> <INDENT> b='username=alexmadon&password=invalid\xff' <NEW_LINE> con = http.client.HTTPConnection('atpic.faa:80') <NEW_LINE> params=b <NEW_LINE> headers={} <NEW_LINE> headers = { "Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain", "User-Agent": "Mozilla", } <NEW_LINE> con.request("POST", "/login?f=xml", params, headers) <NEW_LINE> r = con.getresponse() <NEW_LINE> print(r.status,r.reason) <NEW_LINE> content=r.readall() <NEW_LINE> print(content) <NEW_LINE> con.close() <NEW_LINE> self.assertEqual(content,b"'utf-8' codec can't decode byte 0xff in position 35: invalid start byte") <NEW_LINE> <DEDENT> def test_no_user_agent(self): <NEW_LINE> <INDENT> b='username=alexmadon&password=mypass' <NEW_LINE> con = http.client.HTTPConnection('atpic.faa:80') <NEW_LINE> params=b <NEW_LINE> headers={} <NEW_LINE> headers = { "Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain", } <NEW_LINE> con.request("POST", "/login?f=xml", params, headers) <NEW_LINE> r = con.getresponse() <NEW_LINE> print(r.status,r.reason) <NEW_LINE> content=r.readall() <NEW_LINE> print(content) <NEW_LINE> con.close()
|
USER legacy urls
|
6259904c96565a6dacd2d98f
|
class CityTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def test_city_country(self): <NEW_LINE> <INDENT> cityc = city_country('richmond', 'virginia') <NEW_LINE> self.assertEqual(cityc, 'Richmond, Virginia')
|
testing some city funcs
|
6259904cb5575c28eb7136cf
|
class IsUserLeagueStatusOwner(permissions.BasePermission): <NEW_LINE> <INDENT> def has_permission(self, request, view): <NEW_LINE> <INDENT> return UserLeagueStatus.objects.filter(pk=view.kwargs['pk'], user=request.user).exists()
|
Check if request user is the owner of the UserLeagueStatus
|
6259904ce76e3b2f99fd9e17
|
class CharEntity(Entity): <NEW_LINE> <INDENT> char_race = ('Earth Pony', 'Unicorn', 'Pegasus') <NEW_LINE> def __init__(self, name, race, type='None', ignoreerror=False): <NEW_LINE> <INDENT> Entity.__init__(self, name, 'PonyEn') <NEW_LINE> if race.title() in CharEntity.char_race: <NEW_LINE> <INDENT> self.race = race.title() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if ignoreerror: <NEW_LINE> <INDENT> self.race = 'N/A' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise InvalidRaceError(race.title() + ' doesn\'t exist.') <NEW_LINE> <DEDENT> <DEDENT> self.type = type.title()
|
Character Entity. The child of the Entity type class
|
6259904ca8ecb0332587261e
|
class UpdateMatch(graphene.Mutation): <NEW_LINE> <INDENT> match = graphene.Field(lambda: Matches, description="Match updated by this mutation.") <NEW_LINE> class Arguments: <NEW_LINE> <INDENT> input = UpdateMatchInput(required=True) <NEW_LINE> <DEDENT> def mutate(self, info, input): <NEW_LINE> <INDENT> data = utils.input_to_dictionary(input) <NEW_LINE> if "match_date" in data.keys(): <NEW_LINE> <INDENT> data['match_date'] = datetime.strptime( data['match_date'], '%Y-%m-%d') <NEW_LINE> <DEDENT> match = db_session.query(ModelMatches).filter_by(match_id=data['match_id']) <NEW_LINE> match.update(data) <NEW_LINE> db_session.commit() <NEW_LINE> match = db_session.query(ModelMatches).filter_by(match_id=data['match_id']).first() <NEW_LINE> return UpdateMatch(match=match)
|
Update a match.
|
6259904c07f4c71912bb0841
|
class LeadSentenceSelectorTests(unittest.TestCase): <NEW_LINE> <INDENT> Preprocessor.load_models() <NEW_LINE> def test_select_content(self): <NEW_LINE> <INDENT> sentence_1 = 'In a park somewhere, a bunch of puppies played fetch with their owners today.' <NEW_LINE> doc_id_1 = 'TST_ENG_20190101.0001' <NEW_LINE> sentence_2 = 'I took my small puppy to the dog park today.' <NEW_LINE> doc_id_2 = 'TST_ENG_20190101.0002' <NEW_LINE> selector = LeadSentenceSelector() <NEW_LINE> documents = [Document(doc_id_1), Document(doc_id_2)] <NEW_LINE> expected_sentences = [Sentence(sentence_1, 1, doc_id_1), Sentence(sentence_2, 1, doc_id_2)] <NEW_LINE> selector.select_content(documents, []) <NEW_LINE> selected_sentences = selector.selected_content <NEW_LINE> self.assertCountEqual(expected_sentences, selected_sentences)
|
Tests for LeadSentenceSelector
|
6259904cd4950a0f3b111849
|
class GBRModel(BaseModel): <NEW_LINE> <INDENT> def __init__(self, n_estimators=100, learning_rate=0.1, max_depth=20, random_state=1, verbose=0, n_features=None, max_features=None, validation_data=None): <NEW_LINE> <INDENT> BaseModel.__init__(self, "GradientBoostingClassifierModel", n_features=n_features) <NEW_LINE> self.model = GradientBoostingRegressor(n_estimators=n_estimators, learning_rate=learning_rate, max_depth=max_depth, random_state=random_state, max_features=max_features, verbose=verbose) <NEW_LINE> <DEDENT> def fit(self, x, y): <NEW_LINE> <INDENT> self.model.fit(x, y) <NEW_LINE> <DEDENT> def predict(self, x): <NEW_LINE> <INDENT> y_pred = self.model.predict(x) <NEW_LINE> y_pred = y_pred.reshape(-1, 1) <NEW_LINE> return y_pred
|
All sk-learn models need to inherit from this model.
|
6259904ce64d504609df9dd6
|
class AlignedCorpora: <NEW_LINE> <INDENT> def __init__(self, parallel_dict): <NEW_LINE> <INDENT> self.langs = list(parallel_dict.keys()) <NEW_LINE> self.langs.sort() <NEW_LINE> self.parallel_dict = parallel_dict <NEW_LINE> <DEDENT> def generate_fastalign_output(self, output_dir): <NEW_LINE> <INDENT> all_lang_pairs = list(itertools.combinations(self.langs, 2)) <NEW_LINE> for (l1, l2) in all_lang_pairs: <NEW_LINE> <INDENT> print('creating ' + output_dir + l1 + '_' + l2 + '.txt ..') <NEW_LINE> f = codecs.open(output_dir + l1 + '_' + l2 + '.txt', 'w', 'utf-8') <NEW_LINE> num_verses = len(self.parallel_dict[l1]) <NEW_LINE> for idx in range(0, num_verses): <NEW_LINE> <INDENT> f.write(self.parallel_dict[l1][idx] + ' ||| ' + self.parallel_dict[l2][idx] + '\n') <NEW_LINE> <DEDENT> f.close() <NEW_LINE> <DEDENT> return all_lang_pairs
|
This class works with multiple aligned
|
6259904c1f5feb6acb164002
|
class ISQLFolderEngine(Interface): <NEW_LINE> <INDENT> pass
|
Interface for getting an Engine from a SQLFolder
|
6259904cd7e4931a7ef3d484
|
class LayerCall: <NEW_LINE> <INDENT> def __init__(self, call_collection, call_fn, name): <NEW_LINE> <INDENT> self.call_collection = call_collection <NEW_LINE> self.wrapped_call = tf.function( layer_call_wrapper(call_collection, call_fn, name)) <NEW_LINE> self.original_layer_call = call_collection.layer_call_method <NEW_LINE> <DEDENT> def _maybe_trace(self, args, kwargs): <NEW_LINE> <INDENT> if tracing_enabled(): <NEW_LINE> <INDENT> self.call_collection.add_trace(*args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, *args, **kwargs): <NEW_LINE> <INDENT> self._maybe_trace(args, kwargs) <NEW_LINE> return self.wrapped_call(*args, **kwargs) <NEW_LINE> <DEDENT> def get_concrete_function(self, *args, **kwargs): <NEW_LINE> <INDENT> self._maybe_trace(args, kwargs) <NEW_LINE> return self.wrapped_call.get_concrete_function(*args, **kwargs)
|
Function that triggers traces of other functions in the same collection.
|
6259904c07d97122c42180b0
|
class SACReplayBuffer: <NEW_LINE> <INDENT> def __init__(self, env_spec, max_size): <NEW_LINE> <INDENT> obs_dim = env_spec.observation_space.shape[0] <NEW_LINE> act_dim = env_spec.action_space.shape[0] <NEW_LINE> self.obs1_buf = np.zeros([max_size, obs_dim], dtype=np.float32) <NEW_LINE> self.obs2_buf = np.zeros([max_size, obs_dim], dtype=np.float32) <NEW_LINE> self.acts_buf = np.zeros([max_size, act_dim], dtype=np.float32) <NEW_LINE> self.rews_buf = np.zeros(max_size, dtype=np.float32) <NEW_LINE> self.done_buf = np.zeros(max_size, dtype=np.float32) <NEW_LINE> self.ptr, self.n_transitions_stored, self.max_size = 0, 0, max_size <NEW_LINE> <DEDENT> def store(self, obs, act, rew, next_obs, done): <NEW_LINE> <INDENT> self.obs1_buf[self.ptr] = obs <NEW_LINE> self.obs2_buf[self.ptr] = next_obs <NEW_LINE> self.acts_buf[self.ptr] = act <NEW_LINE> self.rews_buf[self.ptr] = rew <NEW_LINE> self.done_buf[self.ptr] = done <NEW_LINE> self.ptr = (self.ptr+1) % self.max_size <NEW_LINE> self.n_transitions_stored = min(self.n_transitions_stored+1, self.max_size) <NEW_LINE> <DEDENT> def add_transitions(self, **kwargs): <NEW_LINE> <INDENT> assert(len(kwargs["observation"]) == 1) <NEW_LINE> self.store(kwargs["observation"][0], kwargs["action"][0], kwargs["reward"], kwargs["next_observation"][0], kwargs["terminal"]) <NEW_LINE> <DEDENT> def sample(self, batch_size=32): <NEW_LINE> <INDENT> idxs = np.random.randint(0, self.n_transitions_stored, size=batch_size) <NEW_LINE> return dict(observation=self.obs1_buf[idxs], next_observation=self.obs2_buf[idxs], action=self.acts_buf[idxs], reward=self.rews_buf[idxs], terminal=self.done_buf[idxs])
|
A simple FIFO experience replay buffer for SAC agents.
|
6259904c0a366e3fb87dddf3
|
class CBCT16(CBCTBankMixin, TestCase): <NEW_LINE> <INDENT> file_path = ['CBCT_16.zip'] <NEW_LINE> expected_roll = 0.2 <NEW_LINE> slice_locations = {'HU': 32, 'UN': 6, 'SR': 44, 'LC': 20} <NEW_LINE> hu_values = {'Poly': -37, 'Acrylic': 128, 'Delrin': 342, 'Air': -995, 'Teflon': 1000, 'PMP': -181, 'LDPE': -87} <NEW_LINE> unif_values = {'Center': 17, 'Left': 20, 'Right': 18, 'Top': 19, 'Bottom': 19} <NEW_LINE> mtf_values = {80: 0.42, 90: 0.33, 60: 0.53, 70: 0.48, 95: 0.26} <NEW_LINE> avg_line_length = 49.6 <NEW_LINE> lowcon_visible = 3
|
A Varian CBCT dataset
|
6259904c435de62698e9d214
|
class CourseInfoAPIHandler(UMBaseHandler): <NEW_LINE> <INDENT> @is_super_admin <NEW_LINE> def get(self): <NEW_LINE> <INDENT> units = Courses.get_units() <NEW_LINE> dict_units = {} <NEW_LINE> for unit in units: <NEW_LINE> <INDENT> dict_units[unit['unit_id']] = unit['title'] <NEW_LINE> <DEDENT> self.write_json(dict_units)
|
Handlers for /api/course/info
|
6259904cdc8b845886d549ca
|
class TaskapiStub(object): <NEW_LINE> <INDENT> def __init__(self, channel): <NEW_LINE> <INDENT> self.addTask = channel.unary_unary( '/Taskapi/addTask', request_serializer=google_dot_protobuf_dot_wrappers__pb2.StringValue.SerializeToString, response_deserializer=task__pb2.Task.FromString, ) <NEW_LINE> self.delTask = channel.unary_unary( '/Taskapi/delTask', request_serializer=google_dot_protobuf_dot_wrappers__pb2.UInt64Value.SerializeToString, response_deserializer=task__pb2.Task.FromString, ) <NEW_LINE> self.nondestructive_editTask = channel.unary_unary( '/Taskapi/nondestructive_editTask', request_serializer=task__pb2.Task.SerializeToString, response_deserializer=task__pb2.Task.FromString, ) <NEW_LINE> self.destructive_editTask = channel.unary_unary( '/Taskapi/destructive_editTask', request_serializer=task__pb2.Task.SerializeToString, response_deserializer=task__pb2.Task.FromString, ) <NEW_LINE> self.listTasks = channel.unary_unary( '/Taskapi/listTasks', request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, response_deserializer=task__pb2.Tasks.FromString, )
|
Task service API
|
6259904c24f1403a926862d4
|
class StoryCommitLogEntryModelUnitTest(test_utils.GenericTestBase): <NEW_LINE> <INDENT> def test_get_export_policy(self) -> None: <NEW_LINE> <INDENT> expexted_export_policy_dict = { 'story_id': base_models.EXPORT_POLICY.NOT_APPLICABLE, 'created_on': base_models.EXPORT_POLICY.NOT_APPLICABLE, 'last_updated': base_models.EXPORT_POLICY.NOT_APPLICABLE, 'deleted': base_models.EXPORT_POLICY.NOT_APPLICABLE, 'commit_cmds': base_models.EXPORT_POLICY.NOT_APPLICABLE, 'commit_message': base_models.EXPORT_POLICY.NOT_APPLICABLE, 'commit_type': base_models.EXPORT_POLICY.NOT_APPLICABLE, 'post_commit_community_owned': ( base_models.EXPORT_POLICY.NOT_APPLICABLE), 'post_commit_is_private': ( base_models.EXPORT_POLICY.NOT_APPLICABLE), 'post_commit_status': ( base_models.EXPORT_POLICY.NOT_APPLICABLE), 'user_id': base_models.EXPORT_POLICY.NOT_APPLICABLE, 'version': base_models.EXPORT_POLICY.NOT_APPLICABLE, } <NEW_LINE> self.assertEqual( story_models.StoryCommitLogEntryModel.get_export_policy(), expexted_export_policy_dict) <NEW_LINE> <DEDENT> def test_get_model_association_to_user(self) -> None: <NEW_LINE> <INDENT> self.assertEqual( story_models.StoryCommitLogEntryModel. get_model_association_to_user(), base_models.MODEL_ASSOCIATION_TO_USER.NOT_CORRESPONDING_TO_USER) <NEW_LINE> <DEDENT> def test_has_reference_to_user_id(self) -> None: <NEW_LINE> <INDENT> commit = story_models.StoryCommitLogEntryModel.create( 'b', 0, 'committer_id', 'msg', 'create', [{}], constants.ACTIVITY_STATUS_PUBLIC, False) <NEW_LINE> commit.story_id = 'b' <NEW_LINE> commit.update_timestamps() <NEW_LINE> commit.put() <NEW_LINE> self.assertTrue( story_models.StoryCommitLogEntryModel .has_reference_to_user_id('committer_id')) <NEW_LINE> self.assertFalse( story_models.StoryCommitLogEntryModel .has_reference_to_user_id('x_id'))
|
Test the StoryCommitLogEntryModel class.
|
6259904c07d97122c42180b1
|
class EmailForm(forms.ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = EmailMailing <NEW_LINE> fields = ("email",)
|
Форма emaila
|
6259904cd6c5a102081e352b
|
class TestRouterInit(RouterTestCase): <NEW_LINE> <INDENT> def test_sets_default_route_to_not_found_handler(self): <NEW_LINE> <INDENT> default = Route(path=None, endpoint=not_found) <NEW_LINE> verify(self.routes.setdefault).called_with(default)
|
Router()
|
6259904c82261d6c527308cd
|
class PrometheusScrapeTarget(ops.framework.Object): <NEW_LINE> <INDENT> relation_name: str = None <NEW_LINE> def __init__(self, charm: ops.charm.CharmBase, relation_name: str): <NEW_LINE> <INDENT> super().__init__(charm, relation_name) <NEW_LINE> self.relation_name = relation_name <NEW_LINE> <DEDENT> def publish_info( self, hostname: str, port: str, metrics_path: str, scrape_interval: str, scrape_timeout: str, ) -> NoReturn: <NEW_LINE> <INDENT> if self.framework.model.unit.is_leader(): <NEW_LINE> <INDENT> for relation in self.framework.model.relations[self.relation_name]: <NEW_LINE> <INDENT> relation.data[self.framework.model.app]["hostname"] = hostname <NEW_LINE> relation.data[self.framework.model.app]["port"] = port <NEW_LINE> relation.data[self.framework.model.app]["metrics_path"] = metrics_path <NEW_LINE> relation.data[self.framework.model.app][ "scrape_interval" ] = scrape_interval <NEW_LINE> relation.data[self.framework.model.app][ "scrape_timeout" ] = scrape_timeout
|
Provides side of a Prometheus Scrape endpoint
|
6259904cd53ae8145f91986f
|
class ModifyBackupNameRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.InstanceId = None <NEW_LINE> self.BackupId = None <NEW_LINE> self.BackupName = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.InstanceId = params.get("InstanceId") <NEW_LINE> self.BackupId = params.get("BackupId") <NEW_LINE> self.BackupName = params.get("BackupName")
|
ModifyBackupName请求参数结构体
|
6259904cd53ae8145f919870
|
class DBRouter: <NEW_LINE> <INDENT> def db_for_read(self, model, **hints): <NEW_LINE> <INDENT> if model._meta.app_label == 'store': <NEW_LINE> <INDENT> return 'db_store' <NEW_LINE> <DEDENT> elif model._meta.app_label == 'warehouse': <NEW_LINE> <INDENT> return 'db_warehouse' <NEW_LINE> <DEDENT> return 'default' <NEW_LINE> <DEDENT> def db_for_write(self, model, **hints): <NEW_LINE> <INDENT> if model._meta.app_label == 'store': <NEW_LINE> <INDENT> return 'db_store' <NEW_LINE> <DEDENT> elif model._meta.app_label == 'warehouse': <NEW_LINE> <INDENT> return 'db_warehouse' <NEW_LINE> <DEDENT> return 'default' <NEW_LINE> <DEDENT> def allow_relation(self, obj1, obj2, **hints): <NEW_LINE> <INDENT> if obj1._meta.app_label == 'store' or obj2._meta.app_label == 'store': <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif 'store' not in [obj1._meta.app_label, obj2._meta.app_label]: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif obj1._meta.app_label == 'warehouse' or obj2._meta.app_label == 'warehouse': <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif 'warehouse' not in [obj1._meta.app_label, obj2._meta.app_label]: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False
|
A router to control all database operations on models in the
store and warehouse applications.
|
6259904c711fe17d825e16a5
|
class Task( namedtuple('Task', ( 'func args caller_id exception_handler should_return_results arg_checker ' 'fail_on_error'))): <NEW_LINE> <INDENT> pass
|
Task class representing work to be completed.
Args:
func: The function to be executed.
args: The arguments to func.
caller_id: The globally-unique caller ID corresponding to the Apply call.
exception_handler: The exception handler to use if the call to func fails.
should_return_results: True iff the results of this function should be
returned from the Apply call.
arg_checker: Used to determine whether we should process the current
argument or simply skip it. Also handles any logging that
is specific to a particular type of argument.
fail_on_error: If true, then raise any exceptions encountered when
executing func. This is only applicable in the case of
process_count == thread_count == 1.
|
6259904cac7a0e7691f738e9
|
class MinimalIfError(InterpreterError): <NEW_LINE> <INDENT> pass
|
Raised when the top of stack is not boolean processing OP_IF or OP_NOTIF.
|
6259904c07f4c71912bb0843
|
class Groups(list): <NEW_LINE> <INDENT> def append(self, value): <NEW_LINE> <INDENT> if isinstance(value, basestring): <NEW_LINE> <INDENT> if not list.__contains__(self, value): <NEW_LINE> <INDENT> list.append(self, value) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise GroupException("Only strings can be used as list names") <NEW_LINE> <DEDENT> <DEDENT> def __setitem__(self, i, y): <NEW_LINE> <INDENT> if isinstance(y, basestring): <NEW_LINE> <INDENT> list.__setitem__(self, i, y) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise GroupException("Only strings can be used as list names") <NEW_LINE> <DEDENT> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, Groups): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if (list.sort(self) == other.sort()): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if other is None or not isinstance(other, Groups): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if (list.sort(self) == other.sort()): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True
|
A list of strings used to identify associations that an element might
have. Enforces that all elements must be strings, and that
the same element cannot be provided more than once.
>>> g = Groups()
>>> g.append("hello")
>>> g[0]
'hello'
>>> g.append("hello") # not added as already present
>>> len(g)
1
>>> g
['hello']
>>> g.append(5)
Traceback (most recent call last):
GroupException: Only strings can be used as list names
|
6259904cd7e4931a7ef3d486
|
class CitationDictsDataCacher(DataCacher): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> def fill(): <NEW_LINE> <INDENT> alldicts = {} <NEW_LINE> from invenio.bibrank_tag_based_indexer import fromDB <NEW_LINE> redis = get_redis() <NEW_LINE> serialized_weights = redis.get('citations_weights') <NEW_LINE> if serialized_weights: <NEW_LINE> <INDENT> weights = deserialize_via_marshal(serialized_weights) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> weights = fromDB('citation') <NEW_LINE> <DEDENT> alldicts['citations_weights'] = weights <NEW_LINE> alldicts['citations_keys'] = intbitset(weights.keys()) <NEW_LINE> alldicts['citations_counts'] = [t for t in weights.iteritems()] <NEW_LINE> alldicts['citations_counts'].sort(key=itemgetter(1), reverse=True) <NEW_LINE> serialized_weights = redis.get('selfcites_weights') <NEW_LINE> if serialized_weights: <NEW_LINE> <INDENT> selfcites = deserialize_via_marshal(serialized_weights) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> selfcites = fromDB('selfcites') <NEW_LINE> <DEDENT> selfcites_weights = {} <NEW_LINE> for recid, counts in alldicts['citations_counts']: <NEW_LINE> <INDENT> selfcites_weights[recid] = counts - selfcites.get(recid, 0) <NEW_LINE> <DEDENT> alldicts['selfcites_weights'] = selfcites_weights <NEW_LINE> alldicts['selfcites_counts'] = [(recid, selfcites_weights.get(recid, cites)) for recid, cites in alldicts['citations_counts']] <NEW_LINE> alldicts['selfcites_counts'].sort(key=itemgetter(1), reverse=True) <NEW_LINE> return alldicts <NEW_LINE> <DEDENT> def cache_filler(): <NEW_LINE> <INDENT> self.cache = None <NEW_LINE> return fill() <NEW_LINE> <DEDENT> from invenio.bibrank_tag_based_indexer import get_lastupdated <NEW_LINE> def timestamp_verifier(): <NEW_LINE> <INDENT> citation_lastupdate = get_lastupdated('citation') <NEW_LINE> if citation_lastupdate: <NEW_LINE> <INDENT> return citation_lastupdate.strftime("%Y-%m-%d %H:%M:%S") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "0000-00-00 00:00:00" 
<NEW_LINE> <DEDENT> <DEDENT> DataCacher.__init__(self, cache_filler, timestamp_verifier)
|
Cache holding all citation dictionaries (citationdict,
reversedict, selfcitdict, selfcitedbydict).
|
6259904cbaa26c4b54d506b9
|
class NuSTAR(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._ref_epoch = Time('2010-01-01T00:00:00', format='fits', scale='utc') <NEW_LINE> self._raw_pixel = 604.8 * u.micron <NEW_LINE> self._pixel_um = self._raw_pixel / 5. <NEW_LINE> self._pixel = 2.54 * u.arcsec <NEW_LINE> self._launch = Time('2012-06-13T00:00:00') <NEW_LINE> self._tick = 16./14745600. <NEW_LINE> <DEDENT> @property <NEW_LINE> def launch(self): <NEW_LINE> <INDENT> return self._launch <NEW_LINE> <DEDENT> @property <NEW_LINE> def ref_epoch(self): <NEW_LINE> <INDENT> return self._ref_epoch <NEW_LINE> <DEDENT> @property <NEW_LINE> def pixel(self): <NEW_LINE> <INDENT> return self._pixel <NEW_LINE> <DEDENT> @property <NEW_LINE> def pixel_um(self): <NEW_LINE> <INDENT> return self._pixel_um <NEW_LINE> <DEDENT> @property <NEW_LINE> def tick(self): <NEW_LINE> <INDENT> return self._tick <NEW_LINE> <DEDENT> def time_to_met(self, time): <NEW_LINE> <INDENT> met = (time.tt - self.ref_epoch).sec <NEW_LINE> return met <NEW_LINE> <DEDENT> def met_to_time(self, met): <NEW_LINE> <INDENT> from numpy import asarray <NEW_LINE> foo = asarray(met) <NEW_LINE> if foo.ndim == 0: <NEW_LINE> <INDENT> assert isinstance(met, float), "met_to_time: met must be a float" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert isinstance(met[0], float), "met_to_time: met must be a float" <NEW_LINE> <DEDENT> this_time = TimeDelta(met, format='sec', scale ='tt') + self.ref_epoch <NEW_LINE> return this_time <NEW_LINE> <DEDENT> def rate_conversion(self, rate, incident=False): <NEW_LINE> <INDENT> if incident is True: <NEW_LINE> <INDENT> result = rate / (1.0 + rate * 2.5e-3) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = rate / (1.0 - rate * 2.5e-3) <NEW_LINE> <DEDENT> return result
|
Class for holding constant attributes about NuSTAR and for time conversion from
MET to 'TIME' objects and back again
|
6259904c8da39b475be045ff
|
class SpatialFieldPrimitiveHideMode(Enum, IComparable, IFormattable, IConvertible): <NEW_LINE> <INDENT> def __eq__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __format__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __ge__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __gt__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __init__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __le__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __lt__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __ne__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __reduce_ex__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __str__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> Default = None <NEW_LINE> HideNone = None <NEW_LINE> HideOnlyReference = None <NEW_LINE> HideWholeElement = None <NEW_LINE> value__ = None
|
Defines modes which can be used by a SpatialFieldPrimitive to hide the original referenced element.
enum SpatialFieldPrimitiveHideMode, values: Default (0), HideNone (1), HideOnlyReference (2), HideWholeElement (3)
|
6259904c4e696a045264e828
|
class Plugin(object): <NEW_LINE> <INDENT> def __init__(self, obj): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def instance(cls, obj): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return _instances[cls][obj] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> instances = _instances.setdefault(cls, weakref.WeakKeyDictionary()) <NEW_LINE> result = instances[obj] = cls.__new__(cls, obj) <NEW_LINE> result._parent = weakref.ref(obj) <NEW_LINE> result.__init__(obj) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def instances(cls): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return _instances[cls].values() <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return ()
|
Base class for plugins.
A Plugin is coupled to another object and is automatically garbage collected
as soon as the other object disappears.
Use the instance() class method to get/create the Plugin instance for an object.
Implement the __init__() method if you want to do some setup.
The instances() class method returns all living instances of this plugin type.
|
6259904c3eb6a72ae038ba6b
|
class TimezoneField(fields.String): <NEW_LINE> <INDENT> pass
|
Schema for timezone
|
6259904c3c8af77a43b68945
|
class DefaultOrchestratorInfo(NodesFilterMixin, BaseHandler): <NEW_LINE> <INDENT> _serializer = None <NEW_LINE> @content_json <NEW_LINE> def GET(self, cluster_id): <NEW_LINE> <INDENT> cluster = self.get_object_or_404(objects.Cluster, cluster_id) <NEW_LINE> nodes = self.get_nodes(cluster) <NEW_LINE> return self._serializer.serialize( cluster, nodes, ignore_customized=True)
|
Base class for default orchestrator data.
Need to redefine serializer variable
|
6259904c50485f2cf55dc39b
|
class Monitoring(Service, Singletone): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(Monitoring, self).__init__() <NEW_LINE> self.listeners = [] <NEW_LINE> self.clients = [] <NEW_LINE> self.client_classes = { 'server-agent': ServerAgentClient, 'graphite': GraphiteClient, 'local': LocalClient, } <NEW_LINE> <DEDENT> def add_listener(self, listener): <NEW_LINE> <INDENT> assert isinstance(listener, MonitoringListener) <NEW_LINE> self.listeners.append(listener) <NEW_LINE> <DEDENT> def prepare(self): <NEW_LINE> <INDENT> super(Monitoring, self).prepare() <NEW_LINE> clients = (param for param in self.parameters if param not in ('run-at', 'module')) <NEW_LINE> for client_name in clients: <NEW_LINE> <INDENT> if client_name in self.client_classes: <NEW_LINE> <INDENT> client_class = self.client_classes[client_name] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.log.warning('Unknown monitoring found: %s', client_name) <NEW_LINE> continue <NEW_LINE> <DEDENT> for config in self.parameters.get(client_name, []): <NEW_LINE> <INDENT> label = config.get('label', None) <NEW_LINE> if client_name == 'local': <NEW_LINE> <INDENT> if any([client for client in self.clients if isinstance(client, self.client_classes[client_name])]): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if len(self.parameters.get(client_name, [])) > 1: <NEW_LINE> <INDENT> self.log.warning('LocalMonitoring client found twice, configs will be joined') <NEW_LINE> <DEDENT> config = BetterDict() <NEW_LINE> for cfg in self.parameters.get(client_name, []): <NEW_LINE> <INDENT> config.merge(cfg) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> client = client_class(self.log, label, config, self.engine) <NEW_LINE> self.clients.append(client) <NEW_LINE> client.connect() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def startup(self): <NEW_LINE> <INDENT> for client in self.clients: <NEW_LINE> <INDENT> client.start() <NEW_LINE> <DEDENT> super(Monitoring, self).startup() <NEW_LINE> <DEDENT> def 
check(self): <NEW_LINE> <INDENT> results = [] <NEW_LINE> for client in self.clients: <NEW_LINE> <INDENT> results.extend(client.get_data()) <NEW_LINE> <DEDENT> if results: <NEW_LINE> <INDENT> for listener in self.listeners: <NEW_LINE> <INDENT> listener.monitoring_data(results) <NEW_LINE> <DEDENT> <DEDENT> return super(Monitoring, self).check() <NEW_LINE> <DEDENT> def shutdown(self): <NEW_LINE> <INDENT> for client in self.clients: <NEW_LINE> <INDENT> client.disconnect() <NEW_LINE> <DEDENT> super(Monitoring, self).shutdown() <NEW_LINE> <DEDENT> def get_widget(self): <NEW_LINE> <INDENT> widget = MonitoringWidget() <NEW_LINE> self.add_listener(widget) <NEW_LINE> return widget
|
:type clients: list[ServerAgentClient]
:type listeners: list[MonitoringListener]
|
6259904c097d151d1a2c247e
|
class ZoomOAuth2(BaseOAuth2): <NEW_LINE> <INDENT> name = 'zoom-oauth2' <NEW_LINE> AUTHORIZATION_URL = 'https://zoom.us/oauth/authorize' <NEW_LINE> ACCESS_TOKEN_URL = 'https://zoom.us/oauth/token' <NEW_LINE> USER_DETAILS_URL = 'https://api.zoom.us/v2/users/me' <NEW_LINE> DEFAULT_SCOPE = ['user:read'] <NEW_LINE> ACCESS_TOKEN_METHOD = 'POST' <NEW_LINE> REFRESH_TOKEN_METHOD = 'POST' <NEW_LINE> REDIRECT_STATE = False <NEW_LINE> EXTRA_DATA = [ ('expires_in', 'expires') ] <NEW_LINE> def user_data(self, access_token, *args, **kwargs): <NEW_LINE> <INDENT> response = self.get_json( self.USER_DETAILS_URL, headers={ 'Authorization': 'Bearer {access_token}'.format( access_token=access_token ) } ) <NEW_LINE> return response <NEW_LINE> <DEDENT> def get_user_details(self, response): <NEW_LINE> <INDENT> username = response.get('id', '') <NEW_LINE> first_name = response.get('first_name', '') <NEW_LINE> last_name = response.get('last_name', '') <NEW_LINE> email = response.get('email', '') <NEW_LINE> fullname = '' <NEW_LINE> return { 'username': username, 'email': email, 'fullname': fullname, 'first_name': first_name, 'last_name': last_name, } <NEW_LINE> <DEDENT> def auth_complete_params(self, state=None): <NEW_LINE> <INDENT> return { 'grant_type': 'authorization_code', 'code': self.data.get('code', ''), 'redirect_uri': self.get_redirect_uri(state), } <NEW_LINE> <DEDENT> def auth_headers(self): <NEW_LINE> <INDENT> return { 'Authorization': b'Basic ' + base64.urlsafe_b64encode( '{}:{}'.format(*self.get_key_and_secret()).encode() ) } <NEW_LINE> <DEDENT> def refresh_token_params(self, token, *args, **kwargs): <NEW_LINE> <INDENT> return {'refresh_token': token, 'grant_type': 'refresh_token'}
|
Zoom OAuth2 authentication backend
Doc Reference: https://marketplace.zoom.us/docs/guides/auth/oauth
|
6259904cb5575c28eb7136d1
|
class TBufferedTransport(TTransportBase): <NEW_LINE> <INDENT> DEFAULT_BUFFER = 4096 <NEW_LINE> def __init__(self, trans, rbuf_size=DEFAULT_BUFFER): <NEW_LINE> <INDENT> self.__trans = trans <NEW_LINE> self.__wbuf = BytesIO() <NEW_LINE> self.__rbuf = BytesIO(b"") <NEW_LINE> self.__rbuf_size = rbuf_size <NEW_LINE> <DEDENT> def isOpen(self): <NEW_LINE> <INDENT> return self.__trans.isOpen() <NEW_LINE> <DEDENT> def open(self): <NEW_LINE> <INDENT> return self.__trans.open() <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> return self.__trans.close() <NEW_LINE> <DEDENT> def _read(self, sz): <NEW_LINE> <INDENT> ret = self.__rbuf.read(sz) <NEW_LINE> if len(ret) != 0: <NEW_LINE> <INDENT> return ret <NEW_LINE> <DEDENT> self.__rbuf = BytesIO(self.__trans.read(max(sz, self.__rbuf_size))) <NEW_LINE> return self.__rbuf.read(sz) <NEW_LINE> <DEDENT> def write(self, buf): <NEW_LINE> <INDENT> self.__wbuf.write(buf) <NEW_LINE> <DEDENT> def flush(self): <NEW_LINE> <INDENT> out = self.__wbuf.getvalue() <NEW_LINE> self.__wbuf = BytesIO() <NEW_LINE> self.__trans.write(out) <NEW_LINE> self.__trans.flush()
|
Class that wraps another transport and buffers its I/O.
The implementation uses a (configurable) fixed-size read buffer
but buffers all writes until a flush is performed.
|
6259904c23849d37ff8524cd
|
class define(object): <NEW_LINE> <INDENT> def __init__(self, _name, **kwargs): <NEW_LINE> <INDENT> self.name = _name <NEW_LINE> if _name in Forge._registry: <NEW_LINE> <INDENT> raise DuplicateFactoryError <NEW_LINE> <DEDENT> Forge._registry[_name] = dict(**kwargs) <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self.factory = OpenStruct(**Forge._registry[self.name]) <NEW_LINE> return self.factory <NEW_LINE> <DEDENT> def __exit__(self, type, value, tb): <NEW_LINE> <INDENT> Forge._registry[self.name] = self.factory.__dict__
|
Defines a factory with default attributes.
**Parameters**:
* `_name`: Name representing the factory you're defining.
* `kwargs`: Default attributes to set when building this factory.
**Example**:
::
# Forge.define as a method:
Forge.define('user', name='Frankenstein')
# Forge.define using `with`
with Forge.define('user') as f:
f.name = 'Frankenstein'
|
6259904c45492302aabfd8e3
|
class Cube(module3d.Object3D): <NEW_LINE> <INDENT> def __init__(self, width, height=0, depth=0, texture=None): <NEW_LINE> <INDENT> module3d.Object3D.__init__(self, 'cube_%s' % texture) <NEW_LINE> self.width = width <NEW_LINE> self.height = height or width <NEW_LINE> self.depth = depth or width <NEW_LINE> fg = self.createFaceGroup('cube') <NEW_LINE> v = [(x,y,z) for z in [0,self.depth] for y in [0,self.height] for x in [0,self.width]] <NEW_LINE> uv = ([0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]) <NEW_LINE> f = [ (4, 5, 6, 7), (1, 0, 3, 2), (0, 4, 7, 3), (5, 1, 2, 6), (0, 1, 5, 4), (7, 6, 2, 3), ] <NEW_LINE> self.setCoords(v) <NEW_LINE> self.setUVs(uv) <NEW_LINE> self.setFaces(f, fg.idx) <NEW_LINE> self.setCameraProjection(0) <NEW_LINE> self.updateIndexBuffer() <NEW_LINE> <DEDENT> def resize(self, width, height, depth): <NEW_LINE> <INDENT> v = [(x,y,z) for z in [0,depth] for y in [0,height] for x in [0,width]] <NEW_LINE> self.changeCoords(v) <NEW_LINE> self.update()
|
A cube.
:param width: The width.
:type width: int or float
:param height: The height, if 0 it will be equal to width.
:type height: int or float
:param depth: The depth, if 0 it will be equal to width.
:type depth: int or float
:param texture: The texture.
:type texture: str
|
6259904ca8ecb03325872622
|
class OpQueryExports(OpCode): <NEW_LINE> <INDENT> OP_ID = "OP_BACKUP_QUERY" <NEW_LINE> __slots__ = ["nodes", "use_locking"]
|
Compute the list of exported images.
|
6259904c3cc13d1c6d466b49
|
class PluginInfo(object): <NEW_LINE> <INDENT> def __init__(self, filename): <NEW_LINE> <INDENT> self._filename = filename <NEW_LINE> self._parser = DesktopParser() <NEW_LINE> self._parser.read(filename) <NEW_LINE> if not self._parser.has_section('Gazpacho Plugin'): <NEW_LINE> <INDENT> msg = "The plugin file %s should have a [Gazpacho Plugin] section" <NEW_LINE> raise PluginInfoError(msg % filename) <NEW_LINE> <DEDENT> self.name = self._read_value('name') <NEW_LINE> self.title = self._read_value('title', self.name) <NEW_LINE> self.class_name = self._read_value('class') <NEW_LINE> self.description = self._read_value('description', _('No description available')) <NEW_LINE> self.author = self._read_value('author', _('No author available')) <NEW_LINE> self.version = self._read_value('version', _('No version available')) <NEW_LINE> <DEDENT> def _read_value(self, key, default=None): <NEW_LINE> <INDENT> ret = None <NEW_LINE> try: <NEW_LINE> <INDENT> ret = self._parser.get('Gazpacho Plugin', key) <NEW_LINE> <DEDENT> except ConfigParser.NoOptionError: <NEW_LINE> <INDENT> if default: <NEW_LINE> <INDENT> ret = default <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> msg = ("The plugin file %s should have a %s option" % (self._filename, key)) <NEW_LINE> raise PluginInfoError(msg) <NEW_LINE> <DEDENT> <DEDENT> return ret
|
This class parses and stores the metadata of a .plugin file for a Plugin.
The format of such files is:
[Gazpacho Plugin]
name = plugin_name
title = short human readable string
class = plugin.dotted.class.name
description = text description
author = author name and e-mail
version = plugin version
|
6259904c0c0af96317c57769
|
@method_decorator(login_required, name='dispatch') <NEW_LINE> class ShopGroups(ListView): <NEW_LINE> <INDENT> template_name = 'shop_groups.html' <NEW_LINE> model = Group <NEW_LINE> context_object_name = 'groups' <NEW_LINE> paginate_by = 12 <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> groups = Group.objects.all() <NEW_LINE> category = self.request.GET.get('category', '1') <NEW_LINE> groups = groups.filter(category=category) <NEW_LINE> return groups <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super().get_context_data(**kwargs) <NEW_LINE> if 'purchase_id' in self.request.session: <NEW_LINE> <INDENT> context['basket_products_count'] = InvoiceLine.objects.filter(purchase=self.request.session.get('purchase_id')).count() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> context['basket_products_count'] = 0 <NEW_LINE> <DEDENT> context['groups_count'] = self.get_queryset().count() <NEW_LINE> context['categories'] = [(category.id, category.name) for category in Category.objects.all()] <NEW_LINE> context['groups'] = [(group.id, group.name) for group in Group.objects.all()] <NEW_LINE> context['brands'] = [(brand.id, brand.name) for brand in Brand.objects.all()] <NEW_LINE> return context
|
ShopGroups - view for shop template with product groups of a category
|
6259904cd7e4931a7ef3d488
|
class IoThread(threading.Thread): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> threading.Thread.__init__(self) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> global stopFlag, utcTime <NEW_LINE> with serial.Serial(os.getenv("SERIAL_PORT"), baudrate=os.getenv("SERIAL_BAUD"), timeout=1) as ser: <NEW_LINE> <INDENT> for i in range(0,5): <NEW_LINE> <INDENT> ser.readline() <NEW_LINE> <DEDENT> while stopFlag == False: <NEW_LINE> <INDENT> sioMesage = ser.readline().decode('ascii') <NEW_LINE> startTime = time.perf_counter() <NEW_LINE> while sioMesage.split(',')[0] != os.getenv("NMEA_TYPE"): <NEW_LINE> <INDENT> sioMesage = ser.readline().decode('ascii') <NEW_LINE> startTime = time.perf_counter() <NEW_LINE> <DEDENT> utcTime.setTime(utcFromGps(sioMesage.replace("\r\n",""), NmeaGpsMessages(os.getenv("NMEA_TYPE"))), startTime)
|
I/O Thread for server
This thread handles input and output from the NTP
Server. This includes both the Serial and optional
display
|
6259904c711fe17d825e16a6
|
class Operation(object): <NEW_LINE> <INDENT> reversible = True <NEW_LINE> reduces_to_sql = True <NEW_LINE> atomic = False <NEW_LINE> serialization_expand_args = [] <NEW_LINE> def __new__(cls, *args, **kwargs): <NEW_LINE> <INDENT> self = object.__new__(cls) <NEW_LINE> self._constructor_args = (args, kwargs) <NEW_LINE> return self <NEW_LINE> <DEDENT> def deconstruct(self): <NEW_LINE> <INDENT> return ( self.__class__.__name__, self._constructor_args[0], self._constructor_args[1], ) <NEW_LINE> <DEDENT> def state_forwards(self, app_label, state): <NEW_LINE> <INDENT> raise NotImplementedError('subclasses of Operation must provide a state_forwards() method') <NEW_LINE> <DEDENT> def database_forwards(self, app_label, schema_editor, from_state, to_state): <NEW_LINE> <INDENT> raise NotImplementedError('subclasses of Operation must provide a database_forwards() method') <NEW_LINE> <DEDENT> def database_backwards(self, app_label, schema_editor, from_state, to_state): <NEW_LINE> <INDENT> raise NotImplementedError('subclasses of Operation must provide a database_backwards() method') <NEW_LINE> <DEDENT> def describe(self): <NEW_LINE> <INDENT> return "%s: %s" % (self.__class__.__name__, self._constructor_args) <NEW_LINE> <DEDENT> def references_model(self, name, app_label=None): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def references_field(self, model_name, name, app_label=None): <NEW_LINE> <INDENT> return self.references_model(model_name, app_label) <NEW_LINE> <DEDENT> def allowed_to_migrate(self, connection_alias, model): <NEW_LINE> <INDENT> return ( not model._meta.proxy and not model._meta.swapped and model._meta.managed and router.allow_migrate(connection_alias, model) ) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<%s %s%s>" % ( self.__class__.__name__, ", ".join(map(repr, self._constructor_args[0])), ",".join(" %s=%r" % x for x in self._constructor_args[1].items()), )
|
Base class for migration operations.
It's responsible for both mutating the in-memory model state
(see db/migrations/state.py) to represent what it performs, as well
as actually performing it against a live database.
Note that some operations won't modify memory state at all (e.g. data
copying operations), and some will need their modifications to be
optionally specified by the user (e.g. custom Python code snippets)
Due to the way this class deals with deconstruction, it should be
considered immutable.
|
6259904cac7a0e7691f738eb
|
class Hand(): <NEW_LINE> <INDENT> def __init__(self, PCC, PlayerC): <NEW_LINE> <INDENT> self.PCC = PCC <NEW_LINE> self.PlayerC = PlayerC <NEW_LINE> <DEDENT> def FromPCToPlayer(self, PCC, PlayerC): <NEW_LINE> <INDENT> temp0 = PCC[len(PCC) - 1] <NEW_LINE> PlayerC.insert(0,temp0) <NEW_LINE> temp0 = PlayerC[len(PlayerC) - 1] <NEW_LINE> PlayerC.insert(0,temp0) <NEW_LINE> return PlayerC <NEW_LINE> <DEDENT> def FromPlayerToPC(self, PCC, PlayerC): <NEW_LINE> <INDENT> temp = PlayerC[len(PlayerC) - 1] <NEW_LINE> PCC.insert(0, temp) <NEW_LINE> temp = PCC[len(PCC) - 1] <NEW_LINE> PCC.insert(0, temp) <NEW_LINE> return PCC <NEW_LINE> <DEDENT> def DeleteLast(self, PCC): <NEW_LINE> <INDENT> helpArray = '' <NEW_LINE> for item in range(0, len(PCC) - 1): <NEW_LINE> <INDENT> helpArray = helpArray + ' ' + PCC[item] + " " <NEW_LINE> <DEDENT> return helpArray.split()
|
This is the Hand class. Each player has a Hand, and can add or remove
cards from that hand. There should be an add and remove card method here.
|
6259904ce64d504609df9dd8
|
class IdentityServicer(object): <NEW_LINE> <INDENT> def CreateUser(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def GetUser(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def UpdateUser(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def DeleteUser(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def ListUsers(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!')
|
A simple identity service.
|
6259904c8e71fb1e983bced7
|
class BaseRecipeViewSet(viewsets.GenericViewSet, mixins.ListModelMixin, mixins.CreateModelMixin): <NEW_LINE> <INDENT> authentication_classes = TokenAuthentication, <NEW_LINE> permission_classes = IsAuthenticated, <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> assigned_only = bool(self.request.query_params.get('assigned_only')) <NEW_LINE> queryset = self.queryset <NEW_LINE> if assigned_only: <NEW_LINE> <INDENT> return queryset.filter(recipe__isnull=False) <NEW_LINE> <DEDENT> return queryset.filter(user=self.request.user).order_by('-name') <NEW_LINE> <DEDENT> def perform_create(self, serializer): <NEW_LINE> <INDENT> serializer.save(user=self.request.user)
|
Base viewset for user owned recipe attributes
|
6259904c6fece00bbacccdca
|
class digest_deregister_result(object): <NEW_LINE> <INDENT> def __init__(self, success=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.success = RteReturn() <NEW_LINE> self.success.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('digest_deregister_result') <NEW_LINE> if self.success is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.STRUCT, 0) <NEW_LINE> self.success.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ 
<NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
|
Attributes:
- success
|
6259904c30c21e258be99c17
|
class FileSignalHandlerTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.signal_file = './foo.txt' <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.remove(self.signal_file) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def test_file_signal_handler(self): <NEW_LINE> <INDENT> self.assertRaises(ValueError, file_signal_handler.FileSignalHandler, '/foo/bar/foobar.txt') <NEW_LINE> <DEDENT> def test_wait(self): <NEW_LINE> <INDENT> handler = file_signal_handler.FileSignalHandler(self.signal_file) <NEW_LINE> wait_interval = 1. <NEW_LINE> poll_interval = 2.053 <NEW_LINE> self.assertRaises(ValueError, handler.wait, wait_interval, poll_interval) <NEW_LINE> wait_interval = 3.1 <NEW_LINE> elapsed_time = handler.wait(wait_interval, poll_interval) <NEW_LINE> self.assertAlmostEqual(elapsed_time, wait_interval)
|
Test case class for FileSignalHandler class
|
6259904cd6c5a102081e352e
|
class FCSEIDR(AbstractRegister): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(FCSEIDR, self).__init__() <NEW_LINE> <DEDENT> def set_pid(self, pid): <NEW_LINE> <INDENT> self.value[0:7] = pid <NEW_LINE> <DEDENT> def get_pid(self): <NEW_LINE> <INDENT> return self.value[0:7]
|
FCSE Process ID Register
|
6259904cb830903b9686ee83
|
class Database: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.guilds = {} <NEW_LINE> <DEDENT> async def setup(self) -> None: <NEW_LINE> <INDENT> self.pool = await create_pool( host=getenv("DB_HOST", "127.0.0.1"), port=getenv("DB_PORT", 5432), database=getenv("DB_DATABASE", "magoji"), user=getenv("DB_USER", "root"), password=getenv("DB_PASS", "password"), ) <NEW_LINE> <DEDENT> async def execute(self, query: str, *args: Any) -> None: <NEW_LINE> <INDENT> async with self.pool.acquire() as conn: <NEW_LINE> <INDENT> await conn.execute(query, *args) <NEW_LINE> <DEDENT> <DEDENT> async def fetchrow(self, query: str, *args: Any) -> Record: <NEW_LINE> <INDENT> async with self.pool.acquire() as conn: <NEW_LINE> <INDENT> return await conn.fetchrow(query, *args) <NEW_LINE> <DEDENT> <DEDENT> async def fetch(self, query: str, *args: Any) -> List[Record]: <NEW_LINE> <INDENT> async with self.pool.acquire() as conn: <NEW_LINE> <INDENT> return await conn.fetch(query, *args) <NEW_LINE> <DEDENT> <DEDENT> async def create_guild( self, id: int, prefix: str = ">", config: str = "{}" ) -> None: <NEW_LINE> <INDENT> await self.execute( "INSERT INTO Guilds (id, prefix, config) VALUES ($1, $2, $3);", id, prefix, config, ) <NEW_LINE> <DEDENT> async def update_guild_prefix(self, id: int, prefix: str) -> None: <NEW_LINE> <INDENT> if not await self.fetch_guild(id): <NEW_LINE> <INDENT> return await self.create_guild(id, prefix) <NEW_LINE> <DEDENT> if id in self.guilds: <NEW_LINE> <INDENT> del self.guilds[id] <NEW_LINE> <DEDENT> await self.execute("UPDATE Guilds SET prefix = $1 WHERE id = $2;", prefix, id) <NEW_LINE> <DEDENT> async def fetch_guild(self, id: int) -> Record: <NEW_LINE> <INDENT> if id in self.guilds: <NEW_LINE> <INDENT> return self.guilds[id] <NEW_LINE> <DEDENT> data = await self.fetchrow("SELECT * FROM Guilds WHERE id = $1;", id) <NEW_LINE> self.guilds[id] = data <NEW_LINE> return data <NEW_LINE> <DEDENT> async def fetch_cases(self, userid: int, guildid: int) -> 
List[Record]: <NEW_LINE> <INDENT> return await self.fetch( "SELECT * FROM Cases WHERE userid = $1 AND guildid = $2 ORDER BY created_at;", userid, guildid, ) <NEW_LINE> <DEDENT> async def update_config(self, id: int, config: str) -> None: <NEW_LINE> <INDENT> if not await self.fetch_guild(id): <NEW_LINE> <INDENT> return await self.create_guild(id, config=config) <NEW_LINE> <DEDENT> if id in self.guilds: <NEW_LINE> <INDENT> del self.guilds[id] <NEW_LINE> <DEDENT> await self.execute("UPDATE Guilds SET config = $1 WHERE id = $2;", config, id)
|
A database interface for the bot to connect to Postgres.
|
6259904c097d151d1a2c2480
|
class NodeVisitor: <NEW_LINE> <INDENT> def get_visitor(self, node: Node) -> "t.Optional[VisitCallable]": <NEW_LINE> <INDENT> return getattr(self, f"visit_{type(node).__name__}", None) <NEW_LINE> <DEDENT> def visit(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.Any: <NEW_LINE> <INDENT> f = self.get_visitor(node) <NEW_LINE> if f is not None: <NEW_LINE> <INDENT> return f(node, *args, **kwargs) <NEW_LINE> <DEDENT> return self.generic_visit(node, *args, **kwargs) <NEW_LINE> <DEDENT> def generic_visit(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.Any: <NEW_LINE> <INDENT> for node in node.iter_child_nodes(): <NEW_LINE> <INDENT> self.visit(node, *args, **kwargs)
|
Walks the abstract syntax tree and call visitor functions for every
node found. The visitor functions may return values which will be
forwarded by the `visit` method.
Per default the visitor functions for the nodes are ``'visit_'`` +
class name of the node. So a `TryFinally` node visit function would
be `visit_TryFinally`. This behavior can be changed by overriding
the `get_visitor` function. If no visitor function exists for a node
(return value `None`) the `generic_visit` visitor is used instead.
|
6259904c1f037a2d8b9e5275
|
class PilotDetail(generics.RetrieveUpdateDestroyAPIView): <NEW_LINE> <INDENT> queryset = Pilot.objects.all() <NEW_LINE> serializer_class = PilotSerializer <NEW_LINE> name = 'pilot-detail' <NEW_LINE> authentication_classes = (TokenAuthentication,) <NEW_LINE> permission_classes = (IsAuthenticated,)
|
飞行员详情
|
6259904cf7d966606f7492c1
|
class BeamSearchState(object): <NEW_LINE> <INDENT> def __init__(self, token_list, finished, decoder_input, hidden, probability=0.0): <NEW_LINE> <INDENT> super(BeamSearchState, self).__init__() <NEW_LINE> self.token_list = token_list <NEW_LINE> self.finished = finished <NEW_LINE> self.decoder_input = decoder_input <NEW_LINE> self.hidden = hidden <NEW_LINE> self.probability = probability <NEW_LINE> <DEDENT> def initInputVariable(self, token): <NEW_LINE> <INDENT> return Variable(LongTensor([[token]])) <NEW_LINE> <DEDENT> def advance(self, decoder, encoder_outputs, EOS_token): <NEW_LINE> <INDENT> decoder_output, hidden = decoder(self.decoder_input, self.hidden, encoder_outputs) <NEW_LINE> sorted_probabilities, sorted_predictions = decoder_output.data.topk(2) <NEW_LINE> retval = [] <NEW_LINE> for np, ni in zip(sorted_probabilities[0], sorted_predictions[0]): <NEW_LINE> <INDENT> if ni == EOS_token: <NEW_LINE> <INDENT> retval.append(BeamSearchState(self.token_list, True, self.initInputVariable(ni), hidden, self.probability + np)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> retval.append(BeamSearchState(self.token_list + [ni], False, self.initInputVariable(ni), hidden, self.probability + np)) <NEW_LINE> <DEDENT> <DEDENT> return retval
|
docstring for BeamSearchState
|
6259904c1f5feb6acb164008
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.