code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
|---|---|---|
class AddressChangeStatusView(generic.RedirectView):
    """Toggle a default-address flag on one of the user's addresses.

    Sets ``is_<action>`` (e.g. default_for_shipping / default_for_billing)
    on the address identified by ``pk``, then redirects back to the
    address list.
    """
    url = reverse_lazy('customer:address-list')
    permanent = False

    def get(self, request, pk=None, action=None, *args, **kwargs):
        address = get_object_or_404(UserAddress, user=self.request.user, pk=pk)
        # Any default flag is allowed for shipping countries; otherwise only
        # the billing default may be set.
        if address.country.is_shipping_country:
            flag_name = 'is_%s' % action
        elif action == 'default_for_billing':
            flag_name = 'is_default_for_billing'
        else:
            flag_name = None
            messages.error(request, _('We do not ship to this country'))
        if flag_name is not None:
            setattr(address, flag_name, True)
        address.save()
        return super(AddressChangeStatusView, self).get(
            request, *args, **kwargs)
|
Sets an address as default_for_(billing|shipping)
|
625990216e29344779b014fc
|
@HEADS.register_module()
class YOLACTSegmHead(nn.Module):
    """YOLACT segmentation head (https://arxiv.org/abs/1904.02689).

    Applies a semantic segmentation loss on the feature space using layers
    that are only evaluated during training, improving accuracy with no
    inference-time speed penalty.

    Args:
        num_classes (int): Number of categories excluding background.
        in_channels (int): Number of channels in the input feature map.
        loss_segm (dict): Config of the semantic segmentation loss.
    """

    def __init__(self,
                 num_classes,
                 in_channels=256,
                 loss_segm=dict(
                     type='CrossEntropyLoss',
                     use_sigmoid=True,
                     loss_weight=1.0)):
        super(YOLACTSegmHead, self).__init__()
        self.in_channels = in_channels
        self.num_classes = num_classes
        self.loss_segm = build_loss(loss_segm)
        self._init_layers()
        # Flag consumed by the force_fp32 decorator machinery.
        self.fp16_enabled = False

    def _init_layers(self):
        # Single 1x1 conv producing one segmentation channel per class.
        self.segm_conv = nn.Conv2d(
            self.in_channels, self.num_classes, kernel_size=1)

    def init_weights(self):
        xavier_init(self.segm_conv, distribution='uniform')

    def forward(self, x):
        # (N, in_channels, H, W) -> (N, num_classes, H, W)
        return self.segm_conv(x)

    @force_fp32(apply_to=('segm_pred', ))
    def loss(self, segm_pred, gt_masks, gt_labels):
        """Compute the segmentation loss, one term per image.

        Args:
            segm_pred: (num_imgs, num_classes, H, W) predictions.
            gt_masks: per-image ground-truth masks.
            gt_labels: per-image ground-truth labels.

        Returns:
            dict: ``loss_segm`` mapped to the list of per-image losses.
        """
        loss_segm = []
        num_imgs, num_classes, mask_h, mask_w = segm_pred.size()
        for idx in range(num_imgs):
            cur_segm_pred = segm_pred[idx]
            cur_gt_masks = gt_masks[idx].float()
            cur_gt_labels = gt_labels[idx]
            segm_targets = self.get_targets(cur_segm_pred, cur_gt_masks,
                                            cur_gt_labels)
            if segm_targets is None:
                # No ground truth for this image: zero targets and zero
                # weights yield a zero loss while keeping the graph connected.
                loss = self.loss_segm(cur_segm_pred,
                                      torch.zeros_like(cur_segm_pred),
                                      torch.zeros_like(cur_segm_pred))
            else:
                loss = self.loss_segm(
                    cur_segm_pred,
                    segm_targets,
                    avg_factor=num_imgs * mask_h * mask_w)
            loss_segm.append(loss)
        return dict(loss_segm=loss_segm)

    def get_targets(self, segm_pred, gt_masks, gt_labels):
        """Build the per-class binary target map for one image.

        Returns None when the image has no ground-truth masks.
        """
        if gt_masks.size(0) == 0:
            return None
        num_classes, mask_h, mask_w = segm_pred.size()
        with torch.no_grad():
            # Resize GT masks to the prediction resolution, then re-binarise
            # since bilinear interpolation produces fractional values.
            downsampled_masks = F.interpolate(
                gt_masks.unsqueeze(0), (mask_h, mask_w),
                mode='bilinear',
                align_corners=False).squeeze(0)
            downsampled_masks = downsampled_masks.gt(0.5).float()
            segm_targets = torch.zeros_like(segm_pred, requires_grad=False)
            for obj_idx in range(downsampled_masks.size(0)):
                # Labels appear to be 1-based here: channel index is label - 1.
                segm_targets[gt_labels[obj_idx] - 1] = torch.max(
                    segm_targets[gt_labels[obj_idx] - 1],
                    downsampled_masks[obj_idx])
            return segm_targets
|
YOLACT segmentation head used in https://arxiv.org/abs/1904.02689.
Apply a semantic segmentation loss on feature space using layers that are
only evaluated during training to increase performance with no speed
penalty.
Args:
in_channels (int): Number of channels in the input feature map.
num_classes (int): Number of categories excluding the background
category.
loss_segm (dict): Config of semantic segmentation loss.
|
62599021c432627299fa3e9d
|
class MockStrategy(BaseStrategy):
    """Dry-run strategy: print the affected paths instead of acting on them."""

    def __init__(self, ordering):
        self.ordering = ordering

    def apply(self, item: FileInfo):
        # Side effect only: report which file would be affected.
        print(item.full_path)

    def process(self, duplicate_list):
        """Order the duplicates, keep the first, report the rest.

        Returns the list of full paths that were processed.
        """
        to_process = self.ordering(duplicate_list)[1:]
        affected = []
        for entry in self._enforce_file_info(to_process):
            self.apply(entry)
            affected.append(entry.full_path)
        return affected
|
Just print affected paths
|
62599021bf627c535bcb2361
|
class TemplateController(BaseController):
    """Fallback controller: tried when no other route matches.

    This minimal version always responds 404; it could instead render a
    template for the requested URL (e.g. ``return render('/%s' % url)``).
    """

    def view(self, url):
        # No template rendering configured; always signal Not Found.
        abort(404)
|
The fallback controller for tinvent.
By default, the final controller tried to fulfill the request
when no other routes match. It may be used to display a template
when all else fails, e.g.::
def view(self, url):
return render('/%s' % url)
Or if you're using Mako and want to explicitly send a 404 (Not
Found) response code when the requested template doesn't exist::
import mako.exceptions
def view(self, url):
try:
return render('/%s' % url)
except mako.exceptions.TopLevelLookupException:
abort(404)
|
62599021796e427e5384f62b
|
class IClassVDigitWindow(VDigitWindow):
    """Digitizing window similar to VDigitWindow, specialized for wxIClass
    training areas."""

    def __init__(self, parent, giface, map, properties):
        VDigitWindow.__init__(self, parent=parent, giface=giface,
                              Map=map, properties=properties)

    def _onLeftDown(self, event):
        # Intercept digitizing clicks to enforce iClass-specific rules
        # before delegating to the base implementation.
        action = self.toolbar.GetAction()
        if not action:
            return
        # Refuse clicks outside the current computational region.
        region = grass.region()
        e, n = self.Pixel2Cell(event.GetPositionTuple())
        if not ((region['s'] <= n <= region['n']) and
                (region['w'] <= e <= region['e'])):
            GWarning(parent=self.parent,
                     message=_("You are trying to create a training area "
                               "outside the computational region. "
                               "Please, use g.region to set the appropriate region first."))
            return
        # Adding a line requires a class (category) to be selected first;
        # offer to open the category manager when there is none.
        cat = self.GetCurrentCategory()
        if cat is None and action == "addLine":
            dlg = wx.MessageDialog(parent=self.parent,
                                   message=_("In order to create a training area, "
                                             "you have to select class first.\n\n"
                                             "There is no class yet, "
                                             "do you want to create one?"),
                                   caption=_("No class selected"),
                                   style=wx.YES_NO)
            if dlg.ShowModal() == wx.ID_YES:
                self.parent.OnCategoryManager(None)
            dlg.Destroy()
            event.Skip()
            return
        super(IClassVDigitWindow, self)._onLeftDown(event)

    def _addRecord(self):
        # iClass does not record attributes via the digitizer dialog.
        return False

    def _updateATM(self):
        # No attribute table manager to refresh in iClass.
        pass

    def _onRightUp(self, event):
        super(IClassVDigitWindow, self)._onRightUp(event)
        # Finishing an edit marks the parent's state as changed.
        self.parent.UpdateChangeState(changes=True)

    def GetCurrentCategory(self):
        """Return the index of the currently selected class, or None."""
        return self.parent.GetToolbar("iClass").GetSelectedCategoryIdx()

    def GetCategoryColor(self, cat):
        """Return the wx.Colour for class *cat* (color stored as 'R:G:B')."""
        r, g, b = map(int, self.parent.GetClassColor(cat).split(':'))
        return wx.Colour(r, g, b)
|
Class similar to VDigitWindow but specialized for wxIClass.
|
6259902130c21e258be996c3
|
class LiveServerTestCase(unittest.TestCase):
    """Base TestCase that runs the HTTP server in a background process.

    The server is started once per class in ``setUpClass`` and torn down
    in ``tearDownClass``. Helpers ``get``/``post`` issue requests against
    it and raise on HTTP error statuses.
    """

    @classmethod
    def setUpClass(cls):
        cls.host = '127.0.0.1'
        cls.port = find_free_ports(1)[0]
        cls.server_process = multiprocessing.Process(
            target=live_server_process, args=(cls.host, cls.port))
        cls.server_process.daemon = True
        cls.server_process.start()
        # Poll /ping until the server answers, giving up after ~1 second.
        start = time.time()
        while True:
            try:
                cls.get('/ping')
            except requests.ConnectionError:
                time.sleep(0.1)
                if time.time() - start > 1:
                    raise
            else:
                break

    @classmethod
    def tearDownClass(cls):
        cls.server_process.terminate()
        cls.server_process.join()

    @classmethod
    def get_live_server_url(cls):
        """Return the base URL of the live server."""
        return 'http://%s:%s' % (cls.host, cls.port)

    @classmethod
    def get(cls, path, params=None):
        """GET *path* on the live server; raise for HTTP error statuses."""
        uri = cls.get_live_server_url() + path
        if params is None:
            params = {}
        response = requests.get(uri, params=params)
        response.raise_for_status()
        return response

    @classmethod
    def post(cls, path, data=None):
        """POST *data* (JSON-encoded body) to *path* on the live server.

        Bug fix: this previously issued ``requests.get``, so the request
        body was sent with the wrong HTTP method and handlers expecting
        POST never matched.
        """
        uri = cls.get_live_server_url() + path
        if data is None:
            data = {}
        response = requests.post(uri, data=json.dumps(data))
        response.raise_for_status()
        return response
|
Base TestCase to inherit from which will run an HTTP server in
a background thread.
|
6259902121bff66bcd723b11
|
class LoginSerializer(Serializer):
    """Validates login credentials: a required email address and a
    password of 6-512 characters."""
    email = EmailField(required=True, label='email', max_length=512)
    password = CharField(required=True, label='password', max_length=512, min_length=6)
|
Serializer class
|
62599021507cdc57c63a5c54
|
class Assignment(models.Model):
    """Single assignment (work unit) linking a member to a job."""

    job = models.ForeignKey(Job, on_delete=models.CASCADE)
    member = models.ForeignKey('Member', on_delete=models.PROTECT)
    # Cached copy of job.type.activityarea.core, refreshed in pre_save.
    core_cache = models.BooleanField(_('Kernbereich'), default=False)
    job_extras = models.ManyToManyField(
        JobExtra, related_name='assignments', blank=True)
    amount = models.FloatField(_('Wert'))

    def __str__(self):
        return '%s #%s' % (Config.vocabulary('assignment'), self.id)

    def time(self):
        """Scheduled time, delegated to the job."""
        return self.job.time

    def is_core(self):
        """Whether the job's activity area counts as core ("Kernbereich")."""
        return self.job.type.activityarea.core

    @classmethod
    def pre_save(cls, sender, instance, **kwds):
        # Signal handler: refresh the cached core flag before saving.
        instance.core_cache = instance.is_core()

    class Meta:
        verbose_name = Config.vocabulary('assignment')
        verbose_name_plural = Config.vocabulary('assignment_pl')
|
Single assignment (work unit).
|
62599021925a0f43d25e8ef4
|
class TestBuildTwoTypeDef(TwoMixin, TypeDefMixin, BuildQuantityMixin, TestCase):
    """Test building a group that has a nested type def with quantity 2.

    All behaviour is supplied by the mixins; no extra test code needed.
    """
    pass
|
Test building a group that has a nested type def with quantity 2
|
625990215e10d32532ce405d
|
class TopPortletRenderer(ColumnPortletManagerRenderer):
    """Renderer for the content-well portlets (IThemeSchoolsPortlets)."""
    adapts(Interface, IDefaultBrowserLayer, IBrowserView, IThemeSchoolsPortlets)
    template = ViewPageTemplateFile('templates/renderer.pt')
|
A renderer for the content-well portlets
|
6259902121a7993f00c66e2c
|
class ReverseProxied(object):
    """WSGI middleware honouring X-Script-Name / X-Scheme headers.

    Lets a reverse proxy quietly bind the app to a URL prefix and an
    HTTP scheme different from what the app sees locally; the proxy must
    set the corresponding headers (see flask snippet 35).

    :param app: the wrapped WSGI application
    """

    def __init__(self, app):
        self.app = app

    def __call__(self, environ, start_response):
        prefix = environ.get('HTTP_X_SCRIPT_NAME', '')
        if prefix:
            # Mount the app under the proxied prefix and strip it from
            # the request path so routing still works.
            environ['SCRIPT_NAME'] = prefix
            path = environ['PATH_INFO']
            if path.startswith(prefix):
                environ['PATH_INFO'] = path[len(prefix):]
        forwarded_scheme = environ.get('HTTP_X_SCHEME', '')
        if forwarded_scheme:
            environ['wsgi.url_scheme'] = forwarded_scheme
        return self.app(environ, start_response)
|
Wrap the application in this middleware and configure the
front-end server to add these headers, to let you quietly bind
this to a URL other than / and to an HTTP scheme that is
different than what is used locally.
In nginx:
location /myprefix {
proxy_pass http://192.168.0.1:5001;
proxy_set_header Host $host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Scheme $scheme;
proxy_set_header X-Script-Name /myprefix;
}
:param app: the WSGI application
snippet from http://flask.pocoo.org/snippets/35/
|
62599021796e427e5384f62f
|
class System(object):
    """Holds information about one star system.

    Most of the interesting data is the aggregate data: two sets are
    kept, the ``*_full`` totals covering every planet in the system and
    the filtered totals covering only planets approved by the
    aggregation filter.
    """

    def __init__(self, idnum, name, position, x, y, stype, extra):
        self.is_quasispace = False
        self.idnum = idnum
        self.name = name
        self.x = x
        self.y = y
        # Map drawing coordinates: 1 unit per 10 map units, y axis flipped.
        self.draw_x = int(x / 10)
        self.draw_y = 1000 - int(y / 10)
        self.position = position
        self.stype = stype
        self.extra = extra
        self.highlight = True
        self.planets = []
        self.mineral_agg = None
        self.mineral_agg_full = None
        self.bio_agg = None
        self.bio_agg_full = None
        self.bio_danger_agg = None
        self.bio_danger_agg_full = None
        has_position = bool(self.position) and self.position != ''
        self.fullname = ('{} {}'.format(self.position, self.name)
                         if has_position else self.name)

    def addplanet(self, planet):
        """Attach *planet* to this system and return it."""
        self.planets.append(planet)
        return planet

    def distance_to(self, system):
        """Euclidean distance to *system* in map units (coordinates / 10)."""
        dx = self.x - system.x
        dy = self.y - system.y
        return math.sqrt(dx ** 2 + dy ** 2) / 10

    def apply_filters(self, dispfilter, aggfilter):
        """Recompute the highlight state and all aggregates.

        Returns (filtered mineral value, filtered bio total).
        """
        self.highlight = dispfilter.approve(self)
        self.mineral_agg = MinData()
        self.mineral_agg_full = MinData()
        self.bio_agg = 0
        self.bio_agg_full = 0
        self.bio_danger_agg = 0
        self.bio_danger_agg_full = 0
        for world in self.planets:
            # Full aggregates always include every planet...
            self.bio_agg_full += world.bio
            self.bio_danger_agg_full += world.bio_danger
            self.mineral_agg_full.add(world.mineral)
            # ...filtered aggregates only the approved ones.
            if aggfilter.approve(world):
                self.bio_agg += world.bio
                self.bio_danger_agg += world.bio_danger
                self.mineral_agg.add(world.mineral)
        return (self.mineral_agg.value(), self.bio_agg)
|
Class to hold information about a specific system. Most of the "fun"
data in here is the aggregate data. We keep track of two sets of
aggregates; one which holds the total value of the system, and one
which holds the value which might be modified by safety filters.
|
62599021507cdc57c63a5c56
|
class PriorityQueue:
    """Priority queue built on the standard-library ``heapq`` module.

    Items are stored internally as ``(priority, item)`` pairs so the heap
    ordering is always determined by the supplied priority function; the
    item with the smallest priority comes out first.

    Parameters
    ----------
    items : list
        A list backing the priority queue; may be empty. It is normalised
        in place into (priority, item) pairs.
    priority_function : Callable[[Any], float]
        Computes the priority of an item.
    """

    def __init__(self, items: "list[Items]", priority_function: "PriorityFunction") -> None:
        self.__priority_function = priority_function
        # Bug fix: the initial items used to be heapified raw while add()
        # pushed (priority, item) pairs -- mixing the two representations
        # broke the heap ordering. Normalise everything to pairs.
        self.__items = [(priority_function(item), item) for item in items]
        heapq.heapify(self.__items)

    def __len__(self):
        return len(self.__items)

    def add(self, item: "Any") -> None:
        """Insert *item* with its computed priority."""
        heapq.heappush(self.__items, (self.__priority_function(item), item))

    def pop(self) -> "Item":
        """Remove and return the item with the smallest priority.

        Bug fix: previously returned the internal (priority, item) pair,
        inconsistent with ``top`` which returns the bare item.
        """
        return heapq.heappop(self.__items)[1]

    def top(self) -> "Any":
        """Return, without removing, the smallest-priority item."""
        return self.__items[0][1]
|
Implementation of a priority queue data structure.
Based on the heapq module, part of the standard library.
This utility class encapsulates the most common interactions with the heapq module.
Parameters
----------
items: list[Any]
A list backing the priority queue, can be empty.
priority_function : Callable[[Any], float]
A function used to calculate the priorities of the items in the queue.
The queue is ordered according to these priorities. The first element is the one with the smallest priority.
|
625990215166f23b2e244285
|
class _FakeUnitTestFileBackedSQSQueue(_FakeFileBackedSQSQueue):
    """File-backed fake SQS queue used transiently during unit-test sessions."""

    # Random per-session account id keeps concurrent test runs from
    # sharing queue state on disk.
    _FAKE_AWS_ACCT = uuid.uuid4().hex
    _FAKE_QUEUE_URL = f"https://fake-us-region.queue.amazonaws.com/{_FAKE_AWS_ACCT}/{UNITTEST_FAKE_QUEUE_NAME}"
    _QUEUE_DATA_FILE = str(FAKE_QUEUE_DATA_PATH / f"{_FAKE_AWS_ACCT}_{UNITTEST_FAKE_QUEUE_NAME}.pickle")
|
Subclass of the local file-backed queue used transiently during unit test sessions
|
6259902191af0d3eaad3acd7
|
class TypeEditPage(SchemaListingPage):
    """Form wrapper so we can get a form with layout.

    An explicit subclass (rather than plone.z3cform.layout's wrap_form)
    so the type name can be injected into the form label.
    """
    index = ViewPageTemplateFile('tabbed_forms.pt')

    @property
    def tabs(self):
        # (tab label, traversal target) pairs; None means this view.
        return (
            ('Fields', None),
            ('Behaviors', '@@behaviors'),
        )

    @property
    def form(self):
        # Dynamic (through-the-web) schemas are editable; others read-only.
        if self.context.fti.hasDynamicSchema:
            return SchemaListing
        else:
            return ReadOnlySchemaListing
|
Form wrapper so we can get a form with layout.
We define an explicit subclass rather than using the wrap_form method
from plone.z3cform.layout so that we can inject the type name into
the form label.
|
625990213eb6a72ae038b516
|
class MetadataDescription(object):
    """Description for a backend module: collects organization, contact
    and UI-info descriptors and serialises them into a plain dict."""

    def __init__(self, entity_id):
        self.entity_id = entity_id
        self._organization = None
        self._contact_person = []
        self._ui_info = None

    def set_organization(self, organization):
        assert isinstance(organization, OrganizationDesc)
        self._organization = organization

    def add_contact_person(self, person):
        assert isinstance(person, ContactPersonDesc)
        self._contact_person.append(person)

    def set_ui_info(self, ui_info):
        assert isinstance(ui_info, UIInfoDesc)
        self._ui_info = ui_info

    def to_dict(self):
        """Render everything collected so far as a dict."""
        description = {"entityid": self.entity_id}
        if self._organization:
            description.update(self._organization.to_dict())
        if self._contact_person:
            description['contact_person'] = [
                person.to_dict() for person in self._contact_person
            ]
        if self._ui_info:
            description.update(self._ui_info.to_dict())
        return description
|
Description class for a backend module
|
6259902163f4b57ef00864cb
|
class DefinitionData(object):
    """Catalog request definition payload.

    :param catalog: the catalog input definition
    :type catalog: (optional) CatalogInput
    :param vendor_id: the vendorId that the catalog should belong to
    :type vendor_id: (optional) str
    """

    deserialized_types = {
        'catalog': 'ask_smapi_model.v1.skill.interaction_model.catalog.catalog_input.CatalogInput',
        'vendor_id': 'str'
    }
    attribute_map = {
        'catalog': 'catalog',
        'vendor_id': 'vendorId'
    }
    supports_multiple_types = False

    def __init__(self, catalog=None, vendor_id=None):
        self.__discriminator_value = None
        self.catalog = catalog
        self.vendor_id = vendor_id

    def to_dict(self):
        """Return the model's attributes as a dict, recursing into nested
        models, lists, dicts and Enum values.

        Modernised: the Python-2 era ``six.iteritems`` shim is replaced
        with the builtin ``dict.items()``.
        """
        result = {}
        for attr in self.deserialized_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    x.to_dict() if hasattr(x, "to_dict")
                    else x.value if isinstance(x, Enum)
                    else x
                    for x in value
                ]
            elif isinstance(value, Enum):
                result[attr] = value.value
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    k: (v.to_dict() if hasattr(v, "to_dict")
                        else v.value if isinstance(v, Enum)
                        else v)
                    for k, v in value.items()
                }
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Pretty-printed string form of :meth:`to_dict`."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        return self.to_str()

    def __eq__(self, other):
        if not isinstance(other, DefinitionData):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
|
Catalog request definitions.
:param catalog:
:type catalog: (optional) ask_smapi_model.v1.skill.interaction_model.catalog.catalog_input.CatalogInput
:param vendor_id: The vendorId that the catalog should belong to.
:type vendor_id: (optional) str
|
6259902121bff66bcd723b15
|
class ClipEntry:
    """One clipboard history entry: its text, capture time, and the
    offset/length of its backing storage slot."""

    def __init__(self, text=None, time=None, offset=-1, length=0):
        self.text = text
        # Default the timestamp to "now" when none was supplied.
        self.time = sys_time() if time is None else time
        self.offset = offset
        self.length = length

    def info(self):
        """Return the user-visible fields as a dict."""
        return {'time': self.time, 'text': self.text}
|
Clip entry information
|
62599021bf627c535bcb2367
|
class FunctionTask(models.Model):
    """Claim ticket for a functional task: tracks who submitted, claimed,
    rejected or transferred it, and when."""

    task = models.ForeignKey(TaskFlowInstance, related_name='function_task', help_text=_(u"职能化单"))
    creator = models.CharField(_(u"提单人"), max_length=32)
    create_time = models.DateTimeField(_(u"提单时间"), auto_now_add=True)
    claimant = models.CharField(_(u"认领人"), max_length=32, blank=True)
    claim_time = models.DateTimeField(_(u"认领时间"), blank=True, null=True)
    rejecter = models.CharField(_(u"驳回人"), max_length=32, blank=True)
    reject_time = models.DateTimeField(_(u"驳回时间"), blank=True, null=True)
    predecessor = models.CharField(_(u"转单人"), max_length=32, blank=True)
    transfer_time = models.DateTimeField(_(u"转单时间"), blank=True, null=True)
    status = models.CharField(_(u"单据状态"), max_length=32, default='submitted', choices=FUNCTION_TASK_STATUS)

    def __unicode__(self):
        return u"%s_%s" % (self.task, self.id)

    class Meta:
        verbose_name = _(u"职能化认领单 FunctionTask")
        verbose_name_plural = _(u"职能化认领单 FunctionTask")
        ordering = ['-id']

    @property
    def status_name(self):
        return self.get_status_display()

    @property
    def creator_name(self):
        return convert_readable_username(self.creator)

    @property
    def editor_name(self):
        # NOTE(review): ``editor`` is not declared on this model -- presumably
        # supplied elsewhere (annotation/subclass); confirm before relying on it.
        return convert_readable_username(self.editor)

    def claim_task(self, username):
        """Claim a submitted ticket for *username*."""
        if self.status != 'submitted':
            return {'result': False, 'message': 'task has been claimed by others'}
        self.claimant = username
        self.claim_time = timezone.now()
        self.status = 'claimed'
        self.save()
        return {'result': True, 'message': 'success', 'data': {}}

    def reject_task(self, username):
        """Reject a submitted ticket.

        Bug fix: the state change was never persisted -- claim_task and
        transfer_task both call save(), but reject_task did not.
        """
        if self.status != 'submitted':
            return {'result': False, 'message': 'task has been claimed by others'}
        self.rejecter = username
        self.reject_time = timezone.now()
        self.status = 'rejected'
        self.save()
        return {'result': True, 'message': 'success', 'data': {}}

    def transfer_task(self, username, claimant):
        """Hand a claimed/executed ticket from *username* over to *claimant*."""
        if self.status not in ['claimed', 'executed']:
            return {'result': False, 'message': 'task with status:%s cannot be transferred' % self.status}
        if self.claimant != username:
            return {'result': False, 'message': 'task can only be transferred by claimant'}
        self.predecessor = self.claimant
        self.transfer_time = timezone.now()
        self.claimant = claimant
        self.claim_time = timezone.now()
        self.save()
        return {'result': True, 'message': 'success', 'data': {}}
|
职能化认领单
|
62599021287bf620b6272aa0
|
class StatusBar(ttk.Frame):
    """Status bar at the bottom of the GUI: a status message on the left
    and a progress bar (with its own message) on the right.

    Parameters
    ----------
    parent : tkinter object
        The parent tkinter widget that will hold the status bar.
    hide_status : bool, optional
        ``True`` to hide the status message at the far left.
        Default: ``False``.
    """

    def __init__(self, parent, hide_status=False):
        super().__init__(parent)
        self.pack(side=tk.BOTTOM, padx=10, pady=2, fill=tk.X, expand=False)
        self._message = tk.StringVar()
        self._pbar_message = tk.StringVar()
        self._pbar_position = tk.IntVar()
        self._message.set("Ready")
        self._status(hide_status)
        self._pbar = self._progress_bar()

    @property
    def message(self):
        """tk.StringVar holding the status text."""
        return self._message

    def _status(self, hide_status):
        # Build the left-hand "Status: ..." area, unless hidden.
        if hide_status:
            return
        statusframe = ttk.Frame(self)
        statusframe.pack(side=tk.LEFT, anchor=tk.W, fill=tk.X, expand=False)
        lbltitle = ttk.Label(statusframe, text="Status:", width=6, anchor=tk.W)
        lbltitle.pack(side=tk.LEFT, expand=False)
        lblstatus = ttk.Label(statusframe, width=40,
                              textvariable=self._message, anchor=tk.W)
        lblstatus.pack(side=tk.LEFT, anchor=tk.W, fill=tk.X, expand=True)

    def _progress_bar(self):
        # Build the right-hand progress area; the bar itself starts hidden
        # (pack_forget) until start() is called.
        progressframe = ttk.Frame(self)
        progressframe.pack(side=tk.RIGHT, anchor=tk.E, fill=tk.X)
        lblmessage = ttk.Label(progressframe, textvariable=self._pbar_message)
        lblmessage.pack(side=tk.LEFT, padx=3, fill=tk.X, expand=True)
        pbar = ttk.Progressbar(progressframe, length=200,
                               variable=self._pbar_position,
                               maximum=100, mode="determinate")
        pbar.pack(side=tk.LEFT, padx=2, fill=tk.X, expand=True)
        pbar.pack_forget()
        return pbar

    def start(self, mode):
        """Show the progress bar in *mode* ('determinate'/'indeterminate')."""
        self._set_mode(mode)
        self._pbar.pack()

    def stop(self):
        """Reset and hide the progress bar."""
        self._pbar_message.set("")
        self._pbar_position.set(0)
        self._set_mode("determinate")
        self._pbar.pack_forget()

    def _set_mode(self, mode):
        # Indeterminate bars animate themselves; determinate bars are driven
        # externally through progress_update().
        self._pbar.config(mode=mode)
        if mode == "indeterminate":
            self._pbar.config(maximum=100)
            self._pbar.start()
        else:
            self._pbar.stop()
            self._pbar.config(maximum=100)

    def progress_update(self, message, position, update_position=True):
        """Update the progress message and, optionally, the bar position."""
        self._pbar_message.set(message)
        if update_position:
            self._pbar_position.set(position)
|
Status Bar for displaying the Status Message and Progress Bar at the bottom of the GUI.
Parameters
----------
parent: tkinter object
The parent tkinter widget that will hold the status bar
hide_status: bool, optional
``True`` to hide the status message that appears at the far left hand side of the status
frame otherwise ``False``. Default: ``False``
|
6259902163f4b57ef00864cc
|
class CertOptionNameAndValidPrincipalTypes(typing.NamedTuple):
    """A certificate option's name and the types of certificate principals
    for which this certificate option is valid."""
    # Option name as it appears in the certificate.
    name: str
    # Principal types this option applies to.
    valid_principal_types: typing.List[CertPrincipalType]
|
A certificate option's name and the types of certificate principals for
which this certificate option is valid.
|
62599021d18da76e235b78a7
|
class ContainerWrapper:
    """Wrapper for all of the API methods exposed in the container module.
    Used by the APIWrapper class."""

    def __init__(self, server_context: ServerContext):
        self.server_context = server_context

    def create(
        self,
        name: str,
        container_path: "str | None" = None,
        description: "str | None" = None,
        folder_type: "str | None" = None,
        is_workbook: "bool | None" = None,
        title: "str | None" = None,
    ):
        """Create a container (folder/project/workbook) on the server,
        delegating to the module-level ``create``."""
        return create(
            self.server_context, name, container_path, description, folder_type, is_workbook, title
        )

    def delete(self, container_path: "str | None" = None):
        """Delete the container at *container_path*, delegating to the
        module-level ``delete``."""
        return delete(self.server_context, container_path)
|
Wrapper for all of the API methods exposed in the container module. Used by the APIWrapper class.
|
62599021462c4b4f79dbc8be
|
class MockServices:
    """Stand-in for the Services API client used in unit tests.

    The ``flag`` class attribute steers ``get``/``list`` toward different
    canned responses so error and success paths can both be exercised.
    """
    flag = None

    def __init__(self):
        self.id = "117"
        self.name = "iptables"
        self.description = "description"
        self.type = "type"

    @staticmethod
    def create(name, service_type, description):
        """Return a new mock service populated from the arguments."""
        created = MockServices()
        created.id = "005"
        created.name = name
        created.description = description
        created.type = service_type
        return created

    def get(self, service_id):
        """Return a canned service; the shape depends on ``flag``."""
        result = MockServices()
        if self.flag == 1:
            # Mismatching id wrapped in a list: the "bad lookup" path.
            result.id = "asd"
            return [result]
        if self.flag == 2:
            # Echo the requested id back as a bare object.
            result.id = service_id
            return result
        return [result]

    def list(self):
        """Return a canned one-element list of services."""
        result = MockServices()
        if self.flag == 1:
            result.id = "asd"
        return [result]

    @staticmethod
    def delete(service_id):
        """Echo the id back, mimicking a successful delete."""
        return service_id
|
Mock of Services class
|
625990216fece00bbaccc86c
|
class LocalComponentBase(SageObject):
    r"""Base class for local components of newforms.

    Not to be directly instantiated; use the :func:`~LocalComponent`
    constructor function.
    """

    def __init__(self, newform, prime, twist_factor):
        self._p = prime
        self._f = newform
        self._twist_factor = twist_factor

    @abstract_method
    def species(self):
        pass

    @abstract_method
    def check_tempered(self):
        pass

    def _repr_(self):
        return "Smooth representation of GL_2(Q_%s) with conductor %s^%s" % (
            self.prime(), self.prime(), self.conductor())

    def newform(self):
        """Return the newform this local component is attached to."""
        return self._f

    def prime(self):
        """Return the prime p."""
        return self._p

    def conductor(self):
        """Return the p-adic valuation of the newform's level."""
        return self.newform().level().valuation(self.prime())

    def coefficient_field(self):
        """Return the Hecke eigenvalue field of the newform."""
        return self.newform().hecke_eigenvalue_field()

    def twist_factor(self):
        return self._twist_factor

    def central_character(self):
        """Return the central character as a smooth character of Q_p^*.

        The nebentypus is split by CRT into its p-part and prime-to-p
        part; the result is then twisted by p^twist_factor.
        """
        from sage.rings.arith import crt
        chi = self.newform().character()
        f = self.prime() ** self.conductor()
        N = self.newform().level() // f
        G = DirichletGroup(f, self.coefficient_field())
        # p-primary part of the nebentypus, via CRT lifts of the generators.
        chip = G([chi(crt(ZZ(x), 1, f, N)) for x in G.unit_gens()]).primitive_character()
        a = crt(1, self.prime(), f, N)
        if chip.conductor() == 1:
            return SmoothCharacterGroupQp(self.prime(), self.coefficient_field()).character(
                0, [chi(a) * self.prime()**self.twist_factor()])
        else:
            return SmoothCharacterGroupQp(self.prime(), self.coefficient_field()).character(
                chip.conductor().valuation(self.prime()),
                list((~chip).values_on_gens()) + [chi(a) * self.prime()**self.twist_factor()])

    def __cmp__(self, other):
        # NOTE(review): Python-2 style comparison (cmp); would need
        # __eq__/__lt__ on Python 3 -- confirm which interpreter runs this.
        return (cmp(type(self), type(other))
                or cmp(self.prime(), other.prime())
                or cmp(self.newform(), other.newform())
                or cmp(self.twist_factor(), other.twist_factor()))
|
Base class for local components of newforms. Not to be directly instantiated; use the :func:`~LocalComponent` constructor function.
|
62599021a8ecb033258720d4
|
class CheckAnswerAPI(APIView): <NEW_LINE> <INDENT> permission_classes = (IsAuthenticated,) <NEW_LINE> def get(self, request, slug): <NEW_LINE> <INDENT> quiz = get_object_or_404(Quiz, slug=slug) <NEW_LINE> answer = get_object_or_404(QuestionAnswer, pk=request.GET.get('pk')) <NEW_LINE> quiz_manager, created = QuizManager.objects.get_or_create( quiz=quiz, user=request.user ) <NEW_LINE> if quiz_manager.completed: <NEW_LINE> <INDENT> quiz_manager.remove_correct_answers() <NEW_LINE> quiz_manager.set_as_uncompleted() <NEW_LINE> <DEDENT> if answer.is_correct: <NEW_LINE> <INDENT> quiz_manager.increase_correct_answers() <NEW_LINE> <DEDENT> return Response({'checked': True}, status=status.HTTP_200_OK)
|
API for checking answers.
Find the quiz with given slug.
Get a pk from the GET data and find the answer.
If a user already completed the quiz call QuizManager.set_as_uncompleted()
and remove_correct_answers().
User will pass the quiz from scratch.
Check if the answer is correct -> increase the quiz manager
correct_answers by QuizManager.increase_correct_answers().
Args:
slug(str): slug of the quiz question
Request data args:
pk(int): selected QuizAnswer pk
Returns:
checked(bool): True
response status 200
|
6259902156b00c62f0fb3774
|
class CheckcveViewSet(mixins.ListModelMixin, mixins.CreateModelMixin, mixins.RetrieveModelMixin, mixins.DestroyModelMixin, viewsets.GenericViewSet): <NEW_LINE> <INDENT> queryset = Checkcve.objects.all() <NEW_LINE> serializer_class = serializers.CheckcveSerializer <NEW_LINE> def update(self, request, pk=None): <NEW_LINE> <INDENT> checkcve = self.get_object() <NEW_LINE> serializer = serializers.CheckcveUpdateSerializer(checkcve, data=request.data) <NEW_LINE> if serializer.is_valid(): <NEW_LINE> <INDENT> serializer.save() <NEW_LINE> return Response(serializer.data) <NEW_LINE> <DEDENT> return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> def partial_update(self, request, pk=None): <NEW_LINE> <INDENT> checkcve = self.get_object() <NEW_LINE> serializer = serializers.CheckcveUpdateSerializer(checkcve, data=request.data, partial=True) <NEW_LINE> if serializer.is_valid(): <NEW_LINE> <INDENT> serializer.save() <NEW_LINE> return Response(serializer.data) <NEW_LINE> <DEDENT> return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> @action(detail=True) <NEW_LINE> def check_cve(self, request, pk=None): <NEW_LINE> <INDENT> obj = self.get_object() <NEW_LINE> response = obj.check_cve() <NEW_LINE> return Response({'status': True, 'message': str(response)})
|
API endpoint that allows groups to be viewed or edited.
|
625990211d351010ab8f49cb
|
class Document: <NEW_LINE> <INDENT> def __init__(self, filename): <NEW_LINE> <INDENT> self.enviroment_stack = {} <NEW_LINE> self.filename = filename <NEW_LINE> self.macrodict = {} <NEW_LINE> InputMacro(self) <NEW_LINE> self.environdict = {} <NEW_LINE> BeginEnvironmentMacro(self) <NEW_LINE> EndEnvironmentMacro(self) <NEW_LINE> self.output = "" <NEW_LINE> self.outputfile = file <NEW_LINE> <DEDENT> def add_macro(self, macro): <NEW_LINE> <INDENT> self.macrodict[ macro.get_macroname() ] = macro <NEW_LINE> <DEDENT> def has_macro(self, macroname): <NEW_LINE> <INDENT> return macroname in self.macrodict <NEW_LINE> <DEDENT> def get_macro(self, macroname): <NEW_LINE> <INDENT> return self.macrodict[ macroname ] <NEW_LINE> <DEDENT> def add_environment(self, environment): <NEW_LINE> <INDENT> self.environdict[ environment.get_environname() ] = environment <NEW_LINE> <DEDENT> def has_environment(self, environname): <NEW_LINE> <INDENT> return environname in self.environdict <NEW_LINE> <DEDENT> def get_environment(self, environname): <NEW_LINE> <INDENT> return self.environdict[ environname ] <NEW_LINE> <DEDENT> def render(self): <NEW_LINE> <INDENT> self.get_macro("input").render(self.filename, self.output) <NEW_LINE> <DEDENT> def render_file(self, filename): <NEW_LINE> <INDENT> finputcontent = open( filename ).read() <NEW_LINE> p = re.compile("\\\\(\w*)\\{([\w.]*)\\}") <NEW_LINE> for m in p.finditer( finputcontent ): <NEW_LINE> <INDENT> macroname = m.group( 1 ) <NEW_LINE> macroarg = m.group( 2 ) <NEW_LINE> if( self.has_macro(macroname) ): <NEW_LINE> <INDENT> self.get_macro(macroname).render(macroarg,self.output) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_output(self): <NEW_LINE> <INDENT> return self.output <NEW_LINE> <DEDENT> def add_output(self,line): <NEW_LINE> <INDENT> self.output = self.output + line <NEW_LINE> <DEDENT> def get_outputfile(self): <NEW_LINE> <INDENT> return self.outputfile <NEW_LINE> <DEDENT> def set_outputfile(self,file): <NEW_LINE> <INDENT> self.outputfile = 
file
|
6259902191af0d3eaad3acdb
|
|
class UserCreationSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> email = serializers.EmailField(validators=[UniqueValidator(queryset=User.objects.all())], label="이메일", style={'placeholder': "회원 이메일"}) <NEW_LINE> password1 = serializers.CharField(write_only=True, min_length=8, label="비밀번호(8자 이상)", style={'placeholder': "패스워드 입력", 'input_type': 'password'}) <NEW_LINE> password2 = serializers.CharField(write_only=True, min_length=8, label="비밀번호 재입력(8자 이상)", style={'placeholder': "패스워드 재입력", 'input_type': 'password'}) <NEW_LINE> name = serializers.CharField(label="이름", style={'placeholder': "유저의 실명"}) <NEW_LINE> nickname = serializers.CharField(validators=[UniqueValidator(queryset=User.objects.all())], label="닉네임", style={'placeholder': "사이트에서 사용할 별명"}) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = ( 'pk', 'email', 'password1', 'password2', 'name', 'nickname', 'post_code', 'road_address', 'detail_address', 'date_joined', 'last_login', 'user_type', ) <NEW_LINE> write_only_fields = ('email', 'name', 'nickname', 'password1', 'password2') <NEW_LINE> read_only_fields = ('post_code', 'road_address', 'detail_address', 'date_joined', 'last_login', 'user_type',) <NEW_LINE> <DEDENT> def validate_email(self, email): <NEW_LINE> <INDENT> if User.objects.filter(email=email).exists(): <NEW_LINE> <INDENT> raise serializers.ValidationError("중복되는 이메일이 존재합니다.") <NEW_LINE> <DEDENT> return email <NEW_LINE> <DEDENT> def validate_password(self, data): <NEW_LINE> <INDENT> if data['password1'] != data['password2']: <NEW_LINE> <INDENT> raise serializers.ValidationError('비밀번호가 서로 일치하지 않습니다.') <NEW_LINE> <DEDENT> return data <NEW_LINE> <DEDENT> def create(self, validated_data): <NEW_LINE> <INDENT> user = User.objects.create_user(email=validated_data['email'], password=validated_data['password1'], name=validated_data['name'], nickname=validated_data['nickname'] ) <NEW_LINE> return user
|
소셜 로그인이 아닌 일반 회원가입을 의미합니다.
|
62599021d18da76e235b78a8
|
class BaseCatalogueDecluster(object): <NEW_LINE> <INDENT> @abc.abstractmethod <NEW_LINE> def decluster(self, catalogue, config): <NEW_LINE> <INDENT> return
|
Abstract base class for implementation of declustering algorithms
|
625990215e10d32532ce4060
|
class HighlightStyleMenu(Bubble): <NEW_LINE> <INDENT> scroll_view = ObjectProperty(None) <NEW_LINE> editor_container = ObjectProperty(None) <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(HighlightStyleMenu, self).__init__(**kwargs) <NEW_LINE> self.layout = GridLayout(cols=1, spacing=2, size_hint_y=None) <NEW_LINE> self.layout.bind(minimum_height=self.layout.setter('height')) <NEW_LINE> self.get_available_styles() <NEW_LINE> <DEDENT> def get_available_styles(self): <NEW_LINE> <INDENT> self.styles = [name for name in get_all_styles()] <NEW_LINE> self.styles.append('default TE') <NEW_LINE> sorted_style = sorted(self.styles) <NEW_LINE> for style in sorted_style: <NEW_LINE> <INDENT> self.add_style_to_list(style) <NEW_LINE> <DEDENT> self.scroll_view.add_widget(self.layout) <NEW_LINE> <DEDENT> def add_style_to_list(self, name): <NEW_LINE> <INDENT> button = Button(text=name, size_hint_y=None,height=40) <NEW_LINE> button.bind(on_release=lambda w: self.change_style(w)) <NEW_LINE> self.layout.add_widget(button) <NEW_LINE> <DEDENT> def change_style(self, widget): <NEW_LINE> <INDENT> self.parent.text = widget.text <NEW_LINE> self.parent.display_state = self.parent.states[self.parent.state_index] <NEW_LINE> if self.editor_container: <NEW_LINE> <INDENT> self.editor_container.current_tab.content.editor.change_style(widget.text)
|
Highlight style menu used to select the style of the text (and :py:class:`editorcontainer.editor.editor.Editor`)
displayed in the :py:class:`editorcontainer.editor.editor.Editor`.
|
62599021a8ecb033258720d6
|
@serializable <NEW_LINE> class CorProfilForApp(GenericRepository): <NEW_LINE> <INDENT> __tablename__ = "cor_profil_for_app" <NEW_LINE> __table_args__ = {"schema": "utilisateurs", "extend_existing": True} <NEW_LINE> id_application = db.Column( db.Integer, ForeignKey("utilisateurs.t_applications.id_application"), primary_key=True, ) <NEW_LINE> id_profil = db.Column( db.Integer, ForeignKey("utilisateurs.t_profils.id_profil"), primary_key=True ) <NEW_LINE> profil_rel = relationship("TProfils") <NEW_LINE> @classmethod <NEW_LINE> def add_cor(cls, id_application, ids_profil): <NEW_LINE> <INDENT> dict_add = dict() <NEW_LINE> dict_add["id_application"] = id_application <NEW_LINE> for d in ids_profil: <NEW_LINE> <INDENT> dict_add["id_profil"] = d <NEW_LINE> cls.post(dict_add) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def del_cor(cls, id_application, ids_profil): <NEW_LINE> <INDENT> for d in ids_profil: <NEW_LINE> <INDENT> cls.query.filter(cls.id_application == id_application).filter( cls.id_profil == d ).delete() <NEW_LINE> db.session.commit()
|
Classe de correspondance entre la table t_applications et la table t_profils
|
62599021287bf620b6272aa4
|
class FlowGoRelativeViscosityModelCosta1(pyflowgo.base.flowgo_base_relative_viscosity_model. FlowGoBaseRelativeViscosityModel): <NEW_LINE> <INDENT> def __init__(self) -> None: <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self._strain_rate = 1. <NEW_LINE> <DEDENT> def read_initial_condition_from_json_file(self, filename): <NEW_LINE> <INDENT> with open(filename) as data_file: <NEW_LINE> <INDENT> data = json.load(data_file) <NEW_LINE> self._strain_rate = float(data['relative_viscosity_parameters']['strain_rate']) <NEW_LINE> <DEDENT> <DEDENT> def compute_relative_viscosity(self, state): <NEW_LINE> <INDENT> phi = state.get_crystal_fraction() <NEW_LINE> if self._strain_rate == 1.0: <NEW_LINE> <INDENT> delta_1 = 11.4 <NEW_LINE> gama_1 = 1.6 <NEW_LINE> phi_star_1 = 0.67 <NEW_LINE> epsilon_1 = 0.01 <NEW_LINE> f = (1. - epsilon_1) * math.erf(min(25., ( (math.sqrt(math.pi) / (2. * (1. - epsilon_1))) * (phi / phi_star_1) * ( 1. + (math.pow((phi / phi_star_1), gama_1)))))) <NEW_LINE> relative_viscosity = (1. + math.pow((phi / phi_star_1), delta_1)) / ( math.pow((1. - f), (2.5 * phi_star_1))) <NEW_LINE> return relative_viscosity <NEW_LINE> <DEDENT> if self._strain_rate == 0.0001: <NEW_LINE> <INDENT> delta_1 = 11.48 <NEW_LINE> gama_1 = 1.52 <NEW_LINE> phi_star_1 = 0.62 <NEW_LINE> epsilon_1 = 0.005 <NEW_LINE> f = (1. - epsilon_1) * math.erf(min(25., ( (math.sqrt(math.pi) / (2. * (1. - epsilon_1))) * (phi / phi_star_1) * ( 1. + (math.pow((phi / phi_star_1), gama_1)))))) <NEW_LINE> relative_viscosity = (1. + math.pow((phi / phi_star_1), delta_1)) / ( math.pow((1. - f), (2.5 * phi_star_1))) <NEW_LINE> return relative_viscosity <NEW_LINE> <DEDENT> <DEDENT> def is_notcompatible(self, state): <NEW_LINE> <INDENT> return False
|
This methods permits to calculate the effect of crystal cargo on viscosity according to Costa et al []
This relationship considers the strain rate and allows to evalutate the effect of high crystal fraction
(above maximum packing).
The input parameters include the variable crystal fraction (phi) and other parameters depending on the aspect ratio
of the crystals.
Here the method costa1 corresponds to case where:
all crystals are spherical
for strain-rate = 1s-1, phi_max = 0.61
for strain-rate = 10-4 s-1, phi_max= 0.54,
The inputs parameters correspond to the particles A from Cimarelli et al. [2011]
References:
---------
Cimarelli, C., A. Costa, S. Mueller, and H. M. Mader (2011), Rheology of magmas with bimodal crystal size and shape
distributions: Insights from analog experiments, Geochem. Geophys. Geosyst., 12, Q07024, doi:10.1029/2011GC003606.
Costa, A., L. Caricchi, and N. Bagdassarov (2009), A model for the rheology of particle‐bearing suspensions
and partially molten rocks, Geochem. Geophys. Geosyst., 10, Q03010, doi:10.1029/2008GC002138.
|
62599021d164cc6175821e2d
|
class DescribeRestoreJobTest(test_base_testcase.APITestCase): <NEW_LINE> <INDENT> @mock.patch('magnetodb.storage.describe_restore_job') <NEW_LINE> def test_describe_restore_job(self, describe_restore_job_mock): <NEW_LINE> <INDENT> headers = {'Content-Type': 'application/json', 'Accept': 'application/json'} <NEW_LINE> conn = httplib.HTTPConnection('localhost:8080') <NEW_LINE> the_uuid = uuid.uuid4() <NEW_LINE> backup_uuid = uuid.uuid4() <NEW_LINE> url = '/v1/management/default_tenant/the_table/restores/{}'.format( the_uuid.hex) <NEW_LINE> describe_restore_job_mock.return_value = models.RestoreJobMeta( the_uuid, 'default_table', models.RestoreJobMeta.RESTORE_STATUS_RESTORING, backup_uuid, 'source' ) <NEW_LINE> conn.request("GET", url, headers=headers) <NEW_LINE> response = conn.getresponse() <NEW_LINE> json_response = response.read() <NEW_LINE> response_model = json.loads(json_response) <NEW_LINE> self.assertEqual('default_table', response_model['table_name']) <NEW_LINE> self.assertEqual(backup_uuid.hex, response_model['backup_id'])
|
The test for v1 ReST API DescribeRestoreJobController.
|
6259902163f4b57ef00864ce
|
class BaseDeleteView(DeletionMixin, BaseDetailView): <NEW_LINE> <INDENT> pass
|
Base view for deleting an object.
Using this base class requires subclassing to provide a response mixin.
|
625990211d351010ab8f49ce
|
class Meta: <NEW_LINE> <INDENT> swappable = swapper.swappable_setting('kernel', 'JointFacultyMembership')
|
Meta class for JointFacultyMembership
|
625990216e29344779b01508
|
class Manifest(EqualityBase): <NEW_LINE> <INDENT> def __init__(self, default_client_spec=None, client_spec_list=None): <NEW_LINE> <INDENT> self.default_client_spec = default_client_spec <NEW_LINE> if not client_spec_list is None: <NEW_LINE> <INDENT> self.client_spec_list = client_spec_list[:] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.client_spec_list = None <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return ('<Manifest default_client_spec:%s client_spec_list:%s>' % (self.default_client_spec, str(self.client_spec_list))) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.__str__()
|
Represents the manifest.
|
6259902266673b3332c312a5
|
class ComputeSnapshotsListRequest(_messages.Message): <NEW_LINE> <INDENT> filter = _messages.StringField(1) <NEW_LINE> maxResults = _messages.IntegerField(2, variant=_messages.Variant.UINT32, default=500) <NEW_LINE> orderBy = _messages.StringField(3) <NEW_LINE> pageToken = _messages.StringField(4) <NEW_LINE> project = _messages.StringField(5, required=True)
|
A ComputeSnapshotsListRequest object.
Fields:
filter: Filter expression for filtering listed resources.
maxResults: Maximum count of results to be returned.
orderBy: A string attribute.
pageToken: Tag returned by a previous list request when that list was
truncated to maxResults. Used to continue a previous list request.
project: Name of the project scoping this request.
|
6259902230c21e258be996cf
|
class ExactInference(InferenceModule): <NEW_LINE> <INDENT> def initializeUniformly(self, gameState): <NEW_LINE> <INDENT> self.beliefs = util.Counter() <NEW_LINE> for p in self.legalPositions: self.beliefs[p] = 1.0 <NEW_LINE> self.beliefs.normalize() <NEW_LINE> <DEDENT> def observe(self, observation, gameState): <NEW_LINE> <INDENT> noisyDistance = observation <NEW_LINE> emissionModel = busters.getObservationDistribution(noisyDistance) <NEW_LINE> pacmanPosition = gameState.getPacmanPosition() <NEW_LINE> "*** YOUR CODE HERE ***" <NEW_LINE> allPossible = util.Counter() <NEW_LINE> if noisyDistance == None: <NEW_LINE> <INDENT> allPossible[self.getJailPosition()] = 1.0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for p in self.legalPositions: <NEW_LINE> <INDENT> trueDistance = util.manhattanDistance(p, pacmanPosition) <NEW_LINE> if emissionModel[trueDistance] > 0: <NEW_LINE> <INDENT> emissionProb = emissionModel[trueDistance] <NEW_LINE> if p in self.beliefs: <NEW_LINE> <INDENT> allPossible[p] = emissionProb * self.beliefs[p] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> "*** END YOUR CODE HERE ***" <NEW_LINE> allPossible.normalize() <NEW_LINE> self.beliefs = allPossible <NEW_LINE> <DEDENT> def elapseTime(self, gameState): <NEW_LINE> <INDENT> allPossible = util.Counter() <NEW_LINE> pacmanPosition = gameState.getPacmanPosition() <NEW_LINE> allPossible = util.Counter() <NEW_LINE> legalPositions = self.legalPositions <NEW_LINE> for oldPosition in legalPositions: <NEW_LINE> <INDENT> previousBelief = self.beliefs[oldPosition] <NEW_LINE> newPosDist = self.getPositionDistribution(self.setGhostPosition(gameState, oldPosition)) <NEW_LINE> for newPos, prob in newPosDist.items(): <NEW_LINE> <INDENT> allPossible[newPos] += (prob * previousBelief) <NEW_LINE> <DEDENT> <DEDENT> allPossible.normalize() <NEW_LINE> self.beliefs = allPossible <NEW_LINE> <DEDENT> def getBeliefDistribution(self): <NEW_LINE> <INDENT> return self.beliefs
|
The exact dynamic inference module should use forward-algorithm updates to
compute the exact belief function at each time step.
|
62599022d164cc6175821e2f
|
class PublicUserApiTests(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.client = APIClient() <NEW_LINE> <DEDENT> def test_create_valid_user_success(self): <NEW_LINE> <INDENT> payload = { 'email': 'test@snikers.com', 'password': 'testpass', 'name': 'Test name', } <NEW_LINE> res = self.client.post(CREATE_USER_URL, payload) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_201_CREATED) <NEW_LINE> user = get_user_model().objects.get(**res.data) <NEW_LINE> self.assertTrue(user.check_password(payload['password'])) <NEW_LINE> self.assertNotIn('password', res.data) <NEW_LINE> <DEDENT> def test_user_exists(self): <NEW_LINE> <INDENT> payload = {'email': 'test@snikers.com', 'password': 'testpass'} <NEW_LINE> create_user(**payload) <NEW_LINE> res = self.client.post(CREATE_USER_URL, payload) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> def test_password_too_short(self): <NEW_LINE> <INDENT> payload = {'email': 'test@snikers.com', 'password': 'pw'} <NEW_LINE> res = self.client.post(CREATE_USER_URL, payload) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) <NEW_LINE> user_exists = get_user_model().objects.filter( email=payload['email'] ).exists() <NEW_LINE> self.assertFalse(user_exists) <NEW_LINE> <DEDENT> def test_create_token_for_user(self): <NEW_LINE> <INDENT> payload = {'email': 'test@snikers.com', 'password': 'testpass'} <NEW_LINE> create_user(**payload) <NEW_LINE> res = self.client.post(TOKEN_URL, payload) <NEW_LINE> self.assertIn('token', res.data) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_200_OK) <NEW_LINE> <DEDENT> def test_create_token_invalid_credentials(self): <NEW_LINE> <INDENT> create_user(email='test@snikers', password='testpass') <NEW_LINE> payload = {'email': 'test@snikers', 'password': 'wrong'} <NEW_LINE> res = self.client.post(TOKEN_URL, payload) <NEW_LINE> self.assertNotIn('token', res.data) <NEW_LINE> self.assertEqual(res.status_code, 
status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> def test_create_token_no_user(self): <NEW_LINE> <INDENT> payload = {'email': 'test@snikers', 'password': 'testpass'} <NEW_LINE> res = self.client.post(TOKEN_URL, payload) <NEW_LINE> self.assertNotIn('token', res.data) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> def test_create_token_missing_field(self): <NEW_LINE> <INDENT> res = self.client.post(TOKEN_URL, {'email': 'one', 'password': ''}) <NEW_LINE> self.assertNotIn('token', res.data) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> def test_retrieve_user_unauthorized(self): <NEW_LINE> <INDENT> res = self.client.get(ME_URL) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)
|
Test the users API (public)
|
62599022462c4b4f79dbc8c4
|
class Ubuntu1404Mixin(DebianMixin): <NEW_LINE> <INDENT> OS_TYPE = os_types.UBUNTU1404
|
Class holding Ubuntu1404 specific VM methods and attributes.
|
6259902266673b3332c312a7
|
class LearningCurves(): <NEW_LINE> <INDENT> def __init__(self,X, y, estimator, validation, metric, step_size, shuffle=False, metric_name='metric'): <NEW_LINE> <INDENT> self.X = X <NEW_LINE> self.y = y <NEW_LINE> self.estimator = estimator <NEW_LINE> self.validation = validation <NEW_LINE> self.metric = metric <NEW_LINE> self.step_size = step_size <NEW_LINE> self.shuffle = shuffle <NEW_LINE> self.metric_name = metric_name <NEW_LINE> self.table = None <NEW_LINE> <DEDENT> def __computaion(self): <NEW_LINE> <INDENT> if self.shuffle: <NEW_LINE> <INDENT> self.X, self.y = shuffle(self.X, self.y) <NEW_LINE> <DEDENT> guardar = [] <NEW_LINE> for step in range(self.step_size, len(self.X)+1, self.step_size): <NEW_LINE> <INDENT> validacao_cruzada = cross_validate(self.estimator, self.X.iloc[:step,:], self.y.iloc[:step], scoring=self.metric, cv=self.validation, return_train_score=True, n_jobs=-1) <NEW_LINE> treino = np.mean(validacao_cruzada['train_score']) <NEW_LINE> teste = np.mean(validacao_cruzada['test_score']) <NEW_LINE> quantidade_exemplos = step <NEW_LINE> guardar.append((quantidade_exemplos, treino, teste)) <NEW_LINE> print(f'Samples: {quantidade_exemplos}') <NEW_LINE> print(f'Train: {np.round(treino, 3)} | Validation: {np.round(teste, 3)}\n') <NEW_LINE> <DEDENT> return guardar <NEW_LINE> <DEDENT> def learning_curves(self, figsize=(8,5), ylim=None): <NEW_LINE> <INDENT> self.table = pd.DataFrame(self.__computaion(), columns=['Sample size', 'Train','Validation']) <NEW_LINE> melt = pd.melt(self.table, id_vars='Sample size', value_vars=['Train','Validation'], value_name='Score', var_name='Set') <NEW_LINE> f, ax = plt.subplots(figsize=figsize) <NEW_LINE> sn.pointplot(x='Sample size', y='Score', hue='Set', palette=('green','red'), data=melt, ax=ax) <NEW_LINE> ax.set_title(self.metric_name.title()) if not isinstance(self.metric, str) else ax.set_title(self.metric.title()) <NEW_LINE> ax.set(ylim=ylim)
|
Evaluates the performace of a machine learning estimator based on the increase of the sample size.
parameters:
X - Pandas dataframe.
y - target.
estimator - Algorithm or pipeline.
validation - cross validation, it can be a integer number or a function like: KFold, RepeatedKFold, etc.
metric - Metric chosen for the assessment. It can be a string like: 'accuracy', 'neg_mean_absolute_error' or
build with the make_scorer function.
step_size - Sample size that will be add in each training cycle.
shuffle - Deafault 'False', 'True' inr order to shuffle the data.
metric_name - String representing the name of the chosen metric.
It will be the title of the plot when thre make_scorer function is used.
method:
learning_curves - plot.
|
6259902230c21e258be996d1
|
class Renderer(base.Renderer): <NEW_LINE> <INDENT> @property <NEW_LINE> def available(self): <NEW_LINE> <INDENT> return bool(self.getApprofondimenti()) <NEW_LINE> <DEDENT> def getTitle(self): <NEW_LINE> <INDENT> path = self._getApprofondimentiPath(self.data.up_levels) <NEW_LINE> if path: <NEW_LINE> <INDENT> return self.context.unrestrictedTraverse(path).Title() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "Ulteriori Approfondimenti" <NEW_LINE> <DEDENT> <DEDENT> def getApprofondimenti(self): <NEW_LINE> <INDENT> folder_path = self._getApprofondimentiPath(self.data.up_levels) <NEW_LINE> if folder_path: <NEW_LINE> <INDENT> return self.context.portal_catalog( path={'query': folder_path, 'depth': 1}, sort_on='getObjPositionInParent', ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> @memoize <NEW_LINE> def _getApprofondimentiPath(self, level=2): <NEW_LINE> <INDENT> context = self.context <NEW_LINE> plone_view = getMultiAdapter( (aq_inner(context), self.request), name='plone' ) <NEW_LINE> item_path = context.getPhysicalPath() <NEW_LINE> catalog = getToolByName(context, 'portal_catalog') <NEW_LINE> if ( plone_view.isDefaultPageInFolder() or context.portal_type != 'Folder' ): <NEW_LINE> <INDENT> folder_path = item_path[:-1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> folder_path = item_path <NEW_LINE> <DEDENT> return self._lookForUpUA( context, folder_path, level=level, ua_id=self.data.ua_id ) <NEW_LINE> <DEDENT> def _lookForUpUA( self, context, folder_path, level=2, ua_id='ulteriori-appronfondimenti' ): <NEW_LINE> <INDENT> catalog = getToolByName(context, 'portal_catalog') <NEW_LINE> if level == 0: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> result = catalog( path=dict(query='/'.join(folder_path), depth=1), id=ua_id ) <NEW_LINE> if result: <NEW_LINE> <INDENT> return result[0].getPath() <NEW_LINE> <DEDENT> return self._lookForUpUA( context, folder_path[:-1], level=level - 1, ua_id=ua_id ) <NEW_LINE> <DEDENT> render = 
ViewPageTemplateFile('ulterioriapprofondimenti.pt')
|
Portlet renderer.
|
62599022507cdc57c63a5c60
|
class ReplayBuffer: <NEW_LINE> <INDENT> def __init__(self, action_size, buffer_size, batch_size, seed): <NEW_LINE> <INDENT> self.action_size = action_size <NEW_LINE> self.memory = deque(maxlen=buffer_size) <NEW_LINE> self.batch_size = batch_size <NEW_LINE> self.experience = namedtuple("Experience", field_names=["state", "action", "reward", "next_state", "done"]) <NEW_LINE> self.seed = random.seed(seed) <NEW_LINE> <DEDENT> def add(self, state, action, reward, next_state, done): <NEW_LINE> <INDENT> e = self.experience(state, action, reward, next_state, done) <NEW_LINE> self.memory.append(e) <NEW_LINE> <DEDENT> def sample(self): <NEW_LINE> <INDENT> experiences = random.sample(self.memory, k=self.batch_size) <NEW_LINE> states = torch.from_numpy(np.vstack([e.state for e in experiences if e is not None])).float().to(device) <NEW_LINE> actions = torch.from_numpy(np.vstack([e.action for e in experiences if e is not None])).long().to(device) <NEW_LINE> rewards = torch.from_numpy(np.vstack([e.reward for e in experiences if e is not None])).float().to(device) <NEW_LINE> next_states = torch.from_numpy(np.vstack([e.next_state for e in experiences if e is not None])).float().to(device) <NEW_LINE> dones = torch.from_numpy(np.vstack([e.done for e in experiences if e is not None]).astype(np.uint8)).float().to(device) <NEW_LINE> return (states, actions, rewards, next_states, dones) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.memory)
|
Fixed-size buffer to store experience tuples.
|
62599022287bf620b6272aa8
|
@global_preferences_registry.register <NEW_LINE> class OrionServerRestUrl(LongStringPreference): <NEW_LINE> <INDENT> section = ORION_SERVER_CONN <NEW_LINE> name = 'orion_rest_url' <NEW_LINE> default = settings.ORION_URL <NEW_LINE> required = True <NEW_LINE> verbose_name = _('Orion Server REST API root URL')
|
Dynamic preferences class controlling the `URL` for the `REST` `API`
provided by the `Orion server`
This preference is used by several applications in this project to query
the `Orion` server for data about `Orion` nodes.
:access_key: 'orionserverconn__orion_rest_url'
|
625990225166f23b2e24428f
|
class SphericalKMeans(KMeans): <NEW_LINE> <INDENT> def __init__(self, n_clusters=8, init='k-means++', n_init=10, max_iter=300, tol=1e-4, n_jobs=1, verbose=0, random_state=None, copy_x=True): <NEW_LINE> <INDENT> self.n_clusters = n_clusters <NEW_LINE> self.init = init <NEW_LINE> self.max_iter = max_iter <NEW_LINE> self.tol = tol <NEW_LINE> self.n_init = n_init <NEW_LINE> self.verbose = verbose <NEW_LINE> self.random_state = random_state <NEW_LINE> self.copy_x = copy_x <NEW_LINE> self.n_jobs = n_jobs <NEW_LINE> <DEDENT> def fit(self, X, y=None): <NEW_LINE> <INDENT> random_state = check_random_state(self.random_state) <NEW_LINE> X = self._check_fit_data(X) <NEW_LINE> self.cluster_centers_, self.labels_, self.inertia_, self.n_iter_ = spherical_k_means( X, n_clusters=self.n_clusters, init=self.init, n_init=self.n_init, max_iter=self.max_iter, verbose=self.verbose, tol=self.tol, random_state=random_state, copy_x=self.copy_x, n_jobs=self.n_jobs, return_n_iter=True) <NEW_LINE> return self
|
Spherical K-Means clustering
Modfication of sklearn.cluster.KMeans where cluster centers are normalized
(projected onto the sphere) in each iteration.
Parameters
----------
n_clusters : int, optional, default: 8
The number of clusters to form as well as the number of
centroids to generate.
max_iter : int, default: 300
Maximum number of iterations of the k-means algorithm for a
single run.
n_init : int, default: 10
Number of time the k-means algorithm will be run with different
centroid seeds. The final results will be the best output of
n_init consecutive runs in terms of inertia.
init : {'k-means++', 'random' or an ndarray}
Method for initialization, defaults to 'k-means++':
'k-means++' : selects initial cluster centers for k-mean
clustering in a smart way to speed up convergence. See section
Notes in k_init for more details.
'random': choose k observations (rows) at random from data for
the initial centroids.
If an ndarray is passed, it should be of shape (n_clusters, n_features)
and gives the initial centers.
tol : float, default: 1e-4
Relative tolerance with regards to inertia to declare convergence
n_jobs : int
The number of jobs to use for the computation. This works by computing
each of the n_init runs in parallel.
If -1 all CPUs are used. If 1 is given, no parallel computing code is
used at all, which is useful for debugging. For n_jobs below -1,
(n_cpus + 1 + n_jobs) are used. Thus for n_jobs = -2, all CPUs but one
are used.
random_state : integer or numpy.RandomState, optional
The generator used to initialize the centers. If an integer is
given, it fixes the seed. Defaults to the global numpy random
number generator.
verbose : int, default 0
Verbosity mode.
copy_x : boolean, default True
When pre-computing distances it is more numerically accurate to center
the data first. If copy_x is True, then the original data is not
modified. If False, the original data is modified, and put back before
the function returns, but small numerical differences may be introduced
by subtracting and then adding the data mean.
Attributes
----------
cluster_centers_ : array, [n_clusters, n_features]
Coordinates of cluster centers
labels_ :
Labels of each point
inertia_ : float
Sum of distances of samples to their closest cluster center.
|
62599022925a0f43d25e8f00
|
class ReplyUncommitted(object): <NEW_LINE> <INDENT> def __init__(self, rtag, txn_ids): <NEW_LINE> <INDENT> self.rtag = rtag <NEW_LINE> self.txn_ids = txn_ids <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "ReplyUncommitted(rtag={!r}, txn_ids={!r})".format(self.rtag, self.txn_ids) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return (self.rtag == other.rtag and self.txn_ids == other.txn_ids) <NEW_LINE> <DEDENT> def encode(self): <NEW_LINE> <INDENT> return (struct.pack('>QI', self.rtag, len(self.txn_ids)) + b''.join(( struct.pack('>H{}s'.format(len(txn_id)), len(txn_id), txn_id.encode("utf-8")) for txn_id in self.txn_ids))) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def decode(bs): <NEW_LINE> <INDENT> reader = StringIO(bs) <NEW_LINE> credits = struct.unpack(">I", reader.read(4))[0] <NEW_LINE> acks_length = struct.unpack(">I", reader.read(4))[0] <NEW_LINE> acks = [] <NEW_LINE> for _ in range(acks_length): <NEW_LINE> <INDENT> stream_id = struct.unpack(">Q", reader.read(8))[0] <NEW_LINE> point_of_ref = struct.unpack(">Q", reader.read(8))[0] <NEW_LINE> acks.append((stream_id, point_of_ref)) <NEW_LINE> <DEDENT> return Ack(credits, acks)
|
ReplyUncommitted(rtag: U64, txn_ids: Array[String])
|
625990221d351010ab8f49d1
|
class BaseCLIDriverTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.environ = { 'AWS_DATA_PATH': os.environ['AWS_DATA_PATH'], 'AWS_DEFAULT_REGION': 'us-east-1', 'AWS_ACCESS_KEY_ID': 'access_key', 'AWS_SECRET_ACCESS_KEY': 'secret_key', 'AWS_CONFIG_FILE': '', } <NEW_LINE> self.environ_patch = mock.patch('os.environ', self.environ) <NEW_LINE> self.environ_patch.start() <NEW_LINE> emitter = HierarchicalEmitter() <NEW_LINE> session = Session(EnvironmentVariables, emitter) <NEW_LINE> load_plugins({}, event_hooks=emitter) <NEW_LINE> driver = CLIDriver(session=session) <NEW_LINE> self.session = session <NEW_LINE> self.driver = driver <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.environ_patch.stop()
|
Base unittest that uses clidriver.
This will load all the default plugins as well so it
will simulate the behavior the user will see.
|
625990223eb6a72ae038b520
|
class Player(item.Item): <NEW_LINE> <INDENT> sprite = { 'left': "{#", 'right': "#}" } <NEW_LINE> heading = 'left' <NEW_LINE> missile = { "current": 0, "max": 10 } <NEW_LINE> def __init__(self, x, y, max_x, objects): <NEW_LINE> <INDENT> item.Item.__init__(self, x, y) <NEW_LINE> self.max_x = max_x <NEW_LINE> self.objects = objects <NEW_LINE> <DEDENT> def move_right(self): <NEW_LINE> <INDENT> if self.pos_x > self.max_x - 3: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> item.Item.move_right(self) <NEW_LINE> self.heading = "right" <NEW_LINE> <DEDENT> def move_left(self): <NEW_LINE> <INDENT> if self.pos_x < 1: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> item.Item.move_left(self) <NEW_LINE> self.heading = "left" <NEW_LINE> <DEDENT> def fire(self): <NEW_LINE> <INDENT> if self.missile['current'] < self.missile['max']: <NEW_LINE> <INDENT> self.missile['current'] += 1 <NEW_LINE> if self.heading == "right": <NEW_LINE> <INDENT> self.objects.append( missile.Missile(self.pos_x, self.pos_y, self) ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.objects.append( missile.Missile(self.pos_x + 1, self.pos_y, self) ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def tick(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def get_sprite(self): <NEW_LINE> <INDENT> return self.sprite[self.heading] <NEW_LINE> <DEDENT> def event_discard(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def can_hit(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def is_hit(self, target): <NEW_LINE> <INDENT> if isinstance(target, bomb.Bomb): <NEW_LINE> <INDENT> target_position = target.get_position() <NEW_LINE> if target_position[1] == self.pos_y and self.pos_x <= target_position[0] <= self.pos_x + 1: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False
|
Player class
|
625990226e29344779b0150c
|
class DocID(models.Model): <NEW_LINE> <INDENT> doc_id = models.CharField(max_length=26, primary_key=True)
|
Model for a document id used to generate queries of certain sizes
This model is used to store document ids. The document ids are used to
generate query sets of certain sizes. These query sets are used to test the
performance of generating word cloud data using ES in multiple management
commands.
|
625990226fece00bbaccc874
|
class RingBuffer(np.ndarray): <NEW_LINE> <INDENT> def __new__(cls, input_array): <NEW_LINE> <INDENT> obj = np.asarray(input_array).view(cls) <NEW_LINE> return obj <NEW_LINE> <DEDENT> def __array_finalize__(self, obj): <NEW_LINE> <INDENT> if obj is None: return <NEW_LINE> <DEDENT> def __array_wrap__(self, out_arr, context=None): <NEW_LINE> <INDENT> return np.ndarray.__array_wrap__(self, out_arr, context) <NEW_LINE> <DEDENT> def extend(self, xs): <NEW_LINE> <INDENT> xs = np.asarray(xs) <NEW_LINE> if self.shape[1:] != xs.shape[1:]: <NEW_LINE> <INDENT> raise ValueError("Element's shape mismatch. RingBuffer.shape={}. " "xs.shape={}".format(self.shape, xs.shape)) <NEW_LINE> <DEDENT> len_self = len(self) <NEW_LINE> len_xs = len(xs) <NEW_LINE> if len_self <= len_xs: <NEW_LINE> <INDENT> xs = xs[-len_self:] <NEW_LINE> len_xs = len(xs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self[:-len_xs] = self[len_xs:] <NEW_LINE> <DEDENT> self[-len_xs:] = xs <NEW_LINE> <DEDENT> def append(self, x): <NEW_LINE> <INDENT> x = np.asarray(x) <NEW_LINE> if self.shape[1:] != x.shape: <NEW_LINE> <INDENT> raise ValueError("Element's shape mismatch. RingBuffer.shape={}. " "xs.shape={}".format(self.shape, x.shape)) <NEW_LINE> <DEDENT> len_self = len(self) <NEW_LINE> self[:-1] = self[1:] <NEW_LINE> self[-1] = x
|
A multidimensional ring buffer.
|
62599022c432627299fa3ead
|
class Item: <NEW_LINE> <INDENT> def __init__(self, name, desc): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.desc = desc
|
An item in the game.
Arguments:
name (str): Name of the item.
desc (str): Descripton of the item available to the player.
|
62599022d164cc6175821e34
|
class EndpointInfo(Model): <NEW_LINE> <INDENT> _attribute_map = { 'version_id': {'key': 'versionId', 'type': 'str'}, 'is_staging': {'key': 'isStaging', 'type': 'bool'}, 'endpoint_url': {'key': 'endpointUrl', 'type': 'str'}, 'region': {'key': 'region', 'type': 'str'}, 'assigned_endpoint_key': {'key': 'assignedEndpointKey', 'type': 'str'}, 'endpoint_region': {'key': 'endpointRegion', 'type': 'str'}, 'failed_regions': {'key': 'failedRegions', 'type': 'str'}, 'published_date_time': {'key': 'publishedDateTime', 'type': 'str'}, } <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(EndpointInfo, self).__init__(**kwargs) <NEW_LINE> self.version_id = kwargs.get('version_id', None) <NEW_LINE> self.is_staging = kwargs.get('is_staging', None) <NEW_LINE> self.endpoint_url = kwargs.get('endpoint_url', None) <NEW_LINE> self.region = kwargs.get('region', None) <NEW_LINE> self.assigned_endpoint_key = kwargs.get('assigned_endpoint_key', None) <NEW_LINE> self.endpoint_region = kwargs.get('endpoint_region', None) <NEW_LINE> self.failed_regions = kwargs.get('failed_regions', None) <NEW_LINE> self.published_date_time = kwargs.get('published_date_time', None)
|
The base class "ProductionOrStagingEndpointInfo" inherits from.
:param version_id: The version ID to publish.
:type version_id: str
:param is_staging: Indicates if the staging slot should be used, instead
of the Production one.
:type is_staging: bool
:param endpoint_url: The Runtime endpoint URL for this model version.
:type endpoint_url: str
:param region: The target region that the application is published to.
:type region: str
:param assigned_endpoint_key: The endpoint key.
:type assigned_endpoint_key: str
:param endpoint_region: The endpoint's region.
:type endpoint_region: str
:param failed_regions: Regions where publishing failed.
:type failed_regions: str
:param published_date_time: Timestamp when was last published.
:type published_date_time: str
|
6259902221bff66bcd723b21
|
class SignUp(CreateView): <NEW_LINE> <INDENT> form_class = forms.UserCreateForm <NEW_LINE> success_url = reverse_lazy("login") <NEW_LINE> template_name = "accounts/signup.html"
|
Handles user sign-up.
**Context**
**Template:**
:template:`accounts/signup.html`
|
62599022a8ecb033258720de
|
class PolicyContract(Resource): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'value': {'key': 'properties.value', 'type': 'str'}, 'format': {'key': 'properties.format', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, value: Optional[str] = None, format: Optional[Union[str, "PolicyContentFormat"]] = "xml", **kwargs ): <NEW_LINE> <INDENT> super(PolicyContract, self).__init__(**kwargs) <NEW_LINE> self.value = value <NEW_LINE> self.format = format
|
Policy Contract details.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Fully qualified resource ID for the resource. Ex -
/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
"Microsoft.Storage/storageAccounts".
:vartype type: str
:ivar value: Contents of the Policy as defined by the format.
:vartype value: str
:ivar format: Format of the policyContent. Possible values include: "xml", "xml-link",
"rawxml", "rawxml-link". Default value: "xml".
:vartype format: str or ~api_management_client.models.PolicyContentFormat
|
62599022287bf620b6272aac
|
class TabPanel(wx.Panel): <NEW_LINE> <INDENT> def __init__(self, tab_panel_stack): <NEW_LINE> <INDENT> super().__init__(tab_panel_stack) <NEW_LINE> self.tab_panel_stack = tab_panel_stack <NEW_LINE> <DEDENT> def save(self): <NEW_LINE> <INDENT> return {'subclass': self.__class__.__name__} <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def restore(cls, tps, saved_panel): <NEW_LINE> <INDENT> cls(tps) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def restore_factory(cls, tps, saved_panel): <NEW_LINE> <INDENT> subclass = util.find_descendent_class(cls, saved_panel['subclass']) <NEW_LINE> subclass.restore(tps, saved_panel) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def cls_text(cls): <NEW_LINE> <INDENT> return cls.__name__ <NEW_LINE> <DEDENT> def inst_text(self): <NEW_LINE> <INDENT> return self.__class__.cls_text() <NEW_LINE> <DEDENT> def tab_idx(self): <NEW_LINE> <INDENT> return self.tab_panel_stack.tab_idx(self) <NEW_LINE> <DEDENT> def relative_stack(self, pos): <NEW_LINE> <INDENT> return self.tab_panel_stack.relative_stack(pos) <NEW_LINE> <DEDENT> def push(self): <NEW_LINE> <INDENT> self.tab_panel_stack.push(self)
|
panel in a TabPanelStack
|
625990221d351010ab8f49d5
|
class SectionChoices: <NEW_LINE> <INDENT> all = 'all' <NEW_LINE> cars = 'cars' <NEW_LINE> sport = 'sport' <NEW_LINE> CHOICES = ( (all, "all"), (cars, "cars"), (sport, "sport") )
|
Разделы
|
62599022d164cc6175821e36
|
@ddt.ddt <NEW_LINE> class RadioProblemResetCorrectnessAfterChangingAnswerTest(RadioProblemTypeBase): <NEW_LINE> <INDENT> shard = 24 <NEW_LINE> @ddt.data(['correct', '1/1 point (ungraded)'], ['incorrect', '0/1 point (ungraded)']) <NEW_LINE> @ddt.unpack <NEW_LINE> def test_radio_score_after_answer_and_reset(self, correctness, score): <NEW_LINE> <INDENT> self.answer_problem(correctness) <NEW_LINE> self.problem_page.click_submit() <NEW_LINE> self.assertEqual(self.problem_page.problem_progress_graded_value, score) <NEW_LINE> self.problem_page.click_reset() <NEW_LINE> self.assertEqual(self.problem_page.problem_progress_graded_value, '0/1 point (ungraded)') <NEW_LINE> <DEDENT> @ddt.data(['correct', 'incorrect'], ['incorrect', 'correct']) <NEW_LINE> @ddt.unpack <NEW_LINE> def test_reset_correctness_after_changing_answer(self, initial_correctness, other_correctness): <NEW_LINE> <INDENT> self.assertTrue(self.problem_status("unanswered")) <NEW_LINE> self.answer_problem(initial_correctness) <NEW_LINE> self.problem_page.click_submit() <NEW_LINE> self.assertTrue(self.problem_status(initial_correctness)) <NEW_LINE> self.problem_page.click_reset() <NEW_LINE> self.assertFalse(self.problem_status(initial_correctness)) <NEW_LINE> self.assertFalse(self.problem_status(other_correctness))
|
Tests for Radio problem with changing answers
|
6259902230c21e258be996d7
|
class User(models.Model): <NEW_LINE> <INDENT> id = models.AutoField(primary_key=True) <NEW_LINE> username = models.CharField(max_length=255, db_index=True, unique=True) <NEW_LINE> pass_salt = models.TextField() <NEW_LINE> pass_hash = models.TextField() <NEW_LINE> type = models.TextField() <NEW_LINE> def set_password(self, password): <NEW_LINE> <INDENT> self.pass_salt = uuid.uuid4().hex <NEW_LINE> self.pass_hash = hashlib.md5(password + self.pass_salt).hexdigest() <NEW_LINE> <DEDENT> def is_password_correct(self, password): <NEW_LINE> <INDENT> hash = hashlib.md5(password + self.pass_salt).hexdigest() <NEW_LINE> return (hash == self.pass_hash)
|
A user within the Authentication app.
|
625990228c3a8732951f7418
|
class CssIndent(Indent): <NEW_LINE> <INDENT> def events(self, block, tokens, prev_indents): <NEW_LINE> <INDENT> for t in tokens: <NEW_LINE> <INDENT> if t.action is Bracket: <NEW_LINE> <INDENT> if t == "{": <NEW_LINE> <INDENT> yield INDENT <NEW_LINE> <DEDENT> elif t == "}": <NEW_LINE> <INDENT> yield DEDENT
|
Indenter for Css.
|
625990223eb6a72ae038b526
|
class Function(use_metaclass(CachedMetaClass, pr.IsScope)):
    """Wraps a parser function; needed because of decorators, which are
    evaluated here."""

    def __init__(self, evaluator, func, is_decorated=False):
        self._evaluator = evaluator
        self.base_func = func
        self.is_decorated = is_decorated

    @memoize_default(None)
    def _decorated_func(self):
        """Return the function with its decorators applied, or None when a
        decorator could not be resolved."""
        f = self.base_func
        if not self.is_decorated:
            # Apply decorators innermost-first, mirroring runtime order.
            for dec in reversed(self.base_func.decorators):
                debug.dbg('decorator: %s %s', dec, f)
                dec_results = self._evaluator.eval_statement(dec)
                if not len(dec_results):
                    debug.warning('decorator not found: %s on %s', dec, self.base_func)
                    return None
                decorator = dec_results.pop()
                if dec_results:
                    debug.warning('multiple decorators found %s %s',
                                  self.base_func, dec_results)
                old_func = Function(self._evaluator, f, is_decorated=True)
                wrappers = self._evaluator.execute(decorator, (old_func,))
                if not len(wrappers):
                    debug.warning('no wrappers found %s', self.base_func)
                    return None
                if len(wrappers) > 1:
                    debug.warning('multiple wrappers found %s %s',
                                  self.base_func, wrappers)
                # Only the first wrapper result is followed.
                f = wrappers[0]
                debug.dbg('decorator end %s', f)
        if isinstance(f, pr.Function):
            f = Function(self._evaluator, f)
        return f

    def get_decorated_func(self):
        # Fall back to a pre-decorated wrapper when evaluation failed.
        return self._decorated_func() or Function(self._evaluator, self.base_func, True)

    def get_magic_function_names(self):
        return compiled.magic_function_class.get_defined_names()

    def get_magic_function_scope(self):
        return compiled.magic_function_class

    def __getattr__(self, name):
        # Delegate everything else to the wrapped parser function.
        return getattr(self.base_func, name)

    def __repr__(self):
        decorated_func = self._decorated_func()
        dec = ''
        if decorated_func is not None and decorated_func != self:
            dec = " is " + repr(self._decorated_func())
        return "<e%s of %s%s>" % (type(self).__name__, self.base_func, dec)
|
Needed because of decorators. Decorators are evaluated here.
|
625990226e29344779b01512
|
class MedicinalProductIngredientSpecifiedSubstance(backboneelement.BackboneElement): <NEW_LINE> <INDENT> resource_type = "MedicinalProductIngredientSpecifiedSubstance" <NEW_LINE> def __init__(self, jsondict=None, strict=True, **kwargs): <NEW_LINE> <INDENT> self.code = None <NEW_LINE> self.confidentiality = None <NEW_LINE> self.group = None <NEW_LINE> self.strength = None <NEW_LINE> super(MedicinalProductIngredientSpecifiedSubstance, self).__init__(jsondict=jsondict, strict=strict, **kwargs) <NEW_LINE> <DEDENT> def elementProperties(self): <NEW_LINE> <INDENT> js = super(MedicinalProductIngredientSpecifiedSubstance, self).elementProperties() <NEW_LINE> js.extend([ ("code", "code", codeableconcept.CodeableConcept, False, None, True), ("confidentiality", "confidentiality", codeableconcept.CodeableConcept, False, None, False), ("group", "group", codeableconcept.CodeableConcept, False, None, True), ("strength", "strength", MedicinalProductIngredientSpecifiedSubstanceStrength, True, None, False), ]) <NEW_LINE> return js
|
A specified substance that comprises this ingredient.
|
62599022462c4b4f79dbc8cd
|
class DiscoItems: <NEW_LINE> <INDENT> def __init__(self, items=None): <NEW_LINE> <INDENT> if (not items): <NEW_LINE> <INDENT> items = [] <NEW_LINE> <DEDENT> self.items = items <NEW_LINE> <DEDENT> def additem(self, jid, name=None, node=None): <NEW_LINE> <INDENT> jid = unicode(jid) <NEW_LINE> self.items.append( (jid, name, node) ) <NEW_LINE> <DEDENT> def getitems(self): <NEW_LINE> <INDENT> return self.items
|
DiscoItems: A class which represents the results of a disco items
query. It contains a list of items.
DiscoItems(items=None) -- constructor.
The *items* (if present) must be a list of (jid, name, node) tuples
(where *name* and *node* may be None).
Public methods:
additem(jid, name=None, node=None) -- add a new item to the list.
getitems() -- return the list of items.
|
625990229b70327d1c57fc43
|
class CloudPoolPaged(Paged): <NEW_LINE> <INDENT> _attribute_map = { 'next_link': {'key': 'odata\\.nextLink', 'type': 'str'}, 'current_page': {'key': 'value', 'type': '[CloudPool]'} } <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(CloudPoolPaged, self).__init__(*args, **kwargs)
|
A paging container for iterating over a list of :class:`CloudPool <azure.batch.models.CloudPool>` objects
|
62599022925a0f43d25e8f08
|
class Rectangle: <NEW_LINE> <INDENT> pass
|
Represents a rectangle in 2-D space.
- width and height represent the rectangle's dimensions
- corner is a Point object that specifies the lower-left corner of the rectangle
|
625990226fece00bbaccc87c
|
class Resource(ResourceMixin, WithMetrics, db.EmbeddedDocument): <NEW_LINE> <INDENT> on_added = signal('Resource.on_added') <NEW_LINE> on_deleted = signal('Resource.on_deleted')
|
Local file, remote file or API provided by the original provider of the
dataset
|
62599022462c4b4f79dbc8cf
|
class VecTransposeImage(VecEnvWrapper):
    """Re-order image observation channels from HxWxC to CxHxW, as
    required by PyTorch convolution layers.

    :param venv: the vectorized environment to wrap.
    :param skip: skip this wrapper entirely (it is applied heuristically,
        which may be unwanted; see GH issue #671).
    """

    def __init__(self, venv: VecEnv, skip: bool = False):
        assert is_image_space(venv.observation_space) or isinstance(
            venv.observation_space, spaces.dict.Dict
        ), "The observation space must be an image or dictionary observation space"
        self.skip = skip
        if skip:
            # Pass-through mode: observation space is left untouched.
            super(VecTransposeImage, self).__init__(venv)
            return
        if isinstance(venv.observation_space, spaces.dict.Dict):
            # Transpose only the image-like sub-spaces of a Dict space,
            # remembering their keys for use in transpose_observations().
            self.image_space_keys = []
            observation_space = deepcopy(venv.observation_space)
            for key, space in observation_space.spaces.items():
                if is_image_space(space):
                    self.image_space_keys.append(key)
                    observation_space.spaces[key] = self.transpose_space(space, key)
        else:
            observation_space = self.transpose_space(venv.observation_space)
        super(VecTransposeImage, self).__init__(venv, observation_space=observation_space)

    @staticmethod
    def transpose_space(observation_space: spaces.Box, key: str = "") -> spaces.Box:
        """Return a channels-first Box equivalent to a channels-last one."""
        assert is_image_space(observation_space), "The observation space must be an image"
        assert not is_image_space_channels_first(
            observation_space
        ), f"The observation space {key} must follow the channel last convention"
        height, width, channels = observation_space.shape
        new_shape = (channels, height, width)
        return spaces.Box(low=0, high=255, shape=new_shape, dtype=observation_space.dtype)

    @staticmethod
    def transpose_image(image: np.ndarray) -> np.ndarray:
        """Transpose a single image (HWC) or a batch of images (NHWC)."""
        if len(image.shape) == 3:
            return np.transpose(image, (2, 0, 1))
        return np.transpose(image, (0, 3, 1, 2))

    def transpose_observations(self, observations: Union[np.ndarray, Dict]) -> Union[np.ndarray, Dict]:
        """Transpose observations (or each image entry of a dict of them)."""
        if self.skip:
            return observations
        if isinstance(observations, dict):
            # Avoid modifying the caller's dict in place.
            observations = deepcopy(observations)
            for k in self.image_space_keys:
                observations[k] = self.transpose_image(observations[k])
        else:
            observations = self.transpose_image(observations)
        return observations

    def step_wait(self) -> VecEnvStepReturn:
        observations, rewards, dones, infos = self.venv.step_wait()
        # Terminal observations stashed in the info dicts need the same
        # channel re-ordering as regular observations.
        for idx, done in enumerate(dones):
            if not done:
                continue
            if "terminal_observation" in infos[idx]:
                infos[idx]["terminal_observation"] = self.transpose_observations(infos[idx]["terminal_observation"])
        return self.transpose_observations(observations), rewards, dones, infos

    def reset(self) -> Union[np.ndarray, Dict]:
        """Reset the wrapped environments and transpose the observations."""
        return self.transpose_observations(self.venv.reset())

    def close(self) -> None:
        self.venv.close()
|
Re-order channels, from HxWxC to CxHxW.
It is required for PyTorch convolution layers.
:param venv:
:param skip: Skip this wrapper if needed as we rely on heuristic to apply it or not,
which may result in unwanted behavior, see GH issue #671.
|
6259902230c21e258be996da
|
class Square: <NEW_LINE> <INDENT> def __init__(self, size=0): <NEW_LINE> <INDENT> if type(size) is not int: <NEW_LINE> <INDENT> raise TypeError("size must be an integer") <NEW_LINE> <DEDENT> if size < 0: <NEW_LINE> <INDENT> raise ValueError("size must be >= 0") <NEW_LINE> <DEDENT> """attribute""" <NEW_LINE> self.__size = size
|
Initialize
|
625990228c3a8732951f741c
|
class Fraction: <NEW_LINE> <INDENT> def __init__(self, numerator, denominator): <NEW_LINE> <INDENT> if not isinstance(numerator, int) or not isinstance(denominator, int): <NEW_LINE> <INDENT> raise TypeError <NEW_LINE> <DEDENT> elif denominator == 0: <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> self.__numerator = numerator <NEW_LINE> self.__denominator = denominator <NEW_LINE> <DEDENT> def return_string(self): <NEW_LINE> <INDENT> if self.__numerator * self.__denominator < 0: <NEW_LINE> <INDENT> sign = "-" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sign = "" <NEW_LINE> <DEDENT> return "{:s}{:d}/{:d}".format(sign, abs(self.__numerator), abs(self.__denominator)) <NEW_LINE> <DEDENT> def simplify(self): <NEW_LINE> <INDENT> gcd = greatest_common_divisor(self.__numerator, self.__denominator) <NEW_LINE> self.__numerator //= gcd <NEW_LINE> self.__denominator //= gcd
|
This class represents one single fraction that consists of
numerator and denominator
|
625990228c3a8732951f741d
|
class BibIndexYearTokenizer(BibIndexDefaultTokenizer): <NEW_LINE> <INDENT> def __init__(self, stemming_language = None, remove_stopwords = False, remove_html_markup = False, remove_latex_markup = False): <NEW_LINE> <INDENT> BibIndexDefaultTokenizer.__init__(self, stemming_language, remove_stopwords, remove_html_markup, remove_latex_markup) <NEW_LINE> <DEDENT> def get_words_from_date_tag(self, datestring): <NEW_LINE> <INDENT> out = [] <NEW_LINE> for dateword in datestring.split(): <NEW_LINE> <INDENT> out.append(dateword) <NEW_LINE> parts = dateword.split('-') <NEW_LINE> for nb in range(1, len(parts)): <NEW_LINE> <INDENT> out.append("-".join(parts[:nb])) <NEW_LINE> <DEDENT> <DEDENT> return out <NEW_LINE> <DEDENT> def tokenize_for_words_default(self, phrase): <NEW_LINE> <INDENT> return super(BibIndexYearTokenizer, self).tokenize_for_words(phrase) <NEW_LINE> <DEDENT> def tokenize_for_words(self, phrase): <NEW_LINE> <INDENT> if CFG_INSPIRE_SITE: <NEW_LINE> <INDENT> return self.get_words_from_date_tag(phrase) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.tokenize_for_words_default(phrase)
|
Year tokenizer. It tokenizes words from date tags or uses default word tokenizer.
|
625990226e29344779b01516
|
class M3DIllegalArgumentException(M3DException): <NEW_LINE> <INDENT> def __init__(self, message): <NEW_LINE> <INDENT> super(M3DIllegalArgumentException, self).__init__(message)
|
Thrown to indicate that a method has been passed an illegal or inappropriate argument.
For example, invalid char exception.
|
62599022bf627c535bcb237b
|
class HardTripletLoss(nn.Module): <NEW_LINE> <INDENT> def __init__(self, margin=0.1, hardest=False, squared=False): <NEW_LINE> <INDENT> super(HardTripletLoss, self).__init__() <NEW_LINE> self.margin = margin <NEW_LINE> self.hardest = hardest <NEW_LINE> self.squared = squared <NEW_LINE> <DEDENT> def forward(self, embeddings, labels): <NEW_LINE> <INDENT> pairwise_dist = _pairwise_distance(embeddings, squared=self.squared) <NEW_LINE> if self.hardest: <NEW_LINE> <INDENT> mask_anchor_positive = _get_anchor_positive_triplet_mask(labels).float() <NEW_LINE> valid_positive_dist = pairwise_dist * mask_anchor_positive <NEW_LINE> hardest_positive_dist, _ = torch.max(valid_positive_dist, dim=1, keepdim=True) <NEW_LINE> mask_anchor_negative = _get_anchor_negative_triplet_mask(labels).float() <NEW_LINE> max_anchor_negative_dist, _ = torch.max(pairwise_dist, dim=1, keepdim=True) <NEW_LINE> anchor_negative_dist = pairwise_dist + max_anchor_negative_dist * ( 1.0 - mask_anchor_negative) <NEW_LINE> hardest_negative_dist, _ = torch.min(anchor_negative_dist, dim=1, keepdim=True) <NEW_LINE> triplet_loss = F.relu(hardest_positive_dist - hardest_negative_dist + self.margin) <NEW_LINE> triplet_loss = torch.mean(triplet_loss) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> anc_pos_dist = pairwise_dist.unsqueeze(dim=2) <NEW_LINE> anc_neg_dist = pairwise_dist.unsqueeze(dim=1) <NEW_LINE> loss = anc_pos_dist - anc_neg_dist + self.margin <NEW_LINE> mask = _get_triplet_mask(labels).float() <NEW_LINE> triplet_loss = loss * mask <NEW_LINE> triplet_loss = F.relu(triplet_loss) <NEW_LINE> hard_triplets = torch.gt(triplet_loss, 1e-16).float() <NEW_LINE> possible_triplets = reduce(lambda x, y: x*y, triplet_loss.size()) <NEW_LINE> triplets_portion = hard_triplets/possible_triplets <NEW_LINE> num_hard_triplets = torch.sum(hard_triplets) <NEW_LINE> triplet_loss = torch.sum(triplet_loss) / (num_hard_triplets + 1e-16) <NEW_LINE> <DEDENT> return triplet_loss
|
Hard/Hardest Triplet Loss
(pytorch implementation of https://omoindrot.github.io/triplet-loss)
For each anchor, we get the hardest positive and hardest negative to form a triplet.
|
62599022d164cc6175821e3d
|
class connectedBox(QtGui.QGraphicsRectItem): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(connectedBox, self).__init__(*args) <NEW_LINE> self.setAcceptHoverEvents(True) <NEW_LINE> self._isHighlighted = False <NEW_LINE> <DEDENT> def hoverEnterEvent(self, e): <NEW_LINE> <INDENT> self.setToolTip(self._ownerToolTip()) <NEW_LINE> self._ownerHoverEnter(e) <NEW_LINE> <DEDENT> def hoverLeaveEvent(self, e): <NEW_LINE> <INDENT> self._ownerHoverExit(e) <NEW_LINE> <DEDENT> def mouseDoubleClickEvent(self, e): <NEW_LINE> <INDENT> self._toggleHighlight() <NEW_LINE> <DEDENT> def connectOwnerHoverEnter(self, ownerHoverEnter): <NEW_LINE> <INDENT> self._ownerHoverEnter = ownerHoverEnter <NEW_LINE> <DEDENT> def connectOwnerHoverExit(self, ownerHoverExit): <NEW_LINE> <INDENT> self._ownerHoverExit = ownerHoverExit <NEW_LINE> <DEDENT> def connectToggleHighlight(self, ownerTH): <NEW_LINE> <INDENT> self._toggleHighlight = ownerTH <NEW_LINE> <DEDENT> def connectToolTip(self, ownerToolTip): <NEW_LINE> <INDENT> self._ownerToolTip = ownerToolTip
|
docstring for connectedBox
|
6259902263f4b57ef00864d6
|
class Teacher(models.Model): <NEW_LINE> <INDENT> user = models.OneToOneField(User) <NEW_LINE> school = models.ForeignKey(School) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.first_name + " " + self.last_name
|
Override the user class
|
62599022ac7a0e7691f733b2
|
class devices: <NEW_LINE> <INDENT> def __init__(self, debug): <NEW_LINE> <INDENT> self.devices = {} <NEW_LINE> self.doi = None <NEW_LINE> self.secondary = None <NEW_LINE> self.debug = debug <NEW_LINE> <DEDENT> def register(self, info): <NEW_LINE> <INDENT> if "serial" in info and info["serial"].lower() not in self.devices: <NEW_LINE> <INDENT> aio.create_task(self.set_device(info)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not self.devices[info["serial"].lower()].alive: <NEW_LINE> <INDENT> aio.create_task(self.set_device(info)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def unregister(self, mac): <NEW_LINE> <INDENT> if mac.lower() in self.devices: <NEW_LINE> <INDENT> logging.debug("%s is gone" % self.devices[mac.lower()].name) <NEW_LINE> self.devices[mac.lower()].stop() <NEW_LINE> del self.devices[mac.lower()] <NEW_LINE> <DEDENT> <DEDENT> async def set_device(self, info): <NEW_LINE> <INDENT> logging.debug(f"Adding {info}") <NEW_LINE> try: <NEW_LINE> <INDENT> newdev = await avr.avr_factory(info["name"], info["ip"]) <NEW_LINE> if newdev: <NEW_LINE> <INDENT> self.devices[info["serial"].lower()] = newdev <NEW_LINE> if self.debug: <NEW_LINE> <INDENT> self.devices[info["serial"].lower()].notifyme(notification) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> logging.warning(f"Could not connect to {info['ip']}.") <NEW_LINE> <DEDENT> <DEDENT> def stop(self): <NEW_LINE> <INDENT> for dev in self.devices.values(): <NEW_LINE> <INDENT> dev.close()
|
A simple class with register and unregister methods
|
625990226fece00bbaccc880
|
class ReturnView(DetailView): <NEW_LINE> <INDENT> model = Payment <NEW_LINE> def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> return self.get(request, *args, **kwargs) <NEW_LINE> <DEDENT> def render_to_response(self, context, **response_kwargs): <NEW_LINE> <INDENT> if self.request.POST['status'] == 'OK': <NEW_LINE> <INDENT> return HttpResponseRedirect(reverse('getpaid-success-fallback', kwargs={'pk': self.object.pk})) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return HttpResponseRedirect(reverse('getpaid-failure-fallback', kwargs={'pk': self.object.pk}))
|
This view just redirects to standard backend success or failure link.
|
62599022a8ecb033258720e8
|
class CustomEmoticonsModule(Module):
    """Replaces the bot's own messages that contain emoticon words
    (prefixed with _EMOTICON_PREFIX) with the matching image file."""

    @Module.event(['message'])
    async def on_message(self, message: discord.Message):
        # Only react to messages sent by this client itself.
        if message.author.id == self.client.connection.user.id:
            word, image = self.parse_message(message.content)
            if word and image:
                # Strip the emoticon word; edit the message if other text
                # remains, otherwise delete it, then post the image.
                new_content = message.content.replace(word, '').strip()
                if new_content:
                    await self.client.edit_message(message, new_content)
                else:
                    await self.client.delete_message(message)
                await self.client.send_file(message.channel, image)

    def parse_message(self, message: str) -> Tuple[str, str]:
        """Return (word, image path) for the first emoticon word found,
        or (None, None) when the message contains none."""
        for word in message.split():
            if word.startswith(_EMOTICON_PREFIX):
                image = self.find_emoticon(word[1:])
                if image is not None:
                    return word, image
        return None, None

    @staticmethod
    def find_emoticon(name: str) -> str:
        """Resolve an emoticon name to an image path inside _IMG_DIR,
        or None if no file with a known extension exists.

        The realpath containment check guards against '..' path traversal.
        """
        for ext in _IMG_EXTENSIONS:
            filename = name + ext
            path = os.path.join(_IMG_DIR, *filename.split('/'))
            if os.path.isfile(path) and _IMG_DIR in os.path.realpath(path):
                return path
        return None
|
Automatically replaces messages with custom images if emoticon names are found in the text.
Images are searched for in the ``/custom-emoticons`` directory relative to the directory
of the ``__main__`` file. If a word replaceable with an emoticon is found, the message is
deleted and replaced with a new message that includes the emoticon image.
|
625990225166f23b2e24429d
|
class StupidAi(GuessingGameAi):
    """A memoryless guessing AI.

    Every guess is drawn uniformly at random from the game's range;
    previous guesses are not remembered and hints are discarded.
    """

    def __init__(self, game):
        super().__init__(game)

    def generate_guess(self) -> int:
        # Draw a fresh random value within the game's range on every call.
        bounds = self.game.number_range()
        return random.randrange(*bounds)

    def receive_hint(self, hint: Hint):
        """Ignore the hint entirely."""
        pass
|
StupidAi is a kind of GuessingGameAi.
Each time it makes guesses randomly without remembering previous guesses.
It also ignores the hints.
|
62599022d18da76e235b78b2
|
class CastsDeleteTestCase(BaseTestGenerator):
    """This class will delete the cast node added under the database node."""

    scenarios = [
        ('Check Cast Node', dict(url='/browser/cast/obj/'))
    ]

    def setUp(self):
        # Remember the default database so tearDown can restore it, then
        # target the most recently created test database.
        self.default_db = self.server["db"]
        self.database_info = parent_node_dict['database'][-1]
        self.db_name = self.database_info['db_name']
        self.server["db"] = self.db_name
        # Create the cast (money -> bigint) that the test will delete.
        self.source_type = 'money'
        self.target_type = 'bigint'
        self.cast_id = cast_utils.create_cast(self.server, self.source_type,
                                              self.target_type)

    def runTest(self):
        """Connect to the database, verify the cast exists, then delete it
        through the API and expect HTTP 200."""
        self.server_id = self.database_info["server_id"]
        self.db_id = self.database_info['db_id']
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con["info"] == "Database connected.":
            raise Exception("Could not connect to database.")
        connection = utils.get_db_connection(self.server['db'],
                                             self.server['username'],
                                             self.server['db_password'],
                                             self.server['host'],
                                             self.server['port'],
                                             self.server['sslmode'])
        # Sanity check that the cast created in setUp actually exists.
        response = cast_utils.verify_cast(connection, self.source_type,
                                          self.target_type)
        if len(response) == 0:
            raise Exception("Could not find cast.")
        delete_response = self.tester.delete(
            self.url + str(utils.SERVER_GROUP) + '/' +
            str(self.server_id) + '/' + str(self.db_id) + '/' +
            str(self.cast_id),
            follow_redirects=True)
        self.assertEquals(delete_response.status_code, 200)

    def tearDown(self):
        # Disconnect and restore the server's default database.
        database_utils.disconnect_database(self, self.server_id, self.db_id)
        self.server['db'] = self.default_db
|
This class will delete the cast node added under database node.
|
625990221d351010ab8f49e0
|
class FeedNewSampleSeries(FeedEntry):
    """Model for feed entries for new sample series."""

    sample_series = models.ForeignKey(SampleSeries, models.CASCADE, verbose_name=_("sample series"))
    topic = models.ForeignKey(Topic, models.CASCADE, verbose_name=_("topic"))
    subscribers = models.ManyToManyField(django.contrib.auth.models.User,
                                         verbose_name=_("subscribers"), blank=True)

    class Meta(PolymorphicModel.Meta):
        verbose_name = _("new sample series feed entry")
        verbose_name_plural = _("new sample series feed entries")

    def get_metadata(self):
        """Return the feed metadata dict (title, category, link) for this entry."""
        metadata = {}
        metadata["title"] = _("New sample series “{sample_series}” in topic “{topic}”").format(
            sample_series=self.sample_series, topic=self.topic)
        metadata["category term"] = "new sample series"
        metadata["category label"] = "new sample series"
        metadata["link"] = self.sample_series.get_absolute_url()
        return metadata

    def get_additional_template_context(self, user):
        # Whether the given user subscribes to this feed entry.
        return {"subscribed": self.subscribers.filter(pk=user.pk).exists()}
|
Model for feed entries for new sample series.
|
62599022462c4b4f79dbc8d5
|
class AdalineGD(neuron.Neuron):
    """Adaline (ADAptive LInear NEuron) classifier trained with batch
    gradient descent.

    Fitted attributes:
      fitted_weights_ -- 1-D weight vector; index 0 is the bias unit.
      cost_           -- sum-of-squared-errors cost recorded per epoch.
    """

    def fit(self, X, y):
        """Fit the weights to training matrix X (n_samples x n_features)
        and target vector y; returns self."""
        self.fitted_weights_ = numpy.zeros(1 + X.shape[1])
        self.cost_ = []
        for _ in range(self.epochs):
            # NOTE(review): the original also computed self.net_input(X) and
            # discarded the result; that dead call was removed.  If
            # activation() is not meant to work on X directly, confirm
            # whether it should receive the net input instead.
            output = self.activation(X)
            errors = y - output
            # Batch gradient-descent update: w += eta * X^T (y - output).
            self.fitted_weights_[1:] += self.learn_rate * X.T.dot(errors)
            self.fitted_weights_[0] += self.learn_rate * errors.sum()
            # Half the sum of squared errors, tracked for convergence plots.
            self.cost_.append((errors ** 2).sum() / 2.0)
        return self
|
Adaline Gradient Descent classifier
|
62599022507cdc57c63a5c70
|
class Authorization(Find, Post):
    """Looking up, capturing, voiding and reauthorizing PayPal payment
    authorizations.

    See https://developer.paypal.com/docs/api/#authorizations
    """

    path = "v1/payments/authorization"

    def capture(self, attributes):
        # POST .../capture with the given attributes; the response is
        # wrapped as a Capture resource.
        return self.post('capture', attributes, Capture)

    def void(self):
        # POST .../void with an empty body; updates this object in place.
        return self.post('void', {}, self)

    def reauthorize(self):
        # POST .../reauthorize, sending this resource's current state.
        return self.post('reauthorize', self, self)
|
Enables looking up, voiding, capturing and reauthorizing authorization payments
Helpful links::
https://developer.paypal.com/docs/api/#authorizations
https://developer.paypal.com/docs/integration/direct/capture-payment/#authorize-the-payment
Usage::
>>> authorization = Authorization.find("<AUTHORIZATION_ID>")
>>> capture = authorization.capture({ "amount": { "currency": "USD", "total": "1.00" } })
>>> authorization.void() # return True or False
|
62599022796e427e5384f649
|
class ProjectBallotListView(ProjectMixin, PaginationMixin, DetailView):
    """List all ballots within a project, grouped by committee.

    Private ballots are only included for committees the requesting user
    is a member of.
    """

    context_object_name = 'project'
    template_name = 'project/ballot-list.html'
    paginate_by = 1000

    def get_context_data(self, **kwargs):
        context = super(
            ProjectBallotListView, self).get_context_data(**kwargs)
        committees = Committee.objects.filter(project=self.object)
        ballots = []
        for committee in committees:
            # Members see all ballots of their committee; everyone else
            # only sees the public (non-private) ones.
            if self.request.user.is_authenticated and self.request.user in committee.users.all():
                committee_ballots = Ballot.objects.filter(
                    committee=committee)
            else:
                committee_ballots = Ballot.objects.filter(
                    committee=committee).filter(private=False)
            if committee_ballots:
                ballots.append(committee_ballots)
        context['ballots_list'] = ballots
        return context

    def get_queryset(self):
        # Anonymous visitors only get public projects; authenticated users
        # get all approved ones.
        if self.request.user.is_authenticated:
            projects_qs = Project.approved_objects.all()
        else:
            projects_qs = Project.public_objects.all()
        return projects_qs
|
List all ballots within in a project.
|
625990225166f23b2e24429f
|
class LagrangianPatchDataAll:
    """Read the Lagrangian patch data for a whole cloud (all time
    directories of the case)."""

    def __init__(self, cloudDirName):
        self.dir = path.abspath(cloudDirName)
        d = SolutionDirectory(self.dir, paraviewLink=False)
        # Map float(time) -> per-time patch data object.
        self.data = {}
        for t in d:
            self.data[float(t.baseName())] = LagrangianPatchDataTime(t)

    def pandas(self):
        """Concatenate the per-time DataFrames into a single DataFrame."""
        import pandas as pd
        return pd.concat(self.data[n].pandas() for n in self.data)
|
Read the Lagrangian patch data for a whole cloud (all times)
|
62599022287bf620b6272ab8
|
class CreatePTransformOverride(PTransformOverride):
    """A ``PTransformOverride`` for ``Create`` in streaming mode."""

    def get_matcher(self):
        return self.is_streaming_create

    @staticmethod
    def is_streaming_create(applied_ptransform):
        """Return True iff the applied transform is a Create on a pipeline
        running with streaming options enabled."""
        # Imported lazily to avoid circular imports at module load time.
        from apache_beam import Create
        from apache_beam.options.pipeline_options import StandardOptions
        if isinstance(applied_ptransform.transform, Create):
            standard_options = (applied_ptransform
                                .outputs[None]
                                .pipeline._options
                                .view_as(StandardOptions))
            return standard_options.streaming
        else:
            return False

    def get_replacement_transform(self, ptransform):
        """Replace Create with StreamingCreate, reusing the registered
        coder for the transform's declared output type."""
        from apache_beam.runners.dataflow.native_io.streaming_create import StreamingCreate
        coder = typecoders.registry.get_coder(ptransform.get_output_type())
        return StreamingCreate(ptransform.value, coder)
|
A ``PTransformOverride`` for ``Create`` in streaming mode.
|
62599022d164cc6175821e42
|
class twinrx_phase_offset_est(gr.hier_block2):
    """Estimate the repeatable phase offset between TwinRX channels on a
    USRP X310.

    num_ports complex inputs -> (num_ports - 1) float outputs; output k is
    the argument of ch0 * conj(ch_{k+1}), i.e. the phase of channel k+1
    relative to channel 0.
    """

    def __init__(self, num_ports=2, n_skip_ahead=8192):
        gr.hier_block2.__init__(
            self, "TwinRx Phase Offset Estimate",
            gr.io_signaturev(num_ports, num_ports, gen_sig_io(num_ports, gr.sizeof_gr_complex)),
            gr.io_signaturev(num_ports - 1, num_ports - 1, gen_sig_io(num_ports - 1, gr.sizeof_float)),
        )
        self.n_skip_ahead = n_skip_ahead
        self.num_ports = num_ports

        # Skip the first n_skip_ahead samples on every input so the
        # estimate is not biased by startup transients.
        self.skiphead = []
        for p in range(0, num_ports):
            object_name_skiphead = 'blocks_skiphead_' + str(p)  # NOTE(review): unused variable
            self.skiphead.append(blocks.skiphead(gr.sizeof_gr_complex * 1, n_skip_ahead))
            self.connect((self, p), (self.skiphead[p], 0))

        # For each secondary channel: multiply by the conjugate of channel 0
        # and take the argument of the product to get the phase difference.
        self.multiply_conjugate = []
        self.complex_to_arg = []
        for p in range(0, num_ports - 1):
            self.multiply_conjugate.append(blocks.multiply_conjugate_cc(1))
            self.complex_to_arg.append(blocks.complex_to_arg(1))
            self.connect((self.skiphead[0], 0), (self.multiply_conjugate[p], 0))
            self.connect((self.skiphead[p + 1], 0), (self.multiply_conjugate[p], 1))
            self.connect((self.multiply_conjugate[p], 0), (self.complex_to_arg[p], 0))
            self.connect((self.complex_to_arg[p], 0), (self, p))
|
This block estimates the repeatable phase offset at the output of a USRP X310 equipped with two TwinRXs. The output is a value in [0, 2*pi).
|
62599022bf627c535bcb2381
|
class SawUp(Function):
    """Saw-up (rising ramp) wave normalised to the [0, 1] range.

    Calling the instance evaluates the wave at time ``t``.  The optional
    ``f`` and ``phase0`` call arguments override — and are cached onto —
    the instance's frequency/phase (a stateful quirk kept for
    backward compatibility).
    """

    def __init__(self, frequency=1.0, phase0=0.0):
        Function.__init__(self)
        self.frequency = frequency
        self.T = None          # cached period; lazily derived from frequency
        self.phase0 = phase0   # initial phase, as a fraction of the period

    def __call__(self, t, f=None, phase0=None):
        # Fixed: compare with None using identity (the original used ==/!=,
        # which invokes __eq__ and can misfire for exotic argument types).
        if f is not None:
            self.T = 1.0 / f
        if self.T is None:
            self.T = 1.0 / self.frequency
        if phase0 is not None:
            self.phase0 = phase0
        # Shift by the phase, wrap into one period, and normalise to [0, 1).
        t += self.T * self.phase0
        t = fmod(t, self.T)
        if t < 0:
            t += self.T
        return t / self.T
|
Saw-up wave translated in the [0,1] range
|
625990226fece00bbaccc884
|
class EasyServer(base.BaseServer):
    """A simple HTTP server that answers every request with an empty JSON body."""

    def __init__(self, host, port, max_connection=1024,
                 request_model=HTTPRequest, use_ipv6=False):
        super().__init__(host, port, max_connection, request_model, use_ipv6)

    def process_request(self, request):
        """Build the fixed "200 OK" JSON response for any request."""
        response_body = {}
        response_headers = "HTTP/1.1 200 OK\r\nContent-Type: text/json\r\n\r\n"
        response = response_headers + json.dumps(response_body)
        return response

    def preprocess_request(self, connection, addr, chunk=512, timeout=0.5):
        """Read a raw HTTP request from ``connection``.

        Reads until the header terminator (CRLFCRLF) is seen or the peer
        closes the connection.  Returns a request_model instance, or None
        when nothing was received.

        Fixed: the original looped forever when the peer closed the socket
        (recv() returning b"" was ignored) or when a silent client never
        sent the terminator.
        """
        content = b""
        connection.settimeout(timeout)
        while True:
            try:
                temp = connection.recv(chunk)
                if not temp:
                    # Empty recv means the peer closed the connection.
                    break
                content += temp
                if content.endswith(b"\r\n\r\n"):
                    break
            except socket.timeout:
                # Timed out: done if we already hold a full header block,
                # or if the client never sent anything at all.
                if not content or content.endswith(b"\r\n\r\n"):
                    break
        if not content:
            return None
        return self.request_model(content, addr)
|
一个简单的http服务器
|
62599022507cdc57c63a5c72
|
class Table(CallableLuaObject, LuaNamespace):
    """Class for representing Lua tables.

    The constructor accepts nothing (a fresh empty table), an int (an
    existing Lua reference), a Mapping (key/value pairs copied in) or an
    Iterable (values appended).  Members whose names start with ``_``
    should be accessed with subscription syntax, e.g. ``t['_foo']``.
    """

    def __init__(self, ref_or_iterable=None):
        if ref_or_iterable is None:
            self._init_empty_table()
        elif isinstance(ref_or_iterable, int):
            # An int is taken to be an existing Lua registry reference.
            CallableLuaObject.__init__(self, ref_or_iterable)
        elif isinstance(ref_or_iterable, Mapping):
            self._init_empty_table()
            for key, value in ref_or_iterable.items():
                self[key] = value
        elif isinstance(ref_or_iterable, Iterable):
            self._init_empty_table()
            for value in ref_or_iterable:
                self += value
        else:
            raise ValueError('unknown constructor argument type: '
                             f'{type(ref_or_iterable).__name__}')
        # Namespace for method-style calls: tbl._.method(args).
        self._ = MethodCallNamespace(self)

    def _init_empty_table(self):
        # Push a new table onto the Lua stack and keep a reference to it.
        _luastack.create_table()
        super().__init__(_luastack.reference_create())

    def _push_namespace_object(self):
        _luastack.reference_push(self._ref)

    def __iadd__(self, value):
        # Lua-style append via table.insert.
        G.table.insert(self, value)
        return self

    def __call__(self, *args):
        if G.getmetatable(self)["__call"] is None:
            raise ValueError("this table's metatable "
                             "does not have __call method")
        # NOTE(review): the call result is discarded here — confirm whether
        # the metatable __call return value should be propagated.
        super().__call__(*args)

    def __len__(self):
        # Lua length operator (#tbl), evaluated on the Lua side.
        return eval_lua("function(tbl) return #tbl end")(self)

    def __iter__(self):
        return TableKeyIterator(self)

    def keys(self):
        """Iterate over the table's keys."""
        return iter(self)

    def values(self):
        """Iterate over the table's values."""
        return TableValueIterator(self)

    def items(self):
        """Iterate over the table's (key, value) pairs."""
        return TableItemIterator(self)

    def __dict__(self):
        # NOTE(review): defining __dict__ as a method is unusual and shadows
        # the normal instance-dict protocol — confirm this is intentional.
        return dict(self.items())
|
Class for representing Lua tables.
:class:`Table` has a multifunctional constructor::
# Creating an empty table
t = Table()
# Converting a dict to a table
d = {'a': 1, 'b': 2}
t = Table(d)
# Converting an iterable to a table
i = [1, 2, 3]
t = Table(i)
.. note::
Members whose names start with ``_``
should be accessed with the subscription syntax::
# Won't work
Table({"_foo": 1})._foo
# Will work
Table({"_foo": 1})['_foo']
|
625990225166f23b2e2442a1
|
class PuzzleNode:
    """A puzzle configuration linked to the configurations it can be
    extended to (children) and the one it came from (parent)."""

    def __init__(self, puzzle=None, children=None, parent=None):
        self.puzzle = puzzle
        self.parent = parent
        # Copy the supplied children so later mutation of the caller's
        # list cannot affect this node.
        self.children = [] if children is None else children[:]

    def __eq__(self, other):
        # Equal iff same type, same puzzle, and the children collections
        # contain the same members (order-insensitive membership check).
        if type(self) != type(other):
            return False
        if self.puzzle != other.puzzle:
            return False
        mine, theirs = self.children, other.children
        return (all(child in mine for child in theirs)
                and all(child in theirs for child in mine))

    def __str__(self):
        rendered_children = "\n".join(str(child) for child in self.children)
        return "{}\n\n{}".format(self.puzzle, rendered_children)
|
A Puzzle configuration that refers to other configurations that it
can be extended to.
|
62599022be8e80087fbbff44
|
class DuplicateContent(Exception):
    """Raised to prevent ingest when duplicate content is detected.

    :param message: human-readable description of the duplication
    :param pids: optional list of pids detected as duplicates, to allow
        investigating the offending objects
    :param pid_cmodels: optional mapping of pid -> content model, so
        exception handlers can tell the object type of the duplicates
    """

    def __init__(self, message, pids=None, pid_cmodels=None):
        Exception.__init__(self, message)
        # Fixed: the original used mutable defaults (pids=[], pid_cmodels={}),
        # so all instances created without explicit values shared one
        # list/dict — mutating one exception's pids leaked into the next.
        self.pids = [] if pids is None else pids
        self.pid_cmodels = {} if pid_cmodels is None else pid_cmodels
|
Custom exception to prevent ingest when duplicate content is
detected. The optional list of pids should be specified when possible,
to allow investigating the objects detected as duplicates; a pid to
content model mapping should also provided if possible, to allow
exception handling to detect the object type of the duplicate records.
|
62599022d164cc6175821e44
|
class RoleClass(CAEXObject):
    """CAEX RoleClass: provides base structures for a role class definition."""

    refBaseClassPath = EAttribute(eType=EString)
    attribute = EReference(upper=-1, containment=True)
    externalInterface = EReference(upper=-1, containment=True)
    baseClass = EReference()
    roleClass = EReference(upper=-1, containment=True)

    def __init__(self, attribute=None, externalInterface=None, refBaseClassPath=None,
                 baseClass=None, roleClass=None, **kwargs):
        super().__init__(**kwargs)
        # Only assign values that were actually supplied, so the defaults
        # declared on the class features are preserved otherwise.
        if refBaseClassPath is not None:
            self.refBaseClassPath = refBaseClassPath
        if attribute:
            self.attribute.extend(attribute)
        if externalInterface:
            self.externalInterface.extend(externalInterface)
        if baseClass is not None:
            self.baseClass = baseClass
        if roleClass:
            self.roleClass.extend(roleClass)
|
Shall be used for RoleClass definition, provides base structures for a role class definition.
|
6259902230c21e258be996e4
|
class Solution:
    """Sort a singly linked list in O(n log n) time using recursive
    merge sort."""

    def sortList(self, head):
        """Merge-sort the list starting at ``head``; return the new head."""
        if head is None:
            return None
        if head.next is None:
            return head
        # Split the list into two halves at the middle node.
        mid = self.findMid(head)
        rightStart = mid.next
        mid.next = None
        left = self.sortList(head)
        right = self.sortList(rightStart)
        return self.merge(left, right)

    def findMid(self, head):
        """Return the middle node (the first of the two middle nodes for
        even lengths), found with the fast/slow pointer technique."""
        if head is None:
            return None
        fast, slow = head.next, head
        while fast is not None and fast.next is not None:
            fast = fast.next.next
            slow = slow.next
        return slow

    def merge(self, list1, list2):
        """Merge two sorted lists into one sorted list; return its head."""
        dummy = ListNode(None)
        curt = dummy
        while list1 is not None and list2 is not None:
            # Take the smaller head; on ties the node from list2 is taken.
            if list1.val >= list2.val:
                node = list2
                list2 = list2.next
            else:
                node = list1
                list1 = list1.next
            curt.next = node
            curt = curt.next
        # Append whichever list still has remaining nodes.
        if list1 is not None:
            curt.next = list1
        if list2 is not None:
            curt.next = list2
        return dummy.next
|
@param head: The first node of the linked list.
@return: You should return the head of the sorted linked list,
using constant space complexity.
|
625990225e10d32532ce406c
|
class ImageClip(VideoClip):
    """ImageClip(source, size)

    Represents a single still image (e.g. a loaded png or rendered text)
    as a one-frame, constant video clip.
    """

    def __init__(self, source, size):
        # One frame, constant content; fps=30 is nominal for a still image.
        super().__init__(source, VideoClipMetadata(size=size, frameCount=1, fps=30),
                         isConstant=True)
        self._image = None  # lazily produced via _imagegen in subclasses

    @memoizeHash
    def __hash__(self):
        return hash((self._source, self._metadata))

    def _pseudoeq(self, other):
        # Looser equality: same type and metadata, ignoring the source.
        return type(self) == type(other) and self._metadata == other._metadata

    def __eq__(self, other):
        return self._pseudoeq(other) and self._source == other._source

    def _framegen(self, n):
        # Every frame index yields the same image.
        return self._imagegen()

    def _imagegen(self):
        """Produce the image; must be implemented by subclasses."""
        raise NotImplementedError()
|
ImageClip(source, size)
Represents a single image, e.g. a loaded png or some rendered text.
|
6259902291af0d3eaad3acf6
|
class TestERFATestCases:
    """Test that we reproduce the sidereal-time test cases given in
    erfa/src/t_erfa_c.c."""

    def setup_class(cls):
        # Reference epoch MJD 53736.0 in both UT1 and TT scales.
        cls.time_ut1 = Time(2400000.5, 53736.0, scale='ut1', format='jd')
        cls.time_tt = Time(2400000.5, 53736.0, scale='tt', format='jd')
        # Pick delta_ut1_utc so that time_ut1's TT matches time_tt exactly
        # (first zero it so the tt conversion below is well defined).
        cls.time_ut1.delta_ut1_utc = 0.
        cls.time_ut1.delta_ut1_utc = 24 * 3600 * (
            (cls.time_ut1.tt.jd1 - cls.time_tt.jd1) +
            (cls.time_ut1.tt.jd2 - cls.time_tt.jd2))

    def test_setup(self):
        # Verify the delta chosen above makes the two TT times agree.
        assert np.allclose((self.time_ut1.tt.jd1 - self.time_tt.jd1) +
                           (self.time_ut1.tt.jd2 - self.time_tt.jd2),
                           0., atol=1.e-14)

    @pytest.mark.parametrize('erfa_test_input',
                             ((1.754174972210740592, 1e-12, "eraGmst00"),
                              (1.754174971870091203, 1e-12, "eraGmst06"),
                              (1.754174981860675096, 1e-12, "eraGmst82"),
                              (1.754166138018281369, 1e-12, "eraGst00a"),
                              (1.754166136510680589, 1e-12, "eraGst00b"),
                              (1.754166137675019159, 1e-12, "eraGst06a"),
                              (1.754166136020645203, 1e-12, "eraGst94")))
    def test_iau_models(self, erfa_test_input):
        result, precision, name = erfa_test_input
        # Map the ERFA routine name to (kind, model), e.g.
        # "eraGmst00" -> mean / IAU2000 and "eraGst94" -> apparent / IAU1994.
        if name[4] == 'm':
            kind = 'mean'
            model_name = f"IAU{20 if name[7] == '0' else 19:2d}{name[7:]:s}"
        else:
            kind = 'apparent'
            model_name = f"IAU{20 if name[6] == '0' else 19:2d}{name[6:].upper():s}"
        assert kind in SIDEREAL_TIME_MODELS.keys()
        assert model_name in SIDEREAL_TIME_MODELS[kind]
        gst = self.time_ut1.sidereal_time(kind, 'greenwich', model_name)
        # rtol=1. effectively disables the relative check; atol carries it.
        assert np.allclose(gst.to_value('radian'), result,
                           rtol=1., atol=precision)

    def test_era(self):
        """Earth rotation angle against the t_erfa_c reference value."""
        time_ut1 = Time(2400000.5, 54388.0, format='jd', scale='ut1')
        era = time_ut1.earth_rotation_angle('tio')
        expected = 0.4022837240028158102
        assert np.abs(era.to_value(u.radian) - expected) < 1e-12
|
Test that we reproduce the test cases given in erfa/src/t_erfa_c.c
|
625990226e29344779b01520
|
class OpenSecureChannelRequest(FrozenClass):
    """Creates a secure channel with a server.

    :ivar TypeId:
    :vartype TypeId: NodeId
    :ivar RequestHeader:
    :vartype RequestHeader: RequestHeader
    :ivar Parameters:
    :vartype Parameters: OpenSecureChannelParameters
    """

    def __init__(self, binary=None):
        # When raw bytes are supplied, deserialize instead of building defaults.
        if binary is not None:
            self._binary_init(binary)
            self._freeze = True
            return
        self.TypeId = FourByteNodeId(ObjectIds.OpenSecureChannelRequest_Encoding_DefaultBinary)
        self.RequestHeader = RequestHeader()
        self.Parameters = OpenSecureChannelParameters()
        self._freeze = True

    def to_binary(self):
        # Serialize the three members in wire order and join them once.
        parts = (self.TypeId.to_binary(),
                 self.RequestHeader.to_binary(),
                 self.Parameters.to_binary())
        return b''.join(parts)

    @staticmethod
    def from_binary(data):
        return OpenSecureChannelRequest(data)

    def _binary_init(self, data):
        # Members are decoded in the same order they are written.
        self.TypeId = NodeId.from_binary(data)
        self.RequestHeader = RequestHeader.from_binary(data)
        self.Parameters = OpenSecureChannelParameters.from_binary(data)

    def __str__(self):
        return ('OpenSecureChannelRequest('
                + 'TypeId:' + str(self.TypeId) + ', '
                + 'RequestHeader:' + str(self.RequestHeader) + ', '
                + 'Parameters:' + str(self.Parameters) + ')')

    __repr__ = __str__
|
Creates a secure channel with a server.
:ivar TypeId:
:vartype TypeId: NodeId
:ivar RequestHeader:
:vartype RequestHeader: RequestHeader
:ivar Parameters:
:vartype Parameters: OpenSecureChannelParameters
|
6259902221bff66bcd723b32
|
class SetViewSet(WgerOwnerObjectModelViewSet):
    """API endpoint for workout set objects."""

    serializer_class = SetSerializer
    is_private = True
    ordering_fields = '__all__'
    filterset_fields = (
        'exerciseday',
        'order',
        'sets',
    )

    def get_queryset(self):
        # Only sets belonging to the requesting user's own workouts.
        return Set.objects.filter(exerciseday__training__user=self.request.user)

    def get_owner_objects(self):
        # Ownership is established through the related Day object.
        return [(Day, 'exerciseday')]

    @action(detail=True)
    def computed_settings(self, request, pk):
        """Return the computed settings for this set."""
        out = SettingSerializer(self.get_object().compute_settings, many=True).data
        return Response({'results': out})

    @action(detail=True)
    def smart_text(self, request, pk):
        """Return the reps "smart text" for a given exercise (?exercise=<id>)."""
        try:
            exercise = get_object_or_404(Exercise, pk=int(self.request.GET.get('exercise')))
        except (TypeError, ValueError):
            # Fixed: a missing 'exercise' parameter makes GET.get() return
            # None, so int(None) raises TypeError, which the original did
            # not catch (only ValueError for non-numeric values) and which
            # surfaced as a 500 instead of 404.
            return HttpResponseNotFound()
        return Response({'results': self.get_object().reps_smart_text(exercise=exercise)})
|
API endpoint for workout set objects
|
6259902230c21e258be996e6
|
class Entry(Model):
    """The basic model for all feed entries."""

    __tablename__ = "entry"

    id = Column('id', Integer, primary_key=True)
    feed_id = Column(Integer, ForeignKey("feed.id"))
    published = Column(Integer)  # presumably a Unix timestamp — TODO confirm
    updated = Column(Integer)    # presumably a Unix timestamp — TODO confirm
    title = Column(String(1024))
    content = Column(Text)
    description = Column(String(256))
    link = Column(String(1024))
    remote_id = Column(String(1024))  # the entry's id in the remote feed

    def __init__(self, feed_id=None, published=None, updated=None, title=None,
                 content=None, description=None, link=None, remote_id=None,):
        self.feed_id = feed_id
        self.published = published
        self.updated = updated
        self.title = title
        self.content = content
        self.description = description
        self.link = link
        self.remote_id = remote_id
|
The basic model for all entries.
|
625990229b70327d1c57fc51
|
class MutationMeta(type):
    """Metaclass for mutation operator classes.

    Validates that the class defines a ``mutate(self, individual, engine)``
    method and (if present) a probability ``pm`` in (0, 1], wraps ``mutate``
    with a runtime type check on ``individual``, and attaches a per-class
    logger under the 'gaft' namespace.
    """

    def __new__(cls, name, bases, attrs):
        if 'mutate' not in attrs:
            raise AttributeError('mutation operator class must have mutate method')
        # pm, when defined, must be a valid probability.
        if 'pm' in attrs and (attrs['pm'] <= 0.0 or attrs['pm'] > 1.0):
            raise ValueError('Invalid mutation probability')
        mutate = attrs['mutate']
        # The mutate method is required to take an 'individual' parameter.
        sig = inspect.signature(mutate)
        if 'individual' not in sig.parameters:
            raise NameError('mutate method must have individual parameter')

        @wraps(mutate)
        def _wrapped_mutate(self, individual, engine):
            # Enforce at call time that individual is an IndividualBase.
            if not isinstance(individual, IndividualBase):
                raise TypeError('individual\' type must be subclass of IndividualBase')
            return mutate(self, individual, engine)
        attrs['mutate'] = _wrapped_mutate
        # Per-class logger, e.g. 'gaft.FlipBitMutation'.
        logger_name = 'gaft.{}'.format(name)
        attrs['logger'] = logging.getLogger(logger_name)
        return type.__new__(cls, name, bases, attrs)
|
Metaclass for mutation operator class.
|
62599022925a0f43d25e8f16
|
class StringIDProvider(object):
    """Provides unique, escaped string identifiers for arbitrary objects."""

    # Characters not allowed in an identifier, mapped to their
    # single-letter replacements.
    _ESCAPES = {'.': 'A', ' ': 'B', '(': 'C', ')': 'D',
                ',': 'E', '=': 'G', '#': 'H', '\'': 'I'}

    def __init__(self):
        self.wordset = WordSet()
        self.assoc = {}   # object -> identifier already handed out

    def _next(self, obj):
        # Ask the word set for a fresh name derived from str(obj), escaped.
        return self._escape(self.wordset.fresh(True, base=str(obj)))

    def _escape(self, string):
        """Replace each forbidden character by its replacement letter.

        Fixed: the original abused a bare try/except/finally as control
        flow (catching KeyError per character and appending in finally);
        a plain dict lookup with a default is equivalent and cannot
        swallow unrelated exceptions.
        """
        return "".join(self._ESCAPES.get(c, c) for c in string)

    def to_string(self, obj):
        """Return the identifier for ``obj`` wrapped in double quotes."""
        return "\"" + self.get(obj) + "\""

    def get(self, obj):
        """Return the identifier for ``obj``, creating it on first use."""
        try:
            return self.assoc[obj]
        except KeyError:
            new = self._next(obj)
            self.assoc[obj] = new
            return new
|
Simple class that provides unique identifiers for objects.
|
625990228c3a8732951f7428
|
class RoleConfig:
    """Configuration builder for a Kubernetes RBAC Role.

    ``api_groups``, ``resources`` and ``verbs`` are comma-separated strings.
    """

    def __init__(self, name, namespace, api_groups, resources, verbs):
        self._name = name
        self._namespace = namespace
        # Split the comma-separated specs into lists once, up front.
        self._api_groups = api_groups.split(',')
        self._resources = resources.split(',')
        self._verbs = verbs.split(',')

    def build(self):
        """Return the Role as a dict ready for serialization."""
        metadata = {'name': self._name, 'namespace': self._namespace}
        rule = {
            'apiGroups': self._api_groups,
            'resources': self._resources,
            'verbs': self._verbs,
        }
        return {'kind': 'Role', 'metadata': metadata, 'rules': [rule]}
|
Configuration builder for Kubernetes RBAC roles
|
625990226fece00bbaccc88a
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.