code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
|---|---|---|
class DrawPanel(wx.Panel): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> wx.Panel.__init__(self, *args, **kwargs) <NEW_LINE> self.data=[1,2,2,3,3,3,4,4,5] <NEW_LINE> self.classes=5 <NEW_LINE> self.histogram=histogram(self.data,self.classes) <NEW_LINE> self.width=100 <NEW_LINE> self.classwidth=int(float(self.width)/self.classes) <NEW_LINE> self.minslider = wx.Slider(self, 100, 25, 0, self.classes, pos=(0, 100), size=(self.width+20, 20), style=wx.SL_HORIZONTAL | wx.SL_AUTOTICKS | wx.SL_TOP) <NEW_LINE> self.minslider.SetTickFreq(1, self.classes) <NEW_LINE> self.minslider.SetValue(0) <NEW_LINE> self.minOldValue=0 <NEW_LINE> self.maxslider = wx.Slider(self, 100, 25, 0, self.classes, pos=(0, 120), size=(self.width+20, 20), style=wx.SL_HORIZONTAL | wx.SL_AUTOTICKS | wx.SL_TOP) <NEW_LINE> self.maxslider.SetTickFreq(1, self.classes) <NEW_LINE> self.maxslider.SetValue(5) <NEW_LINE> self.maxOldValue=5 <NEW_LINE> self.Bind(wx.EVT_PAINT, self.OnPaint) <NEW_LINE> self.Bind(wx.EVT_SLIDER, self.OnSlide) <NEW_LINE> <DEDENT> def OnPaint(self, event=None): <NEW_LINE> <INDENT> self.dc = wx.PaintDC(self) <NEW_LINE> self.dc.SetBackground(wx.Brush("Grey")) <NEW_LINE> self.dc.Clear() <NEW_LINE> self.dc.SetBrush(wx.Brush("Black")) <NEW_LINE> self.dc.SetPen(wx.TRANSPARENT_PEN) <NEW_LINE> for col,value in enumerate(self.histogram): <NEW_LINE> <INDENT> self.dc.DrawRectangle(10+(self.classwidth*col), 100, self.classwidth, value*-10) <NEW_LINE> <DEDENT> <DEDENT> def OnSlide(self, event=None): <NEW_LINE> <INDENT> if self.minOldValue>self.minslider.GetValue(): <NEW_LINE> <INDENT> self.dc.SetBrush(wx.Brush("Black")) <NEW_LINE> for col in range(self.minslider.GetValue(),self.minOldValue): <NEW_LINE> <INDENT> self.dc.DrawRectangle(10+(self.classwidth*col), 100, self.classwidth/2, self.histogram[col]*-10) <NEW_LINE> <DEDENT> <DEDENT> elif self.minOldValue<self.minslider.GetValue(): <NEW_LINE> <INDENT> self.dc.SetBrush(wx.Brush("Blue")) <NEW_LINE> for col in 
range(self.minOldValue,self.minslider.GetValue()): <NEW_LINE> <INDENT> self.dc.DrawRectangle(10+(self.classwidth*col), 100, self.classwidth/2, self.histogram[col]*-10) <NEW_LINE> <DEDENT> <DEDENT> if self.maxOldValue>self.maxslider.GetValue(): <NEW_LINE> <INDENT> self.dc.SetBrush(wx.Brush("Blue")) <NEW_LINE> for col in range(self.maxslider.GetValue(),self.maxOldValue): <NEW_LINE> <INDENT> self.dc.DrawRectangle(20+(self.classwidth*col), 100, self.classwidth/2, self.histogram[col]*-10) <NEW_LINE> <DEDENT> <DEDENT> elif self.maxOldValue<self.maxslider.GetValue(): <NEW_LINE> <INDENT> self.dc.SetBrush(wx.Brush("Black")) <NEW_LINE> for col in range(self.maxOldValue,self.maxslider.GetValue()): <NEW_LINE> <INDENT> self.dc.DrawRectangle(20+(self.classwidth*col), 100, self.classwidth/2, self.histogram[col]*-10) <NEW_LINE> <DEDENT> <DEDENT> self.maxOldValue=self.maxslider.GetValue() <NEW_LINE> self.minOldValue=self.minslider.GetValue()
|
Draw a line to a panel.
|
6259902526238365f5fada71
|
class Handler(object): <NEW_LINE> <INDENT> def __init__(self, iterationInterval=np.inf): <NEW_LINE> <INDENT> self.iterationInterval = iterationInterval <NEW_LINE> self.lastIterationDivisor = -1 <NEW_LINE> <DEDENT> def execute(self, data, iterationNumber): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def reset_for_next_timestep(self): <NEW_LINE> <INDENT> pass
|
Abstract superclass for Handlers.
iterationInterval is the interval for executing the Handler's task. If iterationInterval=50,
then the Handler will execute every 50 iterations (on iteration 50, 100, 150, etc.)
lastIterationDivisor tracks when the Handler should execute. The Executor computes
iterationNumber // iterationInterval (note the integer division). When iterationNumber is
large enough such that the result exceeds lastIterationDivisor, then it executes the Handler's
task and increments lastIterationDivisor.
|
62599025a4f1c619b294f514
|
class InitiatorRequestorGrpEp(ManagedObject): <NEW_LINE> <INDENT> consts = InitiatorRequestorGrpEpConsts() <NEW_LINE> naming_props = set([u'id']) <NEW_LINE> mo_meta = MoMeta("InitiatorRequestorGrpEp", "initiatorRequestorGrpEp", "req-grp-[id]", VersionMeta.Version131a, "InputOutput", 0x1f, [], ["read-only"], [u'topSystem'], [u'initiatorMemberEp', u'initiatorUnitEp'], [None]) <NEW_LINE> prop_meta = { "alloc_state": MoPropertyMeta("alloc_state", "allocState", "string", VersionMeta.Version131a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["allocated", "allocating", "failed", "none"], []), "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version131a, MoPropertyMeta.INTERNAL, None, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []), "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version131a, MoPropertyMeta.READ_ONLY, 0x2, 0, 256, None, [], []), "ep_dn": MoPropertyMeta("ep_dn", "epDn", "string", VersionMeta.Version131a, MoPropertyMeta.READ_ONLY, None, 0, 256, None, [], []), "id": MoPropertyMeta("id", "id", "uint", VersionMeta.Version131a, MoPropertyMeta.NAMING, 0x4, None, None, None, [], []), "lc": MoPropertyMeta("lc", "lc", "string", VersionMeta.Version131a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["allocated", "available", "deallocated", "repurposed"], []), "pol_dn": MoPropertyMeta("pol_dn", "polDn", "string", VersionMeta.Version131a, MoPropertyMeta.READ_ONLY, None, 0, 256, None, [], []), "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version131a, MoPropertyMeta.READ_ONLY, 0x8, 0, 256, None, [], []), "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version131a, MoPropertyMeta.READ_WRITE, 0x10, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []), "type": MoPropertyMeta("type", "type", "string", VersionMeta.Version131a, MoPropertyMeta.READ_ONLY, None, None, None, None, 
["dedicated", "policy", "shared"], []), } <NEW_LINE> prop_map = { "allocState": "alloc_state", "childAction": "child_action", "dn": "dn", "epDn": "ep_dn", "id": "id", "lc": "lc", "polDn": "pol_dn", "rn": "rn", "status": "status", "type": "type", } <NEW_LINE> def __init__(self, parent_mo_or_dn, id, **kwargs): <NEW_LINE> <INDENT> self._dirty_mask = 0 <NEW_LINE> self.id = id <NEW_LINE> self.alloc_state = None <NEW_LINE> self.child_action = None <NEW_LINE> self.ep_dn = None <NEW_LINE> self.lc = None <NEW_LINE> self.pol_dn = None <NEW_LINE> self.status = None <NEW_LINE> self.type = None <NEW_LINE> ManagedObject.__init__(self, "InitiatorRequestorGrpEp", parent_mo_or_dn, **kwargs)
|
This is InitiatorRequestorGrpEp class.
|
6259902556b00c62f0fb37e0
|
class GCSCredentialsProvider(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.client = Client() <NEW_LINE> bucket_name = os.environ.get('BUCKET_NAME', f'{get_project_id()}.appspot.com') <NEW_LINE> self.bucket = self.client.get_bucket(bucket_name) <NEW_LINE> self.client_key = self._read_gcs_file('client_key') <NEW_LINE> self.client_secret = self._read_gcs_file('client_secret') <NEW_LINE> self.resource_owner_key = self._read_gcs_file('resource_owner_key') <NEW_LINE> self.resource_owner_secret = self._read_gcs_file('resource_owner_secret') <NEW_LINE> <DEDENT> def _read_gcs_file(self, filename): <NEW_LINE> <INDENT> filepath = os.path.join('twitter', filename) <NEW_LINE> blob = self.bucket.get_blob(filepath) <NEW_LINE> contents = blob.download_as_string().decode('utf-8').strip() <NEW_LINE> return contents
|
Reads Twitter credentials from Google Cloud Storage.
|
62599025d99f1b3c44d065c3
|
class MyStepper(object): <NEW_LINE> <INDENT> def __init__(self, stepsize=0.5): <NEW_LINE> <INDENT> self.stepsize = stepsize <NEW_LINE> <DEDENT> def __call__(self, x): <NEW_LINE> <INDENT> s = self.stepsize <NEW_LINE> scale_om = 10.0 <NEW_LINE> scale_Am = 100.0 <NEW_LINE> scale_Cm = 100.0 <NEW_LINE> scale_tm = 10.0 <NEW_LINE> inc = scale_om * np.random.uniform(-s, s) <NEW_LINE> if (x[0] + inc <= 0): <NEW_LINE> <INDENT> x[0] += abs(inc) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> x[0] += inc <NEW_LINE> <DEDENT> x[1] += np.exp(-abs(x[1])) * np.random.uniform(-s, s) <NEW_LINE> x[2] += scale_Am * np.random.uniform(-s, s) <NEW_LINE> x[3] += scale_Cm * np.random.uniform(-s, s) <NEW_LINE> inc = scale_tm * np.random.uniform(-s, s) <NEW_LINE> if (x[4] + inc <= 0): <NEW_LINE> <INDENT> x[4] += abs(inc) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> x[4] += inc <NEW_LINE> <DEDENT> return x
|
Implements some simple reasonable modifications to parameter stepping
* Frequencies are kept positive
* Powers are exponentially suppressed from getting 'large'
*
|
62599025d164cc6175821e98
|
class StatsSitemapCommand(SubcommandsCommand): <NEW_LINE> <INDENT> args = 'sitemap-uri' <NEW_LINE> help = 'Get sitemap stats. Sitemap URI is either a relative URL or a valid urlconf name' <NEW_LINE> def handle(self, *args, **options): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> sitemap = args[0] <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> raise CommandError("Not enough arguments. Sitemap URI must be provided") <NEW_LINE> <DEDENT> status = api.sitemaps.sitemap_stats(sitemap) <NEW_LINE> if status.error: <NEW_LINE> <INDENT> raise CommandError(unicode(status)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.stdout.write(unicode(status))
|
Wrapper for ``google_webmastertools.api.sitemaps.sitemap_stats``
|
62599025d18da76e235b78de
|
@dataclass <NEW_LINE> class Profile(BaseModel): <NEW_LINE> <INDENT> real_name: str <NEW_LINE> display_name: str <NEW_LINE> real_name_normalized: str <NEW_LINE> display_name_normalized: str <NEW_LINE> team: str
|
Example Profile
{
"avatar_hash": "ge3b51ca72de",
"status_text": "Print is dead",
"status_emoji": ":books:",
"real_name": "Egon Spengler",
"display_name": "spengler",
"real_name_normalized": "Egon Spengler",
"display_name_normalized": "spengler",
"email": "spengler@ghostbusters.example.com",
"image_original": "https://.../avatar/e3b51ca72dee4ef87916ae2b9240df50.jpg",
"image_24": "https://.../avatar/e3b51ca72dee4ef87916ae2b9240df50.jpg",
"image_32": "https://.../avatar/e3b51ca72dee4ef87916ae2b9240df50.jpg",
"image_48": "https://.../avatar/e3b51ca72dee4ef87916ae2b9240df50.jpg",
"image_72": "https://.../avatar/e3b51ca72dee4ef87916ae2b9240df50.jpg",
"image_192": "https://.../avatar/e3b51ca72dee4ef87916ae2b9240df50.jpg",
"image_512": "https://.../avatar/e3b51ca72dee4ef87916ae2b9240df50.jpg",
"team": "T012AB3C4"
}
|
625990255e10d32532ce4095
|
class TestRecorderRuns(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.session = session = SESSION() <NEW_LINE> session.query(Events).delete() <NEW_LINE> session.query(States).delete() <NEW_LINE> session.query(RecorderRuns).delete() <NEW_LINE> self.addCleanup(self.tear_down_cleanup) <NEW_LINE> <DEDENT> def tear_down_cleanup(self): <NEW_LINE> <INDENT> self.session.rollback() <NEW_LINE> <DEDENT> def test_entity_ids(self): <NEW_LINE> <INDENT> run = RecorderRuns( start=datetime(2016, 7, 9, 11, 0, 0, tzinfo=dt.UTC), end=datetime(2016, 7, 9, 23, 0, 0, tzinfo=dt.UTC), closed_incorrect=False, created=datetime(2016, 7, 9, 11, 0, 0, tzinfo=dt.UTC), ) <NEW_LINE> self.session.add(run) <NEW_LINE> self.session.commit() <NEW_LINE> before_run = datetime(2016, 7, 9, 8, 0, 0, tzinfo=dt.UTC) <NEW_LINE> in_run = datetime(2016, 7, 9, 13, 0, 0, tzinfo=dt.UTC) <NEW_LINE> in_run2 = datetime(2016, 7, 9, 15, 0, 0, tzinfo=dt.UTC) <NEW_LINE> in_run3 = datetime(2016, 7, 9, 18, 0, 0, tzinfo=dt.UTC) <NEW_LINE> after_run = datetime(2016, 7, 9, 23, 30, 0, tzinfo=dt.UTC) <NEW_LINE> assert run.to_native() == run <NEW_LINE> assert run.entity_ids() == [] <NEW_LINE> self.session.add( States( entity_id="sensor.temperature", state="20", last_changed=before_run, last_updated=before_run, ) ) <NEW_LINE> self.session.add( States( entity_id="sensor.sound", state="10", last_changed=after_run, last_updated=after_run, ) ) <NEW_LINE> self.session.add( States( entity_id="sensor.humidity", state="76", last_changed=in_run, last_updated=in_run, ) ) <NEW_LINE> self.session.add( States( entity_id="sensor.lux", state="5", last_changed=in_run3, last_updated=in_run3, ) ) <NEW_LINE> assert sorted(run.entity_ids()) == ["sensor.humidity", "sensor.lux"] <NEW_LINE> assert run.entity_ids(in_run2) == ["sensor.humidity"]
|
Test recorder run model.
|
625990251d351010ab8f4a38
|
class GitHubSearch(): <NEW_LINE> <INDENT> def get_json(self, search_string): <NEW_LINE> <INDENT> response = requests.get( f"https://api.github.com/search/code?q={search_string}+in:path+repo:bioconda/bioconda-recipes+path:recipes", timeout=MULLED_SOCKET_TIMEOUT) <NEW_LINE> response.raise_for_status() <NEW_LINE> return response.json() <NEW_LINE> <DEDENT> def process_json(self, json_response, search_string): <NEW_LINE> <INDENT> top_10_items = json_response['items'][0:10] <NEW_LINE> return [{'name': result['name'], 'path': result['path']} for result in top_10_items] <NEW_LINE> <DEDENT> def recipe_present(self, search_string): <NEW_LINE> <INDENT> response = requests.get(f"https://api.github.com/repos/bioconda/bioconda-recipes/contents/recipes/{search_string}", timeout=MULLED_SOCKET_TIMEOUT) <NEW_LINE> return response.status_code == 200
|
Tool to search the GitHub bioconda-recipes repo
|
625990253eb6a72ae038b587
|
class Machine(base.Machine): <NEW_LINE> <INDENT> def __init__(self, name, init_state=None): <NEW_LINE> <INDENT> self.net = PTNet(name) <NEW_LINE> self.machine = self.net.open(init_state)
|
state machine
|
625990256e29344779b01573
|
class UserBookRelation(models.Model): <NEW_LINE> <INDENT> RATE = ( (1, 'Плохо'), (2, 'Так себе'), (3, 'Нормально'), (4, 'Хорошо'), (5, 'Отлично') ) <NEW_LINE> book = models.ForeignKey(Books, on_delete=models.CASCADE, verbose_name='Книга') <NEW_LINE> user = models.ForeignKey(User, on_delete=models.CASCADE, verbose_name='Пользователь') <NEW_LINE> like = models.BooleanField(default=False, verbose_name='Мне нравится') <NEW_LINE> in_bookmarks = models.BooleanField(default=False, verbose_name='В закладки') <NEW_LINE> rate = models.PositiveSmallIntegerField(choices=RATE, blank=True, null=True, verbose_name='Оценка') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> unique_together = ('book', 'user') <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f'Отношение {self.user} к {self.book}'
|
Модель отношения пользователя к книге
|
625990251d351010ab8f4a39
|
class custom_browser(Remote): <NEW_LINE> <INDENT> def find_element_by_xpath(self, *args, **kwargs): <NEW_LINE> <INDENT> rv = super(custom_browser, self).find_element_by_xpath(*args, **kwargs) <NEW_LINE> return rv <NEW_LINE> <DEDENT> def wait_for_valid_connection(self, username, logger): <NEW_LINE> <INDENT> counter = 0 <NEW_LINE> while True and counter < 10: <NEW_LINE> <INDENT> sirens_wailing, emergency_state = emergency_exit(self, username, logger) <NEW_LINE> if sirens_wailing and emergency_state == 'not connected': <NEW_LINE> <INDENT> logger.warning('there is no valid connection') <NEW_LINE> counter += 1 <NEW_LINE> sleep(60) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def wait_for_valid_authorization(self, username, logger): <NEW_LINE> <INDENT> current_url = get_current_url(self) <NEW_LINE> auth_method = 'activity counts' <NEW_LINE> counter = 0 <NEW_LINE> while True and counter < 10: <NEW_LINE> <INDENT> login_state = check_authorization(self, username, auth_method, logger) <NEW_LINE> if login_state is False: <NEW_LINE> <INDENT> logger.warning('not logged in') <NEW_LINE> counter += 1 <NEW_LINE> sleep(60) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> web_address_navigator(self, current_url)
|
Custom browser instance for manipulation later on
|
6259902526238365f5fada77
|
class UserFavouriteStore(db.Model, BaseMixin): <NEW_LINE> <INDENT> __tablename__ = 'favourite_stores' <NEW_LINE> user_id = db.Column(db.Integer(), db.ForeignKey('users.id')) <NEW_LINE> store_id = db.Column(db.Integer(), db.ForeignKey('stores.id')) <NEW_LINE> store = db.relationship('Store', primaryjoin="Store.id==UserFavouriteStore.store_id")
|
Таблица избранных магазинов для пользователя
|
6259902521bff66bcd723b88
|
class ValueMatcher(Matcher): <NEW_LINE> <INDENT> def callsign(self, args: Sequence[MatchArgT], kwargs: Mapping[Any, Any]) -> Sequence: <NEW_LINE> <INDENT> return cast(Sequence[Any], unbox(resolve_pattern(args, kwargs))) <NEW_LINE> <DEDENT> def case(self, *params: Any, **opts: Any) -> Callable: <NEW_LINE> <INDENT> def wrap(handler, *xparams, **xopts): <NEW_LINE> <INDENT> dispatch = unbox(params) <NEW_LINE> self._raise_on_conflict(dispatch) <NEW_LINE> self[dispatch] = handler <NEW_LINE> return handler <NEW_LINE> <DEDENT> return wrap <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> matchreps = self.descresponses(str) <NEW_LINE> return f"<ValueMatcher: {matchreps} >"
|
Concrete implementation of a value matching instance.
If you want to type a type matcher, use standard technique when
using ``Generic`` types:
>>> from kingston.match import ValueMatcher, Miss
>>> my_val_matcher:Matcher[int, str] = ValueMatcher({
... 1: lambda x: 'one!',
... 2: lambda x: 'two!',
... Miss: lambda x: 'many!'})
>>> my_val_matcher(1)
'one!'
>>> my_val_matcher(2)
'two!'
>>> my_val_matcher(3)
'many!'
>>> my_val_matcher('x') # ok at runtime but fails mypy (& missleading..)
'many!'
>>>
It will try to give a reasonably human representation when
inspected:
>>> my_val_matcher
<ValueMatcher: (1)->λ, (2)->λ, (<class 'kingston.match.Miss_'>)->λ >
>>>
You can also declare cases as methods in a custom ``ValueMatcher``
subclass.
Use the function ``value_case()`` to declare value
cases. **Note:** *imported as a shorthand*:
>>> from kingston.match import Matcher, ValueMatcher
>>> from kingston.match import value_case as case
>>> class SimplestEval(ValueMatcher):
... @case(Any, '+', Any)
... def _add(self, a, op, b) -> int:
... return a + b
...
... @case(Any, '-', Any)
... def _sub(self, a, op, b) -> int:
... return a - b
>>> simpl_eval = SimplestEval()
>>> simpl_eval(1, '+', 2)
3
>>> simpl_eval(10, '-', 5)
5
|
62599025be8e80087fbbff9e
|
class Keyboard(LoggerSuper): <NEW_LINE> <INDENT> logger = logging.getLogger('Keyboard') <NEW_LINE> def __init__(self, observers=None): <NEW_LINE> <INDENT> self.observers = [] <NEW_LINE> if observers is not None: <NEW_LINE> <INDENT> if isinstance(observers, list): <NEW_LINE> <INDENT> for _observer in observers: <NEW_LINE> <INDENT> self.observers.append(_observer) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.observers.append(observers) <NEW_LINE> <DEDENT> <DEDENT> self.queue = Queue(maxsize=3) <NEW_LINE> self.inputThread = threading.Thread(target=self.read_kbd_input, args=(), daemon=False) <NEW_LINE> self.inputThread.start() <NEW_LINE> self.logger.info('start keyboard thread') <NEW_LINE> <DEDENT> def add_observer(self, observer): <NEW_LINE> <INDENT> self.observers.append(observer) <NEW_LINE> <DEDENT> def del_observer(self, observer): <NEW_LINE> <INDENT> if observer in self.observers: <NEW_LINE> <INDENT> self.observers.remove(observer) <NEW_LINE> <DEDENT> <DEDENT> def notify_observers(self): <NEW_LINE> <INDENT> for observer in self.observers: <NEW_LINE> <INDENT> observer.keyboard_notify(self) <NEW_LINE> <DEDENT> <DEDENT> def get(self): <NEW_LINE> <INDENT> return self.queue.get() <NEW_LINE> <DEDENT> def read_kbd_input(self): <NEW_LINE> <INDENT> while BaseClass.working(): <NEW_LINE> <INDENT> input_str = input() <NEW_LINE> print('Enter command: ' + input_str) <NEW_LINE> cmd_list = input_str.split(' ') <NEW_LINE> if len(cmd_list) > 0: <NEW_LINE> <INDENT> if 'exit' in cmd_list: <NEW_LINE> <INDENT> BaseClass.exit() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.queue.put(cmd_list) <NEW_LINE> self.notify_observers()
|
Класс реализует поток чтение и обработку команд из консоли
|
6259902556b00c62f0fb37e6
|
class IShaderRenderStage(IRenderStage): <NEW_LINE> <INDENT> pass
|
Render stage for GLSL
|
62599025a4f1c619b294f51c
|
class RandomLighting(HybridBlock): <NEW_LINE> <INDENT> def __init__(self, alpha): <NEW_LINE> <INDENT> super(RandomLighting, self).__init__() <NEW_LINE> self._alpha = alpha <NEW_LINE> <DEDENT> def hybrid_forward(self, F, x): <NEW_LINE> <INDENT> return F.image.random_lighting(x, self._alpha)
|
Add AlexNet-style PCA-based noise to an image.
Parameters
----------
alpha : float
Intensity of the image.
Inputs:
- **data**: input tensor with (H x W x C) shape.
Outputs:
- **out**: output tensor with same shape as `data`.
|
625990258c3a8732951f747f
|
class MyArray: <NEW_LINE> <INDENT> def __init__(self, capacity: int): <NEW_LINE> <INDENT> self._data = [] <NEW_LINE> self._count = 0 <NEW_LINE> self._capacity = capacity <NEW_LINE> <DEDENT> def __getitem__(self, position: int) -> int: <NEW_LINE> <INDENT> return self._data[position] <NEW_LINE> <DEDENT> def find(self, index: int) -> Optional[int]: <NEW_LINE> <INDENT> if index >= self._count or index <= -self._count: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self._data[index] <NEW_LINE> <DEDENT> def delete(self, index: int) -> bool: <NEW_LINE> <INDENT> if index >= self._count or index <= -self._count: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> self._data[index:-1] = self._data[index + 1:] <NEW_LINE> self._count -= 1 <NEW_LINE> self._data = self._data[0:self._count] <NEW_LINE> print('delete function', self._data) <NEW_LINE> return True <NEW_LINE> <DEDENT> def insert(self, index: int, value: int) -> bool: <NEW_LINE> <INDENT> if self._capacity == self._count: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if index >= self._count: <NEW_LINE> <INDENT> self._data.append(value) <NEW_LINE> <DEDENT> if index < 0: <NEW_LINE> <INDENT> print(index) <NEW_LINE> self._data.insert(0, value) <NEW_LINE> <DEDENT> self._count += 1 <NEW_LINE> return True <NEW_LINE> <DEDENT> def insert_v2(self, index: int, value: int) -> bool: <NEW_LINE> <INDENT> if self._capacity == self._count: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if index >= self._count: <NEW_LINE> <INDENT> self._data.append(value) <NEW_LINE> <DEDENT> elif index < 0: <NEW_LINE> <INDENT> self._data.insert(0, value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._data[index+1:self._count+1] = self._data[index:self._count] <NEW_LINE> self._data[index] = value <NEW_LINE> <DEDENT> self._count += 1 <NEW_LINE> return True <NEW_LINE> <DEDENT> def insert_to_tail(self, value: int) -> bool: <NEW_LINE> <INDENT> if self._count == self._capacity: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> 
if self._count == len(self._data): <NEW_LINE> <INDENT> self._data.append(value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._data[self._count] = value <NEW_LINE> <DEDENT> self._count += 1 <NEW_LINE> return True <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return " ".join(str(num) for num in self._data[:self._count]) <NEW_LINE> <DEDENT> def print_all(self): <NEW_LINE> <INDENT> for num in self._data[:self._count]: <NEW_LINE> <INDENT> print(f"{num}", end=" ") <NEW_LINE> <DEDENT> print("\n", flush=True)
|
A simple wrapper around List.
You cannot have -1 in the array.
|
625990255166f23b2e2442fd
|
class Encoder(serializable.Serializable): <NEW_LINE> <INDENT> def __init__(self, num_classes=None): <NEW_LINE> <INDENT> self.num_classes = num_classes <NEW_LINE> self._labels = None <NEW_LINE> self._int_to_label = {} <NEW_LINE> <DEDENT> def fit_with_labels(self, data): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def encode(self, data): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def decode(self, data): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def get_config(self): <NEW_LINE> <INDENT> return { 'num_classes': self.num_classes, 'labels': self._labels, 'int_to_label': self._int_to_label, } <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_config(cls, config): <NEW_LINE> <INDENT> obj = super().from_config(config) <NEW_LINE> obj._labels = config['labels'] <NEW_LINE> obj._int_to_label = config['int_to_label']
|
Base class for encoders of the prediction targets.
# Arguments
num_classes: Int. The number of classes. Defaults to None.
|
6259902556b00c62f0fb37e8
|
@_register_item_type('mvAppItemType::DragInt4') <NEW_LINE> class DragInt4(DragInput[int, Tuple[int, int, int, int]]): <NEW_LINE> <INDENT> _default_value = (0, 0, 0, 0) <NEW_LINE> def __setup_add_widget__(self, dpg_args) -> None: <NEW_LINE> <INDENT> dpgcore.add_drag_int4(self.id, **dpg_args)
|
A drag input for 4 integer values.
If not disabled using the :attr:`no_input` property, the drag input can be CTRL+Clicked to turn it
into an input box for manual input of a value.
|
62599025287bf620b6272b17
|
class Player: <NEW_LINE> <INDENT> def __init__(self, name, history): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.history = history <NEW_LINE> self.money = 5 <NEW_LINE> <DEDENT> def currentMoney(self): <NEW_LINE> <INDENT> return self.money <NEW_LINE> <DEDENT> def makeChoice(self, choice): <NEW_LINE> <INDENT> self.choice = choice <NEW_LINE> <DEDENT> def addMoney(self): <NEW_LINE> <INDENT> self.money += 1 <NEW_LINE> <DEDENT> def stealMoney(self): <NEW_LINE> <INDENT> self.money += 2 <NEW_LINE> <DEDENT> def loseMoney(self): <NEW_LINE> <INDENT> self.money -= 2 <NEW_LINE> <DEDENT> def addMoveToHistory(self, move, history): <NEW_LINE> <INDENT> self.history.append(move) <NEW_LINE> <DEDENT> def showName(self): <NEW_LINE> <INDENT> response = "Player: " + self.name <NEW_LINE> return response <NEW_LINE> <DEDENT> def showBalance(self): <NEW_LINE> <INDENT> response = "Balance: $" + str(self.money) <NEW_LINE> return response <NEW_LINE> <DEDENT> def showHistory(self): <NEW_LINE> <INDENT> for obj in self.history: <NEW_LINE> <INDENT> print(obj) <NEW_LINE> <DEDENT> <DEDENT> def lastHistory(self, times): <NEW_LINE> <INDENT> newHistory = [] <NEW_LINE> for number in range(times): <NEW_LINE> <INDENT> tester = -number <NEW_LINE> newYatsu = self.history[tester] <NEW_LINE> newHistory.append(newYatsu) <NEW_LINE> <DEDENT> return newHistory
|
Base class for the player
|
62599025a4f1c619b294f51e
|
class FordFulkersonDFSSolver(SolverBase): <NEW_LINE> <INDENT> def __init__(self, n: int, s: int, t: int): <NEW_LINE> <INDENT> super().__init__(n,s,t) <NEW_LINE> <DEDENT> def solve(self) -> None: <NEW_LINE> <INDENT> f = -1 <NEW_LINE> while f != 0: <NEW_LINE> <INDENT> f = self.dfs(self.s, inf) <NEW_LINE> self.visitedToken += 1 <NEW_LINE> self.MaxFlow += f <NEW_LINE> <DEDENT> <DEDENT> def dfs(self, node: int, flow: float): <NEW_LINE> <INDENT> if node == self.t: <NEW_LINE> <INDENT> return flow <NEW_LINE> <DEDENT> self.visit(node) <NEW_LINE> for edge in self.graph[node]: <NEW_LINE> <INDENT> if edge.remainingCapacity() > 0 and self.visited[edge.to] != self.visitedToken: <NEW_LINE> <INDENT> bottleNeck = self.dfs(edge.to, min(flow, edge.remainingCapacity())) <NEW_LINE> if bottleNeck > 0: <NEW_LINE> <INDENT> edge.augment(bottleNeck) <NEW_LINE> return bottleNeck <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return 0
|
Extends the SolverBase method to use a depth first search method for finding the augmenting paths.
A link for depth first search can be found here: https://www.youtube.com/watch?v=AfSk24UTFS8&t=2407s
|
6259902526238365f5fada7b
|
class exc_result: <NEW_LINE> <INDENT> thrift_spec = ( (0, TType.STRING, 'success', None, None, ), ) <NEW_LINE> def __init__(self, success=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.STRING: <NEW_LINE> <INDENT> self.success = iprot.readString(); <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('exc_result') <NEW_LINE> if self.success is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.STRING, 0) <NEW_LINE> oprot.writeString(self.success) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', 
'.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
|
Attributes:
- success
|
62599025ac7a0e7691f73413
|
class Interior: <NEW_LINE> <INDENT> def __init__(self, file=None): <NEW_LINE> <INDENT> self.sphere_count = 0 <NEW_LINE> self.spheres = [] <NEW_LINE> if file: <NEW_LINE> <INDENT> self.read(file) <NEW_LINE> <DEDENT> <DEDENT> def read(self, file): <NEW_LINE> <INDENT> self.sphere_count = struct.unpack("<h", file.read(2))[0] <NEW_LINE> self.spheres = [Sphere(file) for x in range(self.sphere_count)] <NEW_LINE> <DEDENT> def write(self, file): <NEW_LINE> <INDENT> file.write(struct.pack("<h", self.sphere_count)) <NEW_LINE> for x in range(self.sphere_count): <NEW_LINE> <INDENT> self.spheres[x].write(file) <NEW_LINE> <DEDENT> <DEDENT> def as_dict(self): <NEW_LINE> <INDENT> dic = {"sphere_count": self.sphere_count, "spheres": [s.as_dict() for s in self.spheres], } <NEW_LINE> return dic
|
Interior used in .hul
|
6259902521bff66bcd723b8c
|
class Rook(_Piece): <NEW_LINE> <INDENT> symbol = 'r' <NEW_LINE> def _moves(self, square, board, captures_only): <NEW_LINE> <INDENT> move_dir = ['n', 'e', 's', 'w'] <NEW_LINE> moves = self._bqr_moves(square, board.squares, self.color, move_dir) <NEW_LINE> return moves
|
A Rook
|
6259902566673b3332c31318
|
class TestCalculateMeanFofcCartSites(unittest.TestCase): <NEW_LINE> <INDENT> @unittest.skip("Not implemented") <NEW_LINE> def test_calc_mean_fofc_cart_sites(self): <NEW_LINE> <INDENT> self.assertEqual(True, False)
|
Test the calculation of |Fo-Fc| given cartesian sites
|
625990256fece00bbaccc8e4
|
class RunnerMode: <NEW_LINE> <INDENT> REMIND = 0 <NEW_LINE> EVENTS = 1
|
Режим запуска
|
62599025a4f1c619b294f520
|
class Sale(Discount): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = _("Sale") <NEW_LINE> verbose_name_plural = _("Sales") <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> super(Sale, self).save(*args, **kwargs) <NEW_LINE> self._clear() <NEW_LINE> if self.active: <NEW_LINE> <INDENT> extra_filter = {} <NEW_LINE> if self.discount_deduct is not None: <NEW_LINE> <INDENT> extra_filter["unit_price__gt"] = self.discount_deduct <NEW_LINE> sale_price = models.F("unit_price") - self.discount_deduct <NEW_LINE> <DEDENT> elif self.discount_percent is not None: <NEW_LINE> <INDENT> sale_price = models.F("unit_price") - ( models.F("unit_price") / "100.0" * self.discount_percent) <NEW_LINE> <DEDENT> elif self.discount_exact is not None: <NEW_LINE> <INDENT> extra_filter["unit_price__gt"] = self.discount_exact <NEW_LINE> sale_price = self.discount_exact <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> products = self.all_products() <NEW_LINE> variations = ProductVariation.objects.filter(product__in=products) <NEW_LINE> for priced_objects in (products, variations): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> update = {"sale_id": self.id, "sale_price": sale_price, "sale_to": self.valid_to, "sale_from": self.valid_from} <NEW_LINE> priced_objects.filter(**extra_filter).update(**update) <NEW_LINE> <DEDENT> except Warning: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def delete(self, *args, **kwargs): <NEW_LINE> <INDENT> self._clear() <NEW_LINE> super(Sale, self).delete(*args, **kwargs) <NEW_LINE> <DEDENT> def _clear(self): <NEW_LINE> <INDENT> update = {"sale_id": None, "sale_price": None, "sale_from": None, "sale_to": None} <NEW_LINE> for priced_model in (Product, ProductVariation): <NEW_LINE> <INDENT> priced_model.objects.filter(sale_id=self.id).update(**update)
|
Stores sales field values for price and date range which when saved
are then applied across products and variations according to the
selected categories and products for the sale.
|
62599025be8e80087fbbffa4
|
class SecurityFAILCapSETUIDWithOtherExecCompareKoji(TestCompareKoji): <NEW_LINE> <INDENT> @unittest.skipUnless(have_caps_support, lack_caps_msg) <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> TestCompareKoji.setUp(self) <NEW_LINE> installPath = "bin/fail" <NEW_LINE> sourceId = self.after_rpm.add_source( rpmfluff.SourceFile("rpminspect", ri_bytes) ) <NEW_LINE> self.after_rpm.create_parent_dirs(installPath) <NEW_LINE> self.after_rpm.section_install += ( "install -D %%{SOURCE%i} $RPM_BUILD_ROOT/%s\n" % (sourceId, self.after_rpm.escape_path(installPath)) ) <NEW_LINE> sub = self.after_rpm.get_subpackage(None) <NEW_LINE> sub.section_files += ( "%%attr(0707,root,bin) %%caps(cap_setuid=ep) /%s\n" % installPath ) <NEW_LINE> self.inspection = "ownership" <NEW_LINE> self.result = "BAD" <NEW_LINE> self.waiver_auth = "Security"
|
When comparing RPMs from Koji builds, check that a file in the new build
with CAP_SETUID set and world execution permissions fails.
|
6259902573bcbd0ca4bcb1bc
|
@attr.s(auto_attribs=True) <NEW_LINE> class CliOption: <NEW_LINE> <INDENT> name: str <NEW_LINE> description: str = None <NEW_LINE> args: Dict[str, Any] = attr.Factory(dict) <NEW_LINE> @classmethod <NEW_LINE> def fromparam(cls, parameter: inspect.Parameter): <NEW_LINE> <INDENT> params = {"name": "--" + parameter.name.replace("_", "-")} <NEW_LINE> builder = _STD_TYPES.get(parameter.annotation, None) <NEW_LINE> if builder is None: <NEW_LINE> <INDENT> raise TypeError(f"Unsupported cli option type: {parameter.annotation}") <NEW_LINE> <DEDENT> params["args"] = builder(parameter) <NEW_LINE> return cls(**params) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def frommethod(cls, f): <NEW_LINE> <INDENT> options = [] <NEW_LINE> for param in inspect.signature(f).parameters.values(): <NEW_LINE> <INDENT> if param.name == "self": <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> options.append(cls.fromparam(param)) <NEW_LINE> <DEDENT> return options
|
Represents a command line option.
|
625990259b70327d1c57fcae
|
class Servo(_BaseServo): <NEW_LINE> <INDENT> def __init__( self, pwm_out: "PWMOut", *, actuation_range: int = 180, min_pulse: int = 750, max_pulse: int = 2250 ) -> None: <NEW_LINE> <INDENT> super().__init__(pwm_out, min_pulse=min_pulse, max_pulse=max_pulse) <NEW_LINE> self.actuation_range = actuation_range <NEW_LINE> self._pwm = pwm_out <NEW_LINE> <DEDENT> @property <NEW_LINE> def angle(self) -> Optional[float]: <NEW_LINE> <INDENT> if self.fraction is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self.actuation_range * self.fraction <NEW_LINE> <DEDENT> @angle.setter <NEW_LINE> def angle(self, new_angle: Optional[int]) -> None: <NEW_LINE> <INDENT> if new_angle is None: <NEW_LINE> <INDENT> self.fraction = None <NEW_LINE> return <NEW_LINE> <DEDENT> if new_angle < 0 or new_angle > self.actuation_range: <NEW_LINE> <INDENT> raise ValueError("Angle out of range") <NEW_LINE> <DEDENT> self.fraction = new_angle / self.actuation_range
|
Control the position of a servo.
:param ~pwmio.PWMOut pwm_out: PWM output object.
:param int actuation_range: The physical range of motion of the servo in degrees, for the given ``min_pulse`` and ``max_pulse`` values.
:param int min_pulse: The minimum pulse width of the servo in microseconds.
:param int max_pulse: The maximum pulse width of the servo in microseconds.
``actuation_range`` is an exposed property and can be changed at any time:
.. code-block:: python
servo = Servo(pwm)
servo.actuation_range = 135
The specified pulse width range of a servo has historically been 1000-2000us,
for a 90 degree range of motion. But nearly all modern servos have a 170-180
degree range, and the pulse widths can go well out of the range to achieve this
extended motion. The default values here of ``750`` and ``2250`` typically give
135 degrees of motion. You can set ``actuation_range`` to correspond to the
actual range of motion you observe with your given ``min_pulse`` and ``max_pulse``
values.
.. warning:: You can extend the pulse width above and below these limits to
get a wider range of movement. But if you go too low or too high,
the servo mechanism may hit the end stops, buzz, and draw extra current as it stalls.
Test carefully to find the safe minimum and maximum.
|
62599025a8ecb0332587214c
|
class LinuxArmSystemBuilder(object): <NEW_LINE> <INDENT> def __init__(self, machine_type, aarch64_kernel, **kwargs): <NEW_LINE> <INDENT> self.machine_type = machine_type <NEW_LINE> self.num_cpus = kwargs.get('num_cpus', 1) <NEW_LINE> self.mem_size = kwargs.get('mem_size', '256MB') <NEW_LINE> self.use_ruby = kwargs.get('use_ruby', False) <NEW_LINE> self.aarch64_kernel = aarch64_kernel <NEW_LINE> <DEDENT> def init_kvm(self, system): <NEW_LINE> <INDENT> system.kvm_vm = KvmVM() <NEW_LINE> GenericTimer.generateDeviceTree = SimObject.generateDeviceTree <NEW_LINE> system.realview.gic.simulate_gic = True <NEW_LINE> <DEDENT> def create_system(self): <NEW_LINE> <INDENT> if self.aarch64_kernel: <NEW_LINE> <INDENT> gem5_kernel = "vmlinux.arm64" <NEW_LINE> try: <NEW_LINE> <INDENT> if issubclass(self.cpu_class, ArmV8KvmCPU): <NEW_LINE> <INDENT> disk_image = "m5_exit_addr.squashfs.arm64" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> disk_image = "m5_exit.squashfs.arm64" <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> disk_image = "m5_exit.squashfs.arm64" <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> gem5_kernel = "vmlinux.arm" <NEW_LINE> disk_image = "m5_exit.squashfs.arm" <NEW_LINE> <DEDENT> default_kernels = { "VExpress_GEM5_V1": gem5_kernel, "VExpress_GEM5_Foundation": gem5_kernel, } <NEW_LINE> sc = SysConfig(None, self.mem_size, [disk_image], "/dev/sda") <NEW_LINE> system = FSConfig.makeArmSystem(self.mem_mode, self.machine_type, self.num_cpus, sc, ruby=self.use_ruby) <NEW_LINE> system.workload.panic_on_panic = True <NEW_LINE> system.workload.panic_on_oops = True <NEW_LINE> system.workload.object_file = SysPaths.binary( default_kernels[self.machine_type]) <NEW_LINE> self.init_system(system) <NEW_LINE> system.workload.dtb_filename = os.path.join(m5.options.outdir, 'system.dtb') <NEW_LINE> system.generateDtb(system.workload.dtb_filename) <NEW_LINE> return system
|
Mix-in that implements create_system.
This mix-in is intended as a convenient way of adding an
ARM-specific create_system method to a class deriving from one of
the generic base systems.
|
6259902530c21e258be99744
|
class NetworkMediator: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._nodes = [] <NEW_LINE> <DEDENT> def register_member(self, member): <NEW_LINE> <INDENT> self._nodes.append(member) <NEW_LINE> <DEDENT> def toggle(self, source, activate): <NEW_LINE> <INDENT> if source.type == 'master': <NEW_LINE> <INDENT> self._toggle_all(activate) <NEW_LINE> <DEDENT> elif source.type == 'local_master': <NEW_LINE> <INDENT> self._toggle_local(local_group=source.local_group, activate=activate) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("{} the {} node.".format( 'Activating' if activate else 'Deactivating', source.name)) <NEW_LINE> source.active = activate <NEW_LINE> <DEDENT> <DEDENT> def _toggle_all(self, activate): <NEW_LINE> <INDENT> print("{} all nodes".format( 'Activating' if activate else 'Deactivating')) <NEW_LINE> for node in self._nodes: <NEW_LINE> <INDENT> node.active = activate <NEW_LINE> <DEDENT> <DEDENT> def _toggle_local(self, local_group, activate): <NEW_LINE> <INDENT> print("{} all nodes in the {} local group.".format( 'Activating' if activate else 'Deactivating', local_group)) <NEW_LINE> for node in self._nodes: <NEW_LINE> <INDENT> if node.local_group == local_group and node.type != 'master': <NEW_LINE> <INDENT> node.active = activate <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def print_network_status(self): <NEW_LINE> <INDENT> print("Following is the state of all nodes in the network:") <NEW_LINE> for node in self._nodes: <NEW_LINE> <INDENT> print("\t > " + str(node))
|
Responsible for maintaining relationships between all nodes. It does so
by managing the 'active' state of all nodes, conforming to the behavioural
rules defined in the aforementioned docstring.
|
62599025d164cc6175821ea6
|
class Unit: <NEW_LINE> <INDENT> def __init__(self, unit_type: Union[UnitType, type], owner: Player): <NEW_LINE> <INDENT> self._unit_type = unit_type <NEW_LINE> self._modifiers = [] <NEW_LINE> self._owner = owner <NEW_LINE> self._moving_points = self.get_default_moving_points() <NEW_LINE> <DEDENT> def reset_moving_points(self): <NEW_LINE> <INDENT> self._moving_points = self.get_default_moving_points() <NEW_LINE> <DEDENT> def get_moving_points(self) -> int: <NEW_LINE> <INDENT> return self._moving_points <NEW_LINE> <DEDENT> def get_default_moving_points(self) -> int: <NEW_LINE> <INDENT> return self._unit_type.get_default_moving_points() <NEW_LINE> <DEDENT> def get_owner(self) -> Player: <NEW_LINE> <INDENT> return self._owner <NEW_LINE> <DEDENT> def get_default_pow(self) -> int: <NEW_LINE> <INDENT> return self._unit_type.get_default_pow() <NEW_LINE> <DEDENT> def get_all_modifiers(self) -> List[POWModifier]: <NEW_LINE> <INDENT> return copy(self._modifiers) <NEW_LINE> <DEDENT> def update_modifiers(self, current_turn: int): <NEW_LINE> <INDENT> self._modifiers = list(filter_overdue_modifiers(current_turn, self._modifiers)) <NEW_LINE> <DEDENT> def apply_modifier(self, modifier: POWModifier): <NEW_LINE> <INDENT> self._modifiers.append(modifier) <NEW_LINE> <DEDENT> def next_turn(self, current_turn: int): <NEW_LINE> <INDENT> self.reset_moving_points() <NEW_LINE> self.update_modifiers(current_turn) <NEW_LINE> <DEDENT> def after_attack(self): <NEW_LINE> <INDENT> if not self._unit_type.could_move_after_attacking(): <NEW_LINE> <INDENT> self._moving_points = 0 <NEW_LINE> <DEDENT> <DEDENT> def is_alive(self) -> bool: <NEW_LINE> <INDENT> return self.count_pow() > 0 <NEW_LINE> <DEDENT> def set_damage(self, damage: int, turn: int): <NEW_LINE> <INDENT> self.apply_modifier(POWModifier(False, 2, turn, -damage, POWModifierKind.DAMAGE)) <NEW_LINE> <DEDENT> def count_pow(self) -> int: <NEW_LINE> <INDENT> return get_final_pow(self.get_default_pow(), self.get_all_modifiers()) <NEW_LINE> 
<DEDENT> def count_priority(self) -> int: <NEW_LINE> <INDENT> return self._unit_type.count_priority() <NEW_LINE> <DEDENT> def is_peaceful(self): <NEW_LINE> <INDENT> return self._unit_type.is_peaceful() <NEW_LINE> <DEDENT> def decrease_moving_points(self, distance: int): <NEW_LINE> <INDENT> self._moving_points -= distance <NEW_LINE> <DEDENT> def use_range_attack(self) -> bool: <NEW_LINE> <INDENT> return self._unit_type.use_range_attack() <NEW_LINE> <DEDENT> def get_vision_radius(self) -> int: <NEW_LINE> <INDENT> return self._unit_type.get_vision_radius() <NEW_LINE> <DEDENT> def get_resource(self) -> UnitTypeResource: <NEW_LINE> <INDENT> return self._unit_type.get_resource()
|
Класс, представляющий юнита на игровом поле.
|
625990251d351010ab8f4a45
|
class CustomUserCreationForm(UserCreationForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = ( 'email', )
|
Custom creation form for ``User`` model.
This class overrides ``UserCreationForm`` to provide a different ``User``
model in Meta and also replaces the ``username`` field with an ``email`` field.
|
62599025d18da76e235b78e5
|
class TestCollectionViewSetRemoveCurator(BaseCollectionViewSetTest): <NEW_LINE> <INDENT> def remove_curator(self, client, user_id=None): <NEW_LINE> <INDENT> if user_id is None: <NEW_LINE> <INDENT> user_id = self.user.pk <NEW_LINE> <DEDENT> form_data = {'user': user_id} if user_id else {} <NEW_LINE> url = self.collection_url('remove-curator', self.collection.pk) <NEW_LINE> res = client.post(url, json.dumps(form_data)) <NEW_LINE> data = json.loads(res.content) if res.content else None <NEW_LINE> return res, data <NEW_LINE> <DEDENT> def test_remove_curator_anon(self): <NEW_LINE> <INDENT> res, data = self.remove_curator(self.anon) <NEW_LINE> eq_(res.status_code, 403) <NEW_LINE> eq_(PermissionDenied.default_detail, data['detail']) <NEW_LINE> <DEDENT> def test_remove_curator_no_perms(self): <NEW_LINE> <INDENT> res, data = self.remove_curator(self.client) <NEW_LINE> eq_(res.status_code, 403) <NEW_LINE> eq_(PermissionDenied.default_detail, data['detail']) <NEW_LINE> <DEDENT> def test_remove_curator_has_perms(self): <NEW_LINE> <INDENT> self.make_publisher() <NEW_LINE> res, data = self.remove_curator(self.client) <NEW_LINE> eq_(res.status_code, 205) <NEW_LINE> <DEDENT> def test_remove_curator_as_curator(self): <NEW_LINE> <INDENT> self.make_curator() <NEW_LINE> res, data = self.remove_curator(self.client) <NEW_LINE> eq_(res.status_code, 205) <NEW_LINE> <DEDENT> def test_remove_curator_email(self): <NEW_LINE> <INDENT> self.make_curator() <NEW_LINE> res, data = self.remove_curator(self.client, user_id=self.user.email) <NEW_LINE> eq_(res.status_code, 205) <NEW_LINE> <DEDENT> def test_remove_curator_nonexistent(self): <NEW_LINE> <INDENT> self.make_publisher() <NEW_LINE> res, data = self.remove_curator(self.client, user_id=100000) <NEW_LINE> eq_(res.status_code, 400) <NEW_LINE> eq_(CollectionViewSet.exceptions['user_doesnt_exist'], data['detail']) <NEW_LINE> res, data = self.remove_curator(self.client, user_id='doesnt@exi.st') <NEW_LINE> eq_(res.status_code, 400) <NEW_LINE> 
eq_(CollectionViewSet.exceptions['user_doesnt_exist'], data['detail']) <NEW_LINE> <DEDENT> def test_remove_curator_empty(self): <NEW_LINE> <INDENT> self.make_publisher() <NEW_LINE> res, data = self.remove_curator(self.client, user_id=False) <NEW_LINE> eq_(res.status_code, 400) <NEW_LINE> eq_(CollectionViewSet.exceptions['user_not_provided'], data['detail']) <NEW_LINE> <DEDENT> def test_remove_curator_garbage(self): <NEW_LINE> <INDENT> self.make_publisher() <NEW_LINE> res, data = self.remove_curator(self.client, user_id='garbage') <NEW_LINE> eq_(res.status_code, 400) <NEW_LINE> eq_(CollectionViewSet.exceptions['wrong_user_format'], data['detail']) <NEW_LINE> res, data = self.remove_curator(self.client, user_id='garbage@') <NEW_LINE> eq_(res.status_code, 400) <NEW_LINE> eq_(CollectionViewSet.exceptions['wrong_user_format'], data['detail'])
|
Tests the `remove-curator` action on CollectionViewSet.
|
6259902521bff66bcd723b92
|
class NoAuthMiddleware(wsgi.Middleware): <NEW_LINE> <INDENT> @webob.dec.wsgify(RequestClass=wsgi.Request) <NEW_LINE> def __call__(self, req): <NEW_LINE> <INDENT> if 'X-Auth-Token' not in req.headers: <NEW_LINE> <INDENT> user_id = req.headers.get('X-Auth-User', 'admin') <NEW_LINE> project_id = req.headers.get('X-Auth-Project-Id', 'admin') <NEW_LINE> os_url = os.path.join(req.url, project_id) <NEW_LINE> res = webob.Response() <NEW_LINE> res.headers['X-Auth-Token'] = '%s:%s' % (user_id, project_id) <NEW_LINE> res.headers['X-Server-Management-Url'] = os_url <NEW_LINE> res.headers['X-Storage-Url'] = '' <NEW_LINE> res.headers['X-CDN-Management-Url'] = '' <NEW_LINE> res.content_type = 'text/plain' <NEW_LINE> res.status = '204' <NEW_LINE> return res <NEW_LINE> <DEDENT> token = req.headers['X-Auth-Token'] <NEW_LINE> user_id, _sep, project_id = token.partition(':') <NEW_LINE> project_id = project_id or user_id <NEW_LINE> remote_address = getattr(req, 'remote_address', '127.0.0.1') <NEW_LINE> if FLAGS.use_forwarded_for: <NEW_LINE> <INDENT> remote_address = req.headers.get('X-Forwarded-For', remote_address) <NEW_LINE> <DEDENT> ctx = context.RequestContext(user_id, project_id, is_admin=True, remote_address=remote_address) <NEW_LINE> req.environ['nova.context'] = ctx <NEW_LINE> return self.application
|
Return a fake token if one isn't specified.
|
625990255166f23b2e244305
|
class Event(with_metaclass(_MetaEvent, object)): <NEW_LINE> <INDENT> _event_classes = [] <NEW_LINE> event_name = None <NEW_LINE> def __init__(self, model): <NEW_LINE> <INDENT> self._model_id = None <NEW_LINE> if model is not None: <NEW_LINE> <INDENT> self._model_id = model._id <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def decode_json(cls, dct): <NEW_LINE> <INDENT> if not (('event_name' in dct) and ('event_values' in dct)): <NEW_LINE> <INDENT> return dct <NEW_LINE> <DEDENT> event_name = dct['event_name'] <NEW_LINE> if event_name not in _CONCRETE_EVENT_CLASSES: <NEW_LINE> <INDENT> raise ValueError("Could not find appropriate Event class for event_name: %r" % event_name) <NEW_LINE> <DEDENT> event_values = dct['event_values'] <NEW_LINE> model_id = event_values.pop('model_id') <NEW_LINE> event = _CONCRETE_EVENT_CLASSES[event_name](model=None, **event_values) <NEW_LINE> event._model_id = model_id <NEW_LINE> return event
|
Base class for all Bokeh events.
This base class is not typically useful to instantiate on its own.
|
62599025d99f1b3c44d065d3
|
class Solution(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.arr = np.array((6, 7, 8)) <NEW_LINE> <DEDENT> def test1(self): <NEW_LINE> <INDENT> r = np.lcm.reduce(self.arr) <NEW_LINE> print('lcm:', r) <NEW_LINE> <DEDENT> def action(self): <NEW_LINE> <INDENT> self.test1() <NEW_LINE> C = self.arr - np.ones((1, 1)) <NEW_LINE> p = list(range(1, 50)) <NEW_LINE> cnt = 0 <NEW_LINE> for x in it.product(p, p, p): <NEW_LINE> <INDENT> cnt += 1 <NEW_LINE> r = self.arr * np.array(x) + C <NEW_LINE> if np.max(r) == np.min(r): <NEW_LINE> <INDENT> print(f"x:{x} r:{r}") <NEW_LINE> print("all same") <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> print(f'{cnt} tried')
|
class to solve the problem
|
62599025507cdc57c63a5cd8
|
class MiniBatchTrainer(ClassifierTrainer): <NEW_LINE> <INDENT> def __call__(self, train_labels, val_labels, nbr_epochs, pc_models, feature_loc, clf_type): <NEW_LINE> <INDENT> assert clf_type in config.CLASSIFIER_TYPES <NEW_LINE> t0 = time.time() <NEW_LINE> clf, ref_accs = train(train_labels, feature_loc, nbr_epochs, clf_type) <NEW_LINE> classes = clf.classes_.tolist() <NEW_LINE> val_gts, val_ests, val_scores = evaluate_classifier( clf, val_labels, classes, feature_loc) <NEW_LINE> pc_accs = [] <NEW_LINE> for pc_model in pc_models: <NEW_LINE> <INDENT> pc_gts, pc_ests, _ = evaluate_classifier(pc_model, val_labels, classes, feature_loc) <NEW_LINE> pc_accs.append(calc_acc(pc_gts, pc_ests)) <NEW_LINE> <DEDENT> return clf, ValResults( scores=val_scores, gt=[classes.index(member) for member in val_gts], est=[classes.index(member) for member in val_ests], classes=classes ), TrainClassifierReturnMsg( acc=calc_acc(val_gts, val_ests), pc_accs=pc_accs, ref_accs=ref_accs, runtime=time.time() - t0 )
|
This is the default trainer. It uses mini-batches of data
to train the classifier
|
62599025a4f1c619b294f526
|
class GoogleVisionAPI: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.endpoint_url = 'https://vision.googleapis.com/v1/images:annotate' <NEW_LINE> self.api_key = load_text(CREDENTIAL_PATH) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def __make_request(img_path, feature_type): <NEW_LINE> <INDENT> request_list = [] <NEW_LINE> with open(img_path, 'rb') as img_file: <NEW_LINE> <INDENT> content_json_obj = {'content': base64.b64encode(img_file.read()).decode('UTF-8')} <NEW_LINE> feature_json_obj = [{'type': feature_type}] <NEW_LINE> request_list.append( {'image': content_json_obj, 'features': feature_json_obj} ) <NEW_LINE> <DEDENT> return json.dumps({'requests': request_list}).encode() <NEW_LINE> <DEDENT> def __get_response(self, json_data, info_field): <NEW_LINE> <INDENT> response = requests.post( url=self.endpoint_url, data=json_data, params={'key': self.api_key}, headers={'Content-Type': 'application/json'}) <NEW_LINE> ret_json = json.loads(response.text) <NEW_LINE> try: <NEW_LINE> <INDENT> return ret_json['responses'][0] <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def detect_text(self, path): <NEW_LINE> <INDENT> ret_json = self.__get_response(self.__make_request(img_path=path, feature_type='DOCUMENT_TEXT_DETECTION'), info_field='textAnnotations') <NEW_LINE> return ret_json
|
Construct and use the Google Vision API service.
|
62599025bf627c535bcb23e8
|
class CsmPublishingProfileOptions(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'format': {'key': 'format', 'type': 'str'}, 'include_disaster_recovery_endpoints': {'key': 'includeDisasterRecoveryEndpoints', 'type': 'bool'}, } <NEW_LINE> def __init__( self, *, format: Optional[Union[str, "PublishingProfileFormat"]] = None, include_disaster_recovery_endpoints: Optional[bool] = None, **kwargs ): <NEW_LINE> <INDENT> super(CsmPublishingProfileOptions, self).__init__(**kwargs) <NEW_LINE> self.format = format <NEW_LINE> self.include_disaster_recovery_endpoints = include_disaster_recovery_endpoints
|
Publishing options for requested profile.
:ivar format: Name of the format. Valid values are:
FileZilla3
WebDeploy -- default
Ftp. Possible values include: "FileZilla3", "WebDeploy", "Ftp".
:vartype format: str or ~azure.mgmt.web.v2018_02_01.models.PublishingProfileFormat
:ivar include_disaster_recovery_endpoints: Include the DisasterRecover endpoint if true.
:vartype include_disaster_recovery_endpoints: bool
|
6259902591af0d3eaad3ad59
|
class AdminTest(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.site = admin.AdminSite() <NEW_LINE> <DEDENT> def test_admin_registration(self): <NEW_LINE> <INDENT> ua.register(self.site) <NEW_LINE> self.assertTrue( isinstance( self.site._registry[podcast.Podcast], ua.PodcastAdmin ) ) <NEW_LINE> self.assertTrue( isinstance( self.site._registry[channel.PodcastChannel], ua.PodcastChannelAdmin ) )
|
Tests to make sure that the admin snap-ins validate correctly.
|
62599025d164cc6175821ea9
|
@inside_spirv_testsuite('SpirvOptFlags') <NEW_LINE> class TestWebGPUToVulkanThenVulkanToWebGPUIsInvalid(expect.ReturnCodeIsNonZero, expect.ErrorMessageSubstr): <NEW_LINE> <INDENT> spirv_args = ['--webgpu-to-vulkan', '--vulkan-to-webgpu'] <NEW_LINE> expected_error_substr = 'Cannot use both'
|
Tests Vulkan->WebGPU flag cannot be used after WebGPU->Vulkan flag.
|
625990253eb6a72ae038b597
|
class TrajectoryMock(object): <NEW_LINE> <INDENT> def __init__(self, traj): <NEW_LINE> <INDENT> self.v_environment_name = traj.v_environment_name <NEW_LINE> self.v_name = traj.v_name <NEW_LINE> self.v_crun_ = traj.v_crun_ <NEW_LINE> self.v_crun = traj.v_crun <NEW_LINE> self.v_idx = traj.v_idx
|
Helper class that mocks properties of a trajectory.
The full trajectory is not needed to rename a log file.
In order to avoid copying the full trajectory during pickling
this class is used.
|
6259902573bcbd0ca4bcb1c4
|
class test_work_meta(TestCase, test_model_meta_mixin): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.subject = work_meta() <NEW_LINE> <DEDENT> def test__work_meta__instance(self): <NEW_LINE> <INDENT> self.assertIsInstance(self.subject, work_meta) <NEW_LINE> <DEDENT> def test__work_meta__str(self): <NEW_LINE> <INDENT> self.assertEqual(str(self.subject), 'Work Meta')
|
Tests for the `work_meta` model.
|
625990255e10d32532ce409e
|
class BadTimeSignature(BadSignature): <NEW_LINE> <INDENT> def __init__(self, message, payload=None, date_signed=None): <NEW_LINE> <INDENT> BadSignature.__init__(self, message, payload) <NEW_LINE> self.date_signed = date_signed
|
Raised if a time-based signature is invalid. This is a subclass
of :class:`BadSignature`.
|
625990255166f23b2e244309
|
class SharedPostSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> images = PostImageSerializer(many=True) <NEW_LINE> user = UserSerializerMinimal() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Post <NEW_LINE> fields = ['id', 'content', 'created', 'user', 'images', 'url']
|
Serializer used for a post when it is shared inside another post.
|
625990251d351010ab8f4a4a
|
class JoinRideSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> passenger = serializers.IntegerField() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Ride <NEW_LINE> fields = ('passenger',) <NEW_LINE> <DEDENT> def validate_passenger(self, data): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user = User.objects.get(pk=data) <NEW_LINE> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> raise serializers.ValidationError('Invalid passenger.') <NEW_LINE> <DEDENT> circle = self.context['circle'] <NEW_LINE> try: <NEW_LINE> <INDENT> membership = Membership.objects.get( user=user, circle=circle, is_active=True ) <NEW_LINE> <DEDENT> except Membership.DoesNotExist: <NEW_LINE> <INDENT> raise serializers.ValidationError('User is not an active member of the circle.') <NEW_LINE> <DEDENT> self.context['user'] = user <NEW_LINE> self.context['member'] = membership <NEW_LINE> return data <NEW_LINE> <DEDENT> def validate(self, data): <NEW_LINE> <INDENT> ride = self.context['ride'] <NEW_LINE> if ride.departure_date <= timezone.now(): <NEW_LINE> <INDENT> raise serializers.ValidationError("You can't join this ride now") <NEW_LINE> <DEDENT> if ride.available_seats < 1: <NEW_LINE> <INDENT> raise serializers.ValidationError("Ride is already full!") <NEW_LINE> <DEDENT> if ride.passengers.filter(pk=self.context['user'].pk).exists(): <NEW_LINE> <INDENT> raise serializers.ValidationError('Passenger is already in this trip') <NEW_LINE> <DEDENT> return data <NEW_LINE> <DEDENT> def update(self, instance, data): <NEW_LINE> <INDENT> ride = self.context['ride'] <NEW_LINE> user = self.context['user'] <NEW_LINE> ride.passengers.add(user) <NEW_LINE> profile = user.profile <NEW_LINE> profile.rides_taken += 1 <NEW_LINE> profile.save() <NEW_LINE> member = self.context['member'] <NEW_LINE> member.rides_taken += 1 <NEW_LINE> member.save() <NEW_LINE> circle = self.context['circle'] <NEW_LINE> circle.rides_taken += 1 <NEW_LINE> circle.save() <NEW_LINE> return ride
|
Join ride serializer.
|
6259902530c21e258be9974a
|
class TrashedEntity(ModelNormal): <NEW_LINE> <INDENT> allowed_values = { } <NEW_LINE> validations = { } <NEW_LINE> additional_properties_type = None <NEW_LINE> _nullable = False <NEW_LINE> @cached_property <NEW_LINE> def openapi_types(): <NEW_LINE> <INDENT> return { 'deleted_by_principal_id': (str,), 'deleted_on': (str,), 'entity_id': (str,), 'entity_name': (str,), 'original_parent_id': (str,), } <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def discriminator(): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> attribute_map = { 'deleted_by_principal_id': 'deletedByPrincipalId', 'deleted_on': 'deletedOn', 'entity_id': 'entityId', 'entity_name': 'entityName', 'original_parent_id': 'originalParentId', } <NEW_LINE> _composed_schemas = {} <NEW_LINE> required_properties = set([ '_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', ]) <NEW_LINE> @convert_js_args_to_python_args <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> _check_type = kwargs.pop('_check_type', True) <NEW_LINE> _spec_property_naming = kwargs.pop('_spec_property_naming', False) <NEW_LINE> _path_to_item = kwargs.pop('_path_to_item', ()) <NEW_LINE> _configuration = kwargs.pop('_configuration', None) <NEW_LINE> _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) <NEW_LINE> if args: <NEW_LINE> <INDENT> raise ApiTypeError( "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( args, self.__class__.__name__, ), path_to_item=_path_to_item, valid_classes=(self.__class__,), ) <NEW_LINE> <DEDENT> self._data_store = {} <NEW_LINE> self._check_type = _check_type <NEW_LINE> self._spec_property_naming = _spec_property_naming <NEW_LINE> self._path_to_item = _path_to_item <NEW_LINE> self._configuration = _configuration <NEW_LINE> self._visited_composed_classes = _visited_composed_classes + (self.__class__,) <NEW_LINE> for var_name, var_value in kwargs.items(): <NEW_LINE> <INDENT> if var_name not in self.attribute_map and self._configuration is not None and self._configuration.discard_unknown_keys and self.additional_properties_type is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> setattr(self, var_name, var_value)
|
NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
|
62599025d164cc6175821eab
|
@registries.BINDABLE_CLUSTERS.register(general.LevelControl.cluster_id) <NEW_LINE> @registries.ZIGBEE_CHANNEL_REGISTRY.register(general.LevelControl.cluster_id) <NEW_LINE> class LevelControlChannel(ZigbeeChannel): <NEW_LINE> <INDENT> CURRENT_LEVEL = 0 <NEW_LINE> REPORT_CONFIG = ({"attr": "current_level", "config": REPORT_CONFIG_ASAP},) <NEW_LINE> @property <NEW_LINE> def current_level(self) -> Optional[int]: <NEW_LINE> <INDENT> return self.cluster.get("current_level") <NEW_LINE> <DEDENT> @callback <NEW_LINE> def cluster_command(self, tsn, command_id, args): <NEW_LINE> <INDENT> cmd = parse_and_log_command(self, tsn, command_id, args) <NEW_LINE> if cmd in ("move_to_level", "move_to_level_with_on_off"): <NEW_LINE> <INDENT> self.dispatch_level_change(SIGNAL_SET_LEVEL, args[0]) <NEW_LINE> <DEDENT> elif cmd in ("move", "move_with_on_off"): <NEW_LINE> <INDENT> rate = args[1] <NEW_LINE> if args[0] == 0xFF: <NEW_LINE> <INDENT> rate = 10 <NEW_LINE> <DEDENT> self.dispatch_level_change(SIGNAL_MOVE_LEVEL, -rate if args[0] else rate) <NEW_LINE> <DEDENT> elif cmd in ("step", "step_with_on_off"): <NEW_LINE> <INDENT> self.dispatch_level_change( SIGNAL_MOVE_LEVEL, -args[1] if args[0] else args[1] ) <NEW_LINE> <DEDENT> <DEDENT> @callback <NEW_LINE> def attribute_updated(self, attrid, value): <NEW_LINE> <INDENT> self.debug("received attribute: %s update with value: %s", attrid, value) <NEW_LINE> if attrid == self.CURRENT_LEVEL: <NEW_LINE> <INDENT> self.dispatch_level_change(SIGNAL_SET_LEVEL, value) <NEW_LINE> <DEDENT> <DEDENT> def dispatch_level_change(self, command, level): <NEW_LINE> <INDENT> self.async_send_signal(f"{self.unique_id}_{command}", level)
|
Channel for the LevelControl Zigbee cluster.
|
6259902566673b3332c31323
|
@ECR.filter_registry.register('lifecycle-rule') <NEW_LINE> class LifecycleRule(Filter): <NEW_LINE> <INDENT> permissions = ('ecr:GetLifecyclePolicy',) <NEW_LINE> schema = type_schema( 'lifecycle-rule', state={'type': 'boolean'}, match={'type': 'array', 'items': { 'oneOf': [ {'$ref': '#/definitions/filters/value'}, {'type': 'object', 'minProperties': 1, 'maxProperties': 1}, ]}}) <NEW_LINE> policy_annotation = 'c7n:lifecycle-policy' <NEW_LINE> def process(self, resources, event=None): <NEW_LINE> <INDENT> client = local_session(self.manager.session_factory).client('ecr') <NEW_LINE> for r in resources: <NEW_LINE> <INDENT> if self.policy_annotation in r: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> r[self.policy_annotation] = json.loads( client.get_lifecycle_policy( repositoryName=r['repositoryName']).get( 'lifecyclePolicyText', '')) <NEW_LINE> <DEDENT> except client.exceptions.LifecyclePolicyNotFoundException: <NEW_LINE> <INDENT> r[self.policy_annotation] = {} <NEW_LINE> <DEDENT> <DEDENT> state = self.data.get('state', False) <NEW_LINE> matchers = [] <NEW_LINE> for matcher in self.data.get('match', []): <NEW_LINE> <INDENT> vf = ValueFilter(matcher) <NEW_LINE> vf.annotate = False <NEW_LINE> matchers.append(vf) <NEW_LINE> <DEDENT> results = [] <NEW_LINE> for r in resources: <NEW_LINE> <INDENT> found = False <NEW_LINE> for rule in r[self.policy_annotation].get('rules', []): <NEW_LINE> <INDENT> found = True <NEW_LINE> for m in matchers: <NEW_LINE> <INDENT> if not m(rule): <NEW_LINE> <INDENT> found = False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if found and state: <NEW_LINE> <INDENT> results.append(r) <NEW_LINE> <DEDENT> if not found and not state: <NEW_LINE> <INDENT> results.append(r) <NEW_LINE> <DEDENT> <DEDENT> return results
|
Lifecycle rule filtering
:Example:
.. code-block:: yaml
policies:
- name: ecr-life
resource: aws.ecr
filters:
- type: lifecycle-rule
state: False
match:
- selection.tagStatus: untagged
- action.type: expire
- type: value
key: selection.countNumber
value: 30
op: less-than
|
62599025d164cc6175821eac
|
class ModelSetDefaultTestCase(ModelTestCase, SetDefaultTestCase): <NEW_LINE> <INDENT> def testSetDefaultDefault(self): <NEW_LINE> <INDENT> pass
|
Test the setdefault operation at the model level.
|
625990256fece00bbaccc8ee
|
class GoldairHeaterChildLock(LockEntity): <NEW_LINE> <INDENT> def __init__(self, device): <NEW_LINE> <INDENT> self._device = device <NEW_LINE> <DEDENT> @property <NEW_LINE> def should_poll(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._device.name <NEW_LINE> <DEDENT> @property <NEW_LINE> def unique_id(self): <NEW_LINE> <INDENT> return self._device.unique_id <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_info(self): <NEW_LINE> <INDENT> return self._device.device_info <NEW_LINE> <DEDENT> @property <NEW_LINE> def state(self): <NEW_LINE> <INDENT> if self.is_locked is None: <NEW_LINE> <INDENT> return STATE_UNAVAILABLE <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return STATE_LOCKED if self.is_locked else STATE_UNLOCKED <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def is_locked(self): <NEW_LINE> <INDENT> return self._device.get_property(PROPERTY_TO_DPS_ID[ATTR_CHILD_LOCK]) <NEW_LINE> <DEDENT> async def async_lock(self, **kwargs): <NEW_LINE> <INDENT> await self._device.async_set_property(PROPERTY_TO_DPS_ID[ATTR_CHILD_LOCK], True) <NEW_LINE> <DEDENT> async def async_unlock(self, **kwargs): <NEW_LINE> <INDENT> await self._device.async_set_property( PROPERTY_TO_DPS_ID[ATTR_CHILD_LOCK], False ) <NEW_LINE> <DEDENT> async def async_update(self): <NEW_LINE> <INDENT> await self._device.async_refresh()
|
Representation of a Goldair WiFi-connected heater child lock.
|
625990256e29344779b01585
|
class TimedGenerator: <NEW_LINE> <INDENT> def __init__(self, generator, timeout=None, inactivity_timeout=None, on_timeout=None, on_inactivity_timeout=None): <NEW_LINE> <INDENT> self.generator = generator <NEW_LINE> self.timeout = timeout <NEW_LINE> self.inactivity_timeout = inactivity_timeout <NEW_LINE> self.on_timeout = on_timeout <NEW_LINE> self.on_inactivity_timeout = on_inactivity_timeout <NEW_LINE> self.timer = self.inactivity_timer = None <NEW_LINE> if self.timeout is not None: <NEW_LINE> <INDENT> self.start_timer() <NEW_LINE> <DEDENT> if self.inactivity_timeout is not None: <NEW_LINE> <INDENT> self.start_inactivity_timer() <NEW_LINE> <DEDENT> <DEDENT> def start_timer(self): <NEW_LINE> <INDENT> self.timer = threading.Timer(self.timeout, _thread.interrupt_main) <NEW_LINE> self.timer.start() <NEW_LINE> <DEDENT> def start_inactivity_timer(self): <NEW_LINE> <INDENT> self.inactivity_timer = threading.Timer( self.inactivity_timeout, _thread.interrupt_main) <NEW_LINE> self.inactivity_timer.start() <NEW_LINE> <DEDENT> def reset_inactivity_timer(self): <NEW_LINE> <INDENT> if self.inactivity_timer: <NEW_LINE> <INDENT> self.inactivity_timer.cancel() <NEW_LINE> self.start_inactivity_timer() <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> to_raise = None <NEW_LINE> set_timers = [timer for timer in ( self.timer, self.inactivity_timer) if timer is not None] <NEW_LINE> try: <NEW_LINE> <INDENT> next_item = next(self.generator) <NEW_LINE> self.reset_inactivity_timer() <NEW_LINE> return next_item <NEW_LINE> <DEDENT> except KeyboardInterrupt as e: <NEW_LINE> <INDENT> if not set_timers: <NEW_LINE> <INDENT> raise e <NEW_LINE> <DEDENT> expired_timers = [ timer for timer in set_timers if not timer.is_alive()] <NEW_LINE> if expired_timers: <NEW_LINE> <INDENT> first_expired = expired_timers[0] <NEW_LINE> to_raise = StopIteration <NEW_LINE> function = self.on_timeout if ( first_expired == 
self.timer) else self.on_inactivity_timeout <NEW_LINE> self._run_function(function) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> to_raise = e <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> to_raise = e <NEW_LINE> <DEDENT> if to_raise: <NEW_LINE> <INDENT> for timer in set_timers: <NEW_LINE> <INDENT> timer.cancel() <NEW_LINE> <DEDENT> raise to_raise <NEW_LINE> <DEDENT> <DEDENT> def _run_function(self, function): <NEW_LINE> <INDENT> if callable(function): <NEW_LINE> <INDENT> function()
|
Add timing functionality to generator objects.
Used to create timed-generator objects as well as add inactivity functionality
(i.e. return if no items have been generated in a given time period)
|
625990258c3a8732951f748d
|
@register_block <NEW_LINE> class InterfaceStatistics( SectionMemberBlock, BlockWithTimestampMixin, BlockWithInterfaceMixin ): <NEW_LINE> <INDENT> magic_number = 0x00000005 <NEW_LINE> __slots__ = [] <NEW_LINE> schema = [ ("interface_id", IntField(32, False), 0), ("timestamp_high", IntField(32, False), 0), ("timestamp_low", IntField(32, False), 0), ( "options", OptionsField( [ Option(2, "isb_starttime", "u64"), Option(3, "isb_endtime", "u64"), Option(4, "isb_ifrecv", "u64"), Option(5, "isb_ifdrop", "u64"), Option(6, "isb_filteraccept", "u64"), Option(7, "isb_osdrop", "u64"), Option(8, "isb_usrdeliv", "u64"), ] ), None, ), ]
|
"The Interface Statistics Block (ISB) contains the capture statistics for a
given interface [...]. The statistics are referred to the interface defined
in the current Section identified by the Interface ID field."
- pcapng spec, section 4.6. Other quoted citations are from this section
unless otherwise noted.
|
62599025ac7a0e7691f7341f
|
class AnyBlokIO(Blok): <NEW_LINE> <INDENT> version = version <NEW_LINE> author = 'Suzanne Jean-Sébastien' <NEW_LINE> logo = '../anyblok-logo_alpha_256.png' <NEW_LINE> required = [ 'anyblok-core', ] <NEW_LINE> @classmethod <NEW_LINE> def declare_io(cls): <NEW_LINE> <INDENT> from anyblok import Declarations <NEW_LINE> @Declarations.register(Declarations.Model) <NEW_LINE> class IO: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def import_declaration_module(cls): <NEW_LINE> <INDENT> from . import core <NEW_LINE> cls.declare_io() <NEW_LINE> from . import mapping <NEW_LINE> from . import mixin <NEW_LINE> from . import importer <NEW_LINE> from . import exporter <NEW_LINE> from . import formater <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def reload_declaration_module(cls, reload): <NEW_LINE> <INDENT> from . import core <NEW_LINE> reload(core) <NEW_LINE> cls.declare_io() <NEW_LINE> from . import mapping <NEW_LINE> reload(mapping) <NEW_LINE> from . import mixin <NEW_LINE> reload(mixin) <NEW_LINE> from . import importer <NEW_LINE> reload(importer) <NEW_LINE> from . import exporter <NEW_LINE> reload(exporter) <NEW_LINE> from . import formater <NEW_LINE> reload(formater)
|
In / Out tool's:
* Formater: convert value 2 str or str 2 value in function of the field,
* Importer: main model to define an import,
* Exporter: main model to define an export,
|
6259902530c21e258be9974c
|
class InfoJSONEncoder(QMKJSONEncoder): <NEW_LINE> <INDENT> def encode_dict(self, obj): <NEW_LINE> <INDENT> if obj: <NEW_LINE> <INDENT> if self.indentation_level == 4: <NEW_LINE> <INDENT> return "{ " + ", ".join(f"{self.encode(key)}: {self.encode(element)}" for key, element in sorted(obj.items())) + " }" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.indentation_level += 1 <NEW_LINE> output = [self.indent_str + f"{json.dumps(key)}: {self.encode(value)}" for key, value in sorted(obj.items(), key=self.sort_dict)] <NEW_LINE> self.indentation_level -= 1 <NEW_LINE> return "{\n" + ",\n".join(output) + "\n" + self.indent_str + "}" <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return "{}" <NEW_LINE> <DEDENT> <DEDENT> def sort_dict(self, key): <NEW_LINE> <INDENT> key = key[0] <NEW_LINE> if self.indentation_level == 1: <NEW_LINE> <INDENT> if key == 'manufacturer': <NEW_LINE> <INDENT> return '10keyboard_name' <NEW_LINE> <DEDENT> elif key == 'keyboard_name': <NEW_LINE> <INDENT> return '11keyboard_name' <NEW_LINE> <DEDENT> elif key == 'maintainer': <NEW_LINE> <INDENT> return '12maintainer' <NEW_LINE> <DEDENT> elif key == 'community_layouts': <NEW_LINE> <INDENT> return '97community_layouts' <NEW_LINE> <DEDENT> elif key == 'layout_aliases': <NEW_LINE> <INDENT> return '98layout_aliases' <NEW_LINE> <DEDENT> elif key == 'layouts': <NEW_LINE> <INDENT> return '99layouts' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return '50' + str(key) <NEW_LINE> <DEDENT> <DEDENT> return key
|
Custom encoder to make info.json's a little nicer to work with.
|
62599025287bf620b6272b25
|
class DeviceDatasourceInstancePaginationResponse(object): <NEW_LINE> <INDENT> swagger_types = { 'total': 'int', 'search_id': 'str', 'items': 'list[DeviceDataSourceInstance]' } <NEW_LINE> attribute_map = { 'total': 'total', 'search_id': 'searchId', 'items': 'items' } <NEW_LINE> def __init__(self, total=None, search_id=None, items=None): <NEW_LINE> <INDENT> self._total = None <NEW_LINE> self._search_id = None <NEW_LINE> self._items = None <NEW_LINE> self.discriminator = None <NEW_LINE> if total is not None: <NEW_LINE> <INDENT> self.total = total <NEW_LINE> <DEDENT> if search_id is not None: <NEW_LINE> <INDENT> self.search_id = search_id <NEW_LINE> <DEDENT> if items is not None: <NEW_LINE> <INDENT> self.items = items <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def total(self): <NEW_LINE> <INDENT> return self._total <NEW_LINE> <DEDENT> @total.setter <NEW_LINE> def total(self, total): <NEW_LINE> <INDENT> self._total = total <NEW_LINE> <DEDENT> @property <NEW_LINE> def search_id(self): <NEW_LINE> <INDENT> return self._search_id <NEW_LINE> <DEDENT> @search_id.setter <NEW_LINE> def search_id(self, search_id): <NEW_LINE> <INDENT> self._search_id = search_id <NEW_LINE> <DEDENT> @property <NEW_LINE> def items(self): <NEW_LINE> <INDENT> return self._items <NEW_LINE> <DEDENT> @items.setter <NEW_LINE> def items(self, items): <NEW_LINE> <INDENT> self._items = items <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, 
value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(DeviceDatasourceInstancePaginationResponse, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, DeviceDatasourceInstancePaginationResponse): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
|
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
|
6259902521a7993f00c66eb3
|
class Bitmap: <NEW_LINE> <INDENT> def __init__(self, width, height, pixels=None): <NEW_LINE> <INDENT> self.width = width <NEW_LINE> self.height = height <NEW_LINE> if pixels is None: <NEW_LINE> <INDENT> self.pixels = [False for __ in range(width * height)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.pixels = pixels <NEW_LINE> <DEDENT> <DEDENT> def bitblt(self, src, x, y): <NEW_LINE> <INDENT> srcpixel = 0 <NEW_LINE> dstpixel = y * self.width + x <NEW_LINE> row_offset = self.width - src.width <NEW_LINE> for sy in range(src.height): <NEW_LINE> <INDENT> for sx in range(src.width): <NEW_LINE> <INDENT> self.pixels[dstpixel] = self.pixels[dstpixel] or src.pixels[srcpixel] <NEW_LINE> srcpixel += 1 <NEW_LINE> dstpixel += 1 <NEW_LINE> <DEDENT> dstpixel += row_offset <NEW_LINE> <DEDENT> <DEDENT> def show(self): <NEW_LINE> <INDENT> img = Image.new('P', (self.width, self.height)) <NEW_LINE> img.putpalette(bytes.fromhex('000000ffffff')) <NEW_LINE> img.frombytes(bytes(self.pixels)) <NEW_LINE> img.show()
|
A 2D bitmap image represented as a list of boolean values. Each
boolean value indicates the state of a single pixel in the bitmap.
|
625990256e29344779b01588
|
@reversion.register(follow=['county']) <NEW_LINE> @encoding.python_2_unicode_compatible <NEW_LINE> class SubCounty(AdministrativeUnitBase): <NEW_LINE> <INDENT> county = models.ForeignKey(County, on_delete=models.PROTECT) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name
|
A county can be sub divided into sub counties.
The sub-counties do not necessarily map to constituencies
|
625990258c3a8732951f748f
|
class CalculateServicer(object): <NEW_LINE> <INDENT> def CalculateAdd(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!')
|
The greeting service definition.
|
62599025ac7a0e7691f73421
|
class ColorClip(ImageClip): <NEW_LINE> <INDENT> def __init__(self, tamano, col=(0, 0, 0), ismask=False, duracion=None): <NEW_LINE> <INDENT> w, h = tamano <NEW_LINE> shape = (h, w) if np.isscalar(col) else (h, w, len(col)) <NEW_LINE> ImageClip.__init__(self, np.tile(col, w * h).reshape(shape), ismask=ismask, duracion=duracion)
|
An ImageClip showing just one color.
Parameters
-----------
tamano
Size (width, height) in pixels of the clip.
color
If argument ``ismask`` is False, ``color`` indicates
the color in RGB of the clip (default is black). If `ismask``
is True, ``color`` must be a float between 0 and 1 (default is 1)
ismask
Set to true if the clip will be used as a mask.
|
6259902530c21e258be9974e
|
class getUserIdentities_result(object): <NEW_LINE> <INDENT> def __init__(self, success=None, e=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> self.e = e <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.MAP: <NEW_LINE> <INDENT> self.success = {} <NEW_LINE> (_ktype2395, _vtype2396, _size2394) = iprot.readMapBegin() <NEW_LINE> for _i2398 in range(_size2394): <NEW_LINE> <INDENT> _key2399 = iprot.readI32() <NEW_LINE> _val2400 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() <NEW_LINE> self.success[_key2399] = _val2400 <NEW_LINE> <DEDENT> iprot.readMapEnd() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.e = TalkException() <NEW_LINE> self.e.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('getUserIdentities_result') <NEW_LINE> if self.success is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.MAP, 
0) <NEW_LINE> oprot.writeMapBegin(TType.I32, TType.STRING, len(self.success)) <NEW_LINE> for kiter2401, viter2402 in self.success.items(): <NEW_LINE> <INDENT> oprot.writeI32(kiter2401) <NEW_LINE> oprot.writeString(viter2402.encode('utf-8') if sys.version_info[0] == 2 else viter2402) <NEW_LINE> <DEDENT> oprot.writeMapEnd() <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.e is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('e', TType.STRUCT, 1) <NEW_LINE> self.e.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
|
Attributes:
- success
- e
|
6259902521bff66bcd723b9b
|
class CrtShTimeoutException(CrtShRequestException): <NEW_LINE> <INDENT> def __init__(self, message=None, cause=None, scan_result=None): <NEW_LINE> <INDENT> super(CrtShTimeoutException, self).__init__(message=message, cause=cause, scan_result=scan_result)
|
Timeout exception
|
62599025d99f1b3c44d065db
|
class NoSetter(Exception): <NEW_LINE> <INDENT> pass
|
This exception occurs when variable has no setter, but it was called.
|
625990251f5feb6acb163b29
|
class Normalizator(): <NEW_LINE> <INDENT> def __init__(self,dataset,method="standarization"): <NEW_LINE> <INDENT> self.train_data = dataset.data <NEW_LINE> self.size = self.train_data.shape <NEW_LINE> self.mean = self.train_data.mean(axis=0) <NEW_LINE> self.std = self.train_data.std(axis = 0) <NEW_LINE> self.max = self.train_data.max(axis = 0) <NEW_LINE> self.min = self.train_data.min(axis = 0) <NEW_LINE> self.method = method <NEW_LINE> self.normalize(dataset) <NEW_LINE> <DEDENT> def normalize(self, dataset): <NEW_LINE> <INDENT> data = dataset.data <NEW_LINE> if self.method == "rescaling": <NEW_LINE> <INDENT> for i in range(0,self.size[1]-1): <NEW_LINE> <INDENT> data[:,i] = (data[:,i] - self.min[i]) / (self.max[i] - self.min[i]) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for i in range(0, self.size[1] - 1): <NEW_LINE> <INDENT> data[:, i] = divide((data[:, i] - self.mean[i]), self.std[i])
|
Initialize with training data, it will get normalized, then use normalize functions for other datasets
|
62599025a8ecb03325872158
|
class ArticleColumn(Column): <NEW_LINE> <INDENT> def getChapterIndex(self, b, articleData): <NEW_LINE> <INDENT> return min(max(0, TX.asInt(b.e.form[self.C.PARAM_CHAPTER]) or 0), len(articleData.chapters or [])-1)
|
Generic column for articles.
|
62599025925a0f43d25e8f81
|
class YamlTag(object): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.__dict__.update(kwargs) <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> return getattr(self, key) <NEW_LINE> <DEDENT> def __getattr__(self, attr): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<%s %s>" % (self.__class__.__name__, sorted(self.__dict__.items()))
|
Superclass for constructors of custom tags defined in yaml file.
__str__ is overridden in subclass and used for serialization in module recorder.
|
62599026c432627299fa3f2e
|
class InducingPoints(InducingFeature): <NEW_LINE> <INDENT> def __init__(self, Z): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.Z = Parameter(Z, dtype=settings.float_type) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self.Z.shape[0] <NEW_LINE> <DEDENT> @decors.params_as_tensors <NEW_LINE> def Kuu(self, kern, jitter=0.0): <NEW_LINE> <INDENT> Kzz = kern.K(self.Z) <NEW_LINE> Kzz += jitter * tf.eye(len(self), dtype=settings.dtypes.float_type) <NEW_LINE> return Kzz <NEW_LINE> <DEDENT> @decors.params_as_tensors <NEW_LINE> def Kuf(self, kern, Xnew): <NEW_LINE> <INDENT> Kzx = kern.K(self.Z, Xnew) <NEW_LINE> return Kzx
|
Real-space inducing points
|
625990268e05c05ec3f6f5fa
|
class ts_label(dtable): <NEW_LINE> <INDENT> repos_sn = datt(int, doc="库序号") <NEW_LINE> label_sn = datt(ts_label_seqno, doc='标签序号,库内同名同号,同名不同库号不同') <NEW_LINE> label = datt(str, doc='标签名') <NEW_LINE> type = datt(str, len=1, doc='标签类型: S题型,T标签,C分类') <NEW_LINE> props = datt(json_object, doc='标签其他定义信息,如颜色、图标等') <NEW_LINE> created_ts = datt(datetime, doc='更新时间') <NEW_LINE> updated_ts = datt(datetime, doc='更新时间') <NEW_LINE> notes = datt(str, doc='标签使用说明') <NEW_LINE> __dobject_key__ = [label_sn]
|
QuestionStyles, Tag和Category的Label信息
|
6259902626238365f5fada8d
|
class EDTestSuitePluginExecMtz2Variousv1_0(EDTestSuite): <NEW_LINE> <INDENT> def process(self): <NEW_LINE> <INDENT> self.addTestCaseFromName("EDTestCasePluginUnitExecMtz2Variousv1_0") <NEW_LINE> self.addTestCaseFromName("EDTestCasePluginExecuteExecMtz2Variousv1_0")
|
This is the test suite for EDNA plugin Mtz2Variousv1_0
It will run subsequently all unit tests and execution tests.
|
62599026be8e80087fbbffb4
|
class FamilyName(atom.core.XmlElement): <NEW_LINE> <INDENT> _qname = GDATA_TEMPLATE % 'familyName' <NEW_LINE> yomi = 'yomi'
|
The gd:familyName element.
Specifies family name of the person, eg. "Smith".
|
6259902621bff66bcd723b9f
|
class WaveSink(object): <NEW_LINE> <INDENT> def __init__(self, fp, sample_rate, sample_width): <NEW_LINE> <INDENT> self._fp = fp <NEW_LINE> self._wavep = wave.open(self._fp, 'wb') <NEW_LINE> self._wavep.setsampwidth(sample_width) <NEW_LINE> self._wavep.setnchannels(1) <NEW_LINE> self._wavep.setframerate(sample_rate) <NEW_LINE> <DEDENT> def write(self, data): <NEW_LINE> <INDENT> self._wavep.writeframes(data) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self._wavep.close() <NEW_LINE> self._fp.close() <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> pass
|
Audio sink that writes audio data to a WAV file.
Args:
fp: file-like stream object to write data to.
sample_rate: sample rate in hertz.
sample_width: size of a single sample in bytes.
|
625990269b70327d1c57fcbe
|
class TestTracking(TestAsServer): <NEW_LINE> <INDENT> def setUpPreSession(self): <NEW_LINE> <INDENT> TestAsServer.setUpPreSession(self) <NEW_LINE> self.config.set_overlay(False) <NEW_LINE> self.config.set_internal_tracker(True) <NEW_LINE> <DEDENT> def test_add_remove_torrent(self): <NEW_LINE> <INDENT> tdef = TorrentDef() <NEW_LINE> sourcefn = os.path.join(os.getcwd(),"file.wmv") <NEW_LINE> tdef.add_content(sourcefn) <NEW_LINE> tdef.set_tracker(self.session.get_internal_tracker_url()) <NEW_LINE> tdef.finalize() <NEW_LINE> torrentfn = os.path.join(self.session.get_state_dir(),"gen.torrent") <NEW_LINE> tdef.save(torrentfn) <NEW_LINE> infohash = tdef.get_infohash() <NEW_LINE> hexinfohash = binascii.hexlify(infohash) <NEW_LINE> self.session.add_to_internal_tracker(tdef) <NEW_LINE> self.check_http_presence(hexinfohash,True) <NEW_LINE> self.session.remove_from_internal_tracker(tdef) <NEW_LINE> print >> sys.stderr,"test: Give network thread running tracker time to detect we removed the torrent file" <NEW_LINE> time.sleep(2) <NEW_LINE> self.check_http_presence(hexinfohash,False) <NEW_LINE> self.check_disk_presence(hexinfohash,False) <NEW_LINE> <DEDENT> def check_http_presence(self,hexinfohash,present): <NEW_LINE> <INDENT> print >> sys.stderr,"test: infohash is",hexinfohash <NEW_LINE> url = 'http://127.0.0.1:'+str(self.session.get_listen_port())+'/' <NEW_LINE> print >> sys.stderr,"test: tracker lives at",url <NEW_LINE> f = urlopen(url) <NEW_LINE> data = f.read() <NEW_LINE> f.close() <NEW_LINE> print >> sys.stderr,"test: tracker returned:",data <NEW_LINE> if present: <NEW_LINE> <INDENT> self.assert_(data.find(hexinfohash) != -1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.assert_(data.find(hexinfohash) == -1) <NEW_LINE> <DEDENT> <DEDENT> def check_disk_presence(self,hexinfohash,present): <NEW_LINE> <INDENT> itrackerdir = os.path.join(self.session.get_state_dir(),STATEDIR_ITRACKER_DIR) <NEW_LINE> for filename in os.listdir(itrackerdir): <NEW_LINE> <INDENT> if 
filename.startswith(hexinfohash): <NEW_LINE> <INDENT> if present: <NEW_LINE> <INDENT> self.assert_(True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.assert_(False)
|
Testing seeding via new tribler API:
|
62599026796e427e5384f6b9
|
class GpioController(): <NEW_LINE> <INDENT> class Pin(): <NEW_LINE> <INDENT> def __init__(self, gpio, pin): <NEW_LINE> <INDENT> self.gpio = gpio <NEW_LINE> self.pin = pin <NEW_LINE> <DEDENT> def set(self, value): <NEW_LINE> <INDENT> self.gpio.set_output(self.pin, value) <NEW_LINE> <DEDENT> <DEDENT> def __init__(self, ftdi): <NEW_LINE> <INDENT> self.ftdi = ftdi <NEW_LINE> self.direction = 0xf3 <NEW_LINE> value = self._read() <NEW_LINE> value &= 0xf0 <NEW_LINE> value |= 0x03 <NEW_LINE> self._write(value) <NEW_LINE> <DEDENT> def _read(self): <NEW_LINE> <INDENT> command = bytes([ Ftdi.GET_BITS_LOW, Ftdi.SEND_IMMEDIATE ]) <NEW_LINE> self.ftdi.write_data(command) <NEW_LINE> data = self.ftdi.read_data_bytes(1, 4) <NEW_LINE> return data[0] <NEW_LINE> <DEDENT> def _write(self, value): <NEW_LINE> <INDENT> command = bytes([ Ftdi.SET_BITS_LOW, value, self.direction ]) <NEW_LINE> self.ftdi.write_data(command) <NEW_LINE> <DEDENT> def set_output(self, pin, value): <NEW_LINE> <INDENT> mask = 1 << (pin % 8) <NEW_LINE> self.direction |= mask <NEW_LINE> data = self._read() <NEW_LINE> if value: <NEW_LINE> <INDENT> data &= ~mask <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data |= mask <NEW_LINE> <DEDENT> self._write(data)
|
Represents a single pin in the GPIO controller.
|
6259902615baa72349462ed5
|
class OpLabelingTopLevel(Operator): <NEW_LINE> <INDENT> name = "OpLabelingTopLevel" <NEW_LINE> InputImages = InputSlot(level=1) <NEW_LINE> LabelInputs = InputSlot(level=1) <NEW_LINE> LabelEraserValue = InputSlot( value=255 ) <NEW_LINE> LabelDelete = ( InputSlot() ) <NEW_LINE> LabelImages = OutputSlot(level=1) <NEW_LINE> NonzeroLabelBlocks = OutputSlot(level=1) <NEW_LINE> LabelNames = OutputSlot() <NEW_LINE> LabelColors = OutputSlot() <NEW_LINE> def __init__(self, blockDims=None, *args, **kwargs): <NEW_LINE> <INDENT> super(OpLabelingTopLevel, self).__init__(*args, **kwargs) <NEW_LINE> self.opLabelLane = OpMultiLaneWrapper( OpLabelingSingleLane, operator_kwargs={"blockDims": blockDims}, parent=self ) <NEW_LINE> self.LabelInputs.connect(self.InputImages) <NEW_LINE> self.opLabelLane.InputImage.connect(self.InputImages) <NEW_LINE> self.opLabelLane.LabelInput.connect(self.LabelInputs) <NEW_LINE> self.opLabelLane.LabelEraserValue.connect(self.LabelEraserValue) <NEW_LINE> self.opLabelLane.LabelDelete.connect(self.LabelDelete) <NEW_LINE> self.LabelDelete.setValue(-1) <NEW_LINE> self.LabelImages.connect(self.opLabelLane.LabelImage) <NEW_LINE> self.NonzeroLabelBlocks.connect(self.opLabelLane.NonzeroLabelBlocks) <NEW_LINE> self.LabelColors.setValue([]) <NEW_LINE> self.LabelNames.setValue([]) <NEW_LINE> <DEDENT> def propagateDirty(self, slot, subindex, roi): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def setInSlot(self, slot, subindex, roi, value): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def setupOutputs(self): <NEW_LINE> <INDENT> self.LabelNames.meta.dtype = object <NEW_LINE> self.LabelNames.meta.shape = (1,) <NEW_LINE> self.LabelColors.meta.dtype = object <NEW_LINE> self.LabelColors.meta.shape = (1,) <NEW_LINE> <DEDENT> def addLane(self, laneIndex): <NEW_LINE> <INDENT> numLanes = len(self.InputImages) <NEW_LINE> assert laneIndex == numLanes, "Lanes must be appended" <NEW_LINE> self.InputImages.resize(numLanes + 1) <NEW_LINE> <DEDENT> def removeLane(self, laneIndex, 
finalLength): <NEW_LINE> <INDENT> numLanes = len(self.InputImages) <NEW_LINE> self.InputImages.removeSlot(laneIndex, numLanes - 1) <NEW_LINE> <DEDENT> def getLane(self, laneIndex): <NEW_LINE> <INDENT> return OperatorSubView(self, laneIndex)
|
Top-level operator for the labelingApplet base class.
Provides all the slots needed by the labeling GUI, but any operator that provides the necessary slots can also be used with the LabelingGui.
|
62599026d164cc6175821eb3
|
class TestEnergy(unittest.TestCase): <NEW_LINE> <INDENT> def test_J(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> q = quantity.Energy(1.0,"J") <NEW_LINE> self.fail('Allowed invalid unit type "J".') <NEW_LINE> <DEDENT> except quantity.QuantityError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def test_Jpermol(self): <NEW_LINE> <INDENT> q = quantity.Energy(1.0,"J/mol") <NEW_LINE> self.assertAlmostEqual(q.value, 1.0, 6) <NEW_LINE> self.assertAlmostEqual(q.value_si, 1.0, delta=1e-6) <NEW_LINE> self.assertEqual(q.units, "J/mol") <NEW_LINE> <DEDENT> def test_cal(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> q = quantity.Energy(1.0,"cal") <NEW_LINE> self.fail('Allowed invalid unit type "cal".') <NEW_LINE> <DEDENT> except quantity.QuantityError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def test_calpermol(self): <NEW_LINE> <INDENT> q = quantity.Energy(1.0,"cal/mol") <NEW_LINE> self.assertAlmostEqual(q.value, 1.0, 6) <NEW_LINE> self.assertAlmostEqual(q.value_si, 4.184, delta=1e-6) <NEW_LINE> self.assertEqual(q.units, "cal/mol") <NEW_LINE> <DEDENT> def test_kJ(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> q = quantity.Energy(1.0,"kJ") <NEW_LINE> self.fail('Allowed invalid unit type "kJ".') <NEW_LINE> <DEDENT> except quantity.QuantityError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def test_kJpermol(self): <NEW_LINE> <INDENT> q = quantity.Energy(1.0,"kJ/mol") <NEW_LINE> self.assertAlmostEqual(q.value, 1.0, 6) <NEW_LINE> self.assertAlmostEqual(q.value_si, 1000., delta=1e-6) <NEW_LINE> self.assertEqual(q.units, "kJ/mol") <NEW_LINE> <DEDENT> def test_kcal(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> q = quantity.Energy(1.0,"kcal") <NEW_LINE> self.fail('Allowed invalid unit type "kcal".') <NEW_LINE> <DEDENT> except quantity.QuantityError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def test_kcalpermol(self): <NEW_LINE> <INDENT> q = quantity.Energy(1.0,"kcal/mol") <NEW_LINE> self.assertAlmostEqual(q.value, 1.0, 
6) <NEW_LINE> self.assertAlmostEqual(q.value_si, 4184., delta=1e-6) <NEW_LINE> self.assertEqual(q.units, "kcal/mol") <NEW_LINE> <DEDENT> def test_Kelvin(self): <NEW_LINE> <INDENT> q = quantity.Energy(10.0,"K") <NEW_LINE> self.assertAlmostEqual(q.value, 10*8.314472, delta=1e-6) <NEW_LINE> self.assertEqual(q.units, "J/mol")
|
Contains unit tests of the Energy unit type object.
|
6259902666673b3332c3132b
|
class landusechange(object): <NEW_LINE> <INDENT> def __init__(self, landusechange_variable): <NEW_LINE> <INDENT> self.var = landusechange_variable <NEW_LINE> <DEDENT> def initial(self): <NEW_LINE> <INDENT> self.var.ForestFractionInit = loadmap('ForestFraction') <NEW_LINE> self.var.DirectRunoffFractionInit = loadmap('DirectRunoffFraction') <NEW_LINE> self.var.WaterFractionInit = loadmap('WaterFraction') <NEW_LINE> self.var.IrrigationFractionInit = loadmap('IrrigationFraction') <NEW_LINE> self.var.RiceFractionInit = loadmap('RiceFraction') <NEW_LINE> self.var.OtherFractionInit = loadmap('OtherFraction') <NEW_LINE> self.var.ForestFraction = self.var.ForestFractionInit.copy() <NEW_LINE> self.var.DirectRunoffFraction = self.var.DirectRunoffFractionInit.copy() <NEW_LINE> self.var.WaterFraction =self.var.WaterFractionInit.copy() <NEW_LINE> self.var.IrrigationFraction = self.var.IrrigationFractionInit.copy() <NEW_LINE> self.var.RiceFraction = self.var.RiceFractionInit.copy() <NEW_LINE> self.var.OtherFraction = self.var.OtherFractionInit.copy() <NEW_LINE> self.var.FiveYearDayNo = np.array([1, 1462, 3288, 5115, 6941, 8767, 10593, 12420, 14246, 99999]) <NEW_LINE> <DEDENT> def dynamic(self): <NEW_LINE> <INDENT> if option['LandUseChange']: <NEW_LINE> <INDENT> FiveYearIndex = np.where(self.var.FiveYearDayNo <= self.var.currentTimeStep()) [0][-1] <NEW_LINE> self.var.ForestFraction = self.var.ForestFractionInit + self.var.ForestFractionChange[FiveYearIndex] <NEW_LINE> self.var.DirectRunoffFraction = self.var.DirectRunoffFractionInit + self.var.DirectRunoffFractionChange[FiveYearIndex] <NEW_LINE> self.var.WaterFraction = self.var.WaterFractionInit + self.var.WaterFractionChange[FiveYearIndex] <NEW_LINE> self.var.IrrigationFraction = self.var.IrrigationFractionInit + self.var.IrrigationFractionChange[FiveYearIndex] <NEW_LINE> self.var.RiceFraction = self.var.RiceFractionInit + self.var.RiceFractionChange[FiveYearIndex] <NEW_LINE> self.var.OtherFraction = self.var.OtherFractionInit + 
self.var.OtherFractionChange[FiveYearIndex] <NEW_LINE> self.var.Test = self.var.RiceFraction*1.0
|
# ************************************************************
# ***** LAND USE CHANGE : FRACTION MAPS **********************
# ************************************************************
# Each pixel is divided into several fractions, adding up to 1
# open water
# forest
# sealed fraction
# irrigated areas
# rice irrigation areas
# other
|
62599026c432627299fa3f30
|
class itkMultiplyByConstantImageFilterIUS3DIUS3(itkMultiplyByConstantImageFilterIUS3DIUS3_Superclass): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined") <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> InputConvertibleToOutputCheck = _itkMultiplyByConstantImageFilterPython.itkMultiplyByConstantImageFilterIUS3DIUS3_InputConvertibleToOutputCheck <NEW_LINE> Input1Input2OutputMultiplyOperatorCheck = _itkMultiplyByConstantImageFilterPython.itkMultiplyByConstantImageFilterIUS3DIUS3_Input1Input2OutputMultiplyOperatorCheck <NEW_LINE> def __New_orig__(): <NEW_LINE> <INDENT> return _itkMultiplyByConstantImageFilterPython.itkMultiplyByConstantImageFilterIUS3DIUS3___New_orig__() <NEW_LINE> <DEDENT> __New_orig__ = staticmethod(__New_orig__) <NEW_LINE> def SetConstant(self, *args): <NEW_LINE> <INDENT> return _itkMultiplyByConstantImageFilterPython.itkMultiplyByConstantImageFilterIUS3DIUS3_SetConstant(self, *args) <NEW_LINE> <DEDENT> def GetConstant(self): <NEW_LINE> <INDENT> return _itkMultiplyByConstantImageFilterPython.itkMultiplyByConstantImageFilterIUS3DIUS3_GetConstant(self) <NEW_LINE> <DEDENT> __swig_destroy__ = _itkMultiplyByConstantImageFilterPython.delete_itkMultiplyByConstantImageFilterIUS3DIUS3 <NEW_LINE> def cast(*args): <NEW_LINE> <INDENT> return _itkMultiplyByConstantImageFilterPython.itkMultiplyByConstantImageFilterIUS3DIUS3_cast(*args) <NEW_LINE> <DEDENT> cast = staticmethod(cast) <NEW_LINE> def GetPointer(self): <NEW_LINE> <INDENT> return _itkMultiplyByConstantImageFilterPython.itkMultiplyByConstantImageFilterIUS3DIUS3_GetPointer(self) <NEW_LINE> <DEDENT> def New(*args, **kargs): <NEW_LINE> <INDENT> obj = itkMultiplyByConstantImageFilterIUS3DIUS3.__New_orig__() <NEW_LINE> import itkTemplate <NEW_LINE> itkTemplate.New(obj, *args, **kargs) <NEW_LINE> return obj <NEW_LINE> <DEDENT> New = staticmethod(New)
|
Proxy of C++ itkMultiplyByConstantImageFilterIUS3DIUS3 class
|
6259902673bcbd0ca4bcb1ce
|
class WebAppError( Exception ): <NEW_LINE> <INDENT> pass
|
Define a dedicated web application specific error.
|
62599026ac7a0e7691f73427
|
class FinishView(ProductStatusView): <NEW_LINE> <INDENT> def get_queryset(self): <NEW_LINE> <INDENT> return Order.objects.filter(order_status=constants.ORDER_WC, is_valid=True).order_by('-update_time')
|
订单完成
|
625990265166f23b2e244313
|
class ISteps(Interface): <NEW_LINE> <INDENT> pass
|
Steps provider
|
625990265e10d32532ce40a3
|
class Promote(Resource): <NEW_LINE> <INDENT> @check_auth <NEW_LINE> def post(current_user, self, user_id): <NEW_LINE> <INDENT> if current_user["type"] != "admin": <NEW_LINE> <INDENT> return {"Message": "Must be an admin"} <NEW_LINE> <DEDENT> parser = reqparse.RequestParser() <NEW_LINE> parser.add_argument( 'type', type=str, required=True, help="Type to promote required" ) <NEW_LINE> data = parser.parse_args() <NEW_LINE> user_type = data['type'] <NEW_LINE> if not user_type: <NEW_LINE> <INDENT> return {"Message": "Type to promote can\'t be blank"}, 400 <NEW_LINE> <DEDENT> elif user_type not in ('admin', 'client'): <NEW_LINE> <INDENT> return {"Message": "Type must either be client or admin"}, 400 <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> conn = db() <NEW_LINE> cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor) <NEW_LINE> cur.execute("SELECT * FROM users WHERE id=%(user_id)s", {'user_id': user_id}) <NEW_LINE> res = cur.fetchone() <NEW_LINE> if res is None: <NEW_LINE> <INDENT> return {"Message": "User with the id does not exist"}, 404 <NEW_LINE> <DEDENT> cur.execute("UPDATE users SET type=%s WHERE id=%s;", (user_type, user_id)) <NEW_LINE> conn.commit() <NEW_LINE> user = {} <NEW_LINE> user['id'] = res['id'] <NEW_LINE> user['username'] = res['username'] <NEW_LINE> user['type'] = res['type'] <NEW_LINE> user['email'] = res['email'] <NEW_LINE> return {"Message": user}, 200 <NEW_LINE> <DEDENT> except (Exception, psycopg2.DatabaseError) as error: <NEW_LINE> <INDENT> conn = db() <NEW_LINE> cur = conn.cursor() <NEW_LINE> cur.execute("rollback;") <NEW_LINE> print(error) <NEW_LINE> return {'Message': 'current transaction is aborted'}, 500
|
docstring for Promote
|
6259902621bff66bcd723ba1
|
class ProductInfoElem(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Name = None <NEW_LINE> self.Value = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Name = params.get("Name") <NEW_LINE> self.Value = params.get("Value")
|
产品详情
|
6259902615baa72349462ed7
|
class User(AbstractBaseUser, PermissionsMixin): <NEW_LINE> <INDENT> crypto_key = models.IntegerField( unique=True, validators=[ MinValueValidator(1000000000), MaxValueValidator(999999999), ], editable=False, help_text='Unique key for internal operations', ) <NEW_LINE> username = models.CharField(max_length=255, unique=True) <NEW_LINE> email = models.EmailField(max_length=255, unique=True) <NEW_LINE> is_active = models.BooleanField(default=True) <NEW_LINE> is_staff = models.BooleanField(default=False) <NEW_LINE> objects = UserManager() <NEW_LINE> USERNAME_FIELD = 'email' <NEW_LINE> REQUIRED_FIELDS = ['username'] <NEW_LINE> def __str__(self) -> str: <NEW_LINE> <INDENT> return f'{self.username}' <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return f'<User: {self.username}>'
|
A model for app's Users
|
62599026d18da76e235b78ed
|
class KaldiServer: <NEW_LINE> <INDENT> def __init__(self, srv_config): <NEW_LINE> <INDENT> for key in ['name', 'host', 'port', 'samplerate']: <NEW_LINE> <INDENT> if key in srv_config: <NEW_LINE> <INDENT> setattr(self, key, srv_config[key]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> async def free(self): <NEW_LINE> <INDENT> await kaldi_server_queue.put(self)
|
This class describes the Kaldi server resource. It is a representation of a running instance of the Kaldi server
together with its parameters.
|
6259902626238365f5fada91
|
class RoleForm(Form): <NEW_LINE> <INDENT> name = StringField('name', [Required()]) <NEW_LINE> description = StringField('description') <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(RoleForm, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> if not super(RoleForm, self).validate(): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True
|
The role form
|
625990265e10d32532ce40a4
|
class THREDDSExplorerDialogTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_icon_png(self): <NEW_LINE> <INDENT> path = ':/plugins/THREDDSExplorer/icon.png' <NEW_LINE> icon = QIcon(path) <NEW_LINE> self.assertFalse(icon.isNull())
|
Test rerources work.
|
62599026d99f1b3c44d065e3
|
class SQLThread (threading.Thread): <NEW_LINE> <INDENT> resp_regexp = re.compile("Uploaded: (?P<filename>\d\d\d\d-\d\d-\d\d_\d\d:\d\d\.txt) Size: (?P<size>[0-9.]*) .b (?P<ok_entries>\d*) / (?P<total_entries>\d*).*") <NEW_LINE> max_entries = 35000 <NEW_LINE> def __init__(self, the_globs): <NEW_LINE> <INDENT> threading.Thread.__init__(self) <NEW_LINE> self.termevent=threading.Event() <NEW_LINE> self.sqlfile = None <NEW_LINE> self.new_file() <NEW_LINE> self.globs = the_globs <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> while not self.termevent.isSet(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> f = self.globs.Frame_Q.get(True,3) <NEW_LINE> <DEDENT> except Queue.Empty: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sqlreq = f.toSQL() <NEW_LINE> self.the_count += 1 <NEW_LINE> self.sqlfile.write(sqlreq) <NEW_LINE> <DEDENT> if self.the_count >= self.max_entries: <NEW_LINE> <INDENT> self.new_file() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def new_file(self): <NEW_LINE> <INDENT> if self.sqlfile != None: <NEW_LINE> <INDENT> self.sqlfile.flush() <NEW_LINE> old_name = self.sqlfile.name <NEW_LINE> self.sqlfile.close() <NEW_LINE> self.sqlfile = open(old_name,'r') <NEW_LINE> self.sqlfile.seek(0) <NEW_LINE> exception_info="" <NEW_LINE> ok = False <NEW_LINE> try: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> ok = False <NEW_LINE> exception_info = str(sys.exc_info()[0]) <NEW_LINE> resp=(0,0,"An exception has occurred while uploading " + self.sqlfile.name) <NEW_LINE> <DEDENT> self.sqlfile.close() <NEW_LINE> if ok: <NEW_LINE> <INDENT> m = self.resp_regexp.match(resp[2]) <NEW_LINE> if m: <NEW_LINE> <INDENT> nb_ok = int(m.group('ok_entries')) <NEW_LINE> filename = m.group('filename') <NEW_LINE> if ((nb_ok >= (self.max_entries -2)) and (os.path.basename(self.sqlfile.name) == filename)): <NEW_LINE> <INDENT> os.unlink(self.sqlfile.name) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> d = datetime.now() <NEW_LINE> 
filename='/home/cedric/'+str(d.date())+"_"+"%02i"%d.hour+":"+"%02i"%d.minute+'.txt' <NEW_LINE> self.sqlfile=open(filename,'w') <NEW_LINE> self.the_count=0 <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> self.termevent.set() <NEW_LINE> self.sqlfile.close()
|
This thread handles the output stream for SQL stuff
|
625990261f5feb6acb163b31
|
class Palio(CarroPopular): <NEW_LINE> <INDENT> def mostra_informacao(self): <NEW_LINE> <INDENT> print("Modelo: Palio") <NEW_LINE> print("Fabricante: Fiat") <NEW_LINE> print("Categoria: Popular\n")
|
Carro popular Palio.
|
62599026d164cc6175821eb8
|
class EagerModelSerializer(EagerLoadingMixin, ModelSerializer): <NEW_LINE> <INDENT> pass
|
Serializer that includes the select and prefetch related
|
62599026d18da76e235b78ee
|
class PMMHSampler(BaseAdaptiveMHSampler): <NEW_LINE> <INDENT> def __init__(self, log_f_estimator, log_prop_density, prop_sampler, prop_scales, prng): <NEW_LINE> <INDENT> super(PMMHSampler, self).__init__(prop_scales) <NEW_LINE> self.log_f_estimator = log_f_estimator <NEW_LINE> if log_prop_density is None: <NEW_LINE> <INDENT> self.do_metropolis_update = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.do_metropolis_update = False <NEW_LINE> self.log_prop_density = log_prop_density <NEW_LINE> <DEDENT> self.prop_sampler = prop_sampler <NEW_LINE> self.prng = prng <NEW_LINE> <DEDENT> def get_samples(self, theta_init, n_sample): <NEW_LINE> <INDENT> if hasattr(theta_init, 'shape'): <NEW_LINE> <INDENT> thetas = np.empty((n_sample, theta_init.shape[0])) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> thetas = np.empty(n_sample) <NEW_LINE> <DEDENT> thetas[0] = theta_init <NEW_LINE> log_f_est_curr = self.log_f_estimator(theta_init) <NEW_LINE> n_reject = 0 <NEW_LINE> for s in range(1, n_sample): <NEW_LINE> <INDENT> if self.do_metropolis_update: <NEW_LINE> <INDENT> thetas[s], log_f_est_curr, rejection = mcmc.metropolis_step( thetas[s-1], log_f_est_curr, self.log_f_estimator, self.prng, self.prop_sampler, self.prop_scales) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> thetas[s], log_f_est_curr, rejection = mcmc.met_hastings_step( thetas[s-1], log_f_est_curr, self.log_f_estimator, self.prng, self.prop_sampler, self.prop_scales, self.log_prop_density) <NEW_LINE> <DEDENT> if rejection: <NEW_LINE> <INDENT> n_reject += 1 <NEW_LINE> <DEDENT> <DEDENT> return thetas, n_reject
|
Pseudo-marginal Metropolis Hastings sampler.
Markov chain Monte Carlo sampler which uses pseudo-marginal Metropolis
Hastings updates. In the pseudo-marginal framework only an unbiased
noisy estimate of the (unnormalised) target density is available.
|
625990266fece00bbaccc8fa
|
class PersonName: <NEW_LINE> <INDENT> def __init__(self, id, given, other, family, full_name): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.given = common.standardize_text(given) <NEW_LINE> self.other = common.standardize_text(other) <NEW_LINE> self.family = common.standardize_text(family) <NEW_LINE> self.concat = s.nullify_blanks(s.make_string([self.given, self.other, self.family])) <NEW_LINE> self.is_populated = self.concat is not None <NEW_LINE> <DEDENT> def is_similar(self, other_person_name): <NEW_LINE> <INDENT> if self.family is None or other_person_name.family is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if self.id == other_person_name.id: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if self.family != other_person_name.family and jellyfish.damerau_levenshtein_distance(self.family, other_person_name.family) > 1: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if (self.given is None and self.other is None) or (other_person_name.given is None and other_person_name.other is None): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if ((self.given is not None or other_person_name.other is not None) and (self.other is not None or other_person_name.given is not None) and not common.names_conflict(self.given, other_person_name.given) and not common.names_conflict(self.other, other_person_name.other)): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if ((self.given is not None or other_person_name.given is not None) and (self.other is not None or other_person_name.other is not None) and not common.names_conflict(self.given, other_person_name.other) and not common.names_conflict(self.other, other_person_name.given)): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False
|
Simplistic class to hold a PersonName. For this version, we will only work with split out names, not full
names. It is the responsibility of the PersonName crawler to figure out how to split up full names.
|
625990261f5feb6acb163b33
|
class TestMerge(TransactionCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(TestMerge, self).setUp() <NEW_LINE> self.wh_main = self.browse_ref('stock.warehouse0') <NEW_LINE> self.wh_ch = self.browse_ref('stock.stock_warehouse_shop0') <NEW_LINE> self.product = self.browse_ref('product.product_product_4') <NEW_LINE> inventory = self.env['stock.inventory'].create( {'name': 'Remove product for test', 'location_id': self.ref('stock.stock_location_locations'), 'filter': 'product', 'product_id': self.product.id}) <NEW_LINE> inventory.prepare_inventory() <NEW_LINE> inventory.reset_real_qty() <NEW_LINE> inventory.action_done() <NEW_LINE> inventory = self.env['stock.inventory'].create( {'name': 'Test stock available for reservation', 'location_id': self.wh_ch.lot_stock_id.id, 'filter': 'none'}) <NEW_LINE> inventory.prepare_inventory() <NEW_LINE> self.env['stock.inventory.line'].create({ 'inventory_id': inventory.id, 'product_id': self.product.id, 'location_id': self.wh_ch.lot_stock_id.id, 'product_qty': 10.0}) <NEW_LINE> inventory.action_done() <NEW_LINE> <DEDENT> def test_merge(self): <NEW_LINE> <INDENT> quant_obj = self.env['stock.quant'] <NEW_LINE> domain = [('location_id', '=', self.wh_ch.lot_stock_id.id), ('product_id', '=', self.product.id)] <NEW_LINE> quants = quant_obj.search(domain) <NEW_LINE> self.assertEqual(len(quants), 1, "There should be 1 quant") <NEW_LINE> move = self.env['stock.move'].create( {'name': 'Test move', 'product_id': self.product.id, 'location_id': self.wh_ch.lot_stock_id.id, 'location_dest_id': self.wh_main.lot_stock_id.id, 'product_uom_qty': 5.0, 'product_uom': self.product.uom_id.id}) <NEW_LINE> move.action_confirm() <NEW_LINE> move.action_assign() <NEW_LINE> quants = quant_obj.search(domain) <NEW_LINE> self.assertEqual(len(quants), 2, "There should be 2 quants") <NEW_LINE> move.action_cancel() <NEW_LINE> quants = quant_obj.search(domain) <NEW_LINE> self.assertEqual(len(quants), 1, "There should be 1 quant")
|
Test the potential quantity on a product with a multi-line BoM
|
62599026d18da76e235b78ef
|
class Bullet(Sprite): <NEW_LINE> <INDENT> def __init__(self, ai_game): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.screen = ai_game.screen <NEW_LINE> self.settings = ai_game.settings <NEW_LINE> self.color = self.settings.bullet_color <NEW_LINE> self.rect = pygame.Rect(0, 0, self.settings.bullet_width, self.settings.bullet_height) <NEW_LINE> self.rect.midtop = ai_game.ship.rect.midtop <NEW_LINE> self.y = float(self.rect.y) <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> self.y -= self.settings.bullet_speed <NEW_LINE> self.rect.y = self.y <NEW_LINE> <DEDENT> def draw_bullet(self): <NEW_LINE> <INDENT> pygame.draw.rect(self.screen, self.color, self.rect)
|
A Class to manage bullets fired from the ship
|
6259902626238365f5fada95
|
class GenericCSV(CSV, PasswordExporter): <NEW_LINE> <INDENT> cap = Cap.IMPORT | Cap.EXPORT <NEW_LINE> name = 'csv' <NEW_LINE> himport = "pass import csv file.csv --cols 'url,login,,password'" <NEW_LINE> writer = None <NEW_LINE> def parse(self): <NEW_LINE> <INDENT> self.file.readline() <NEW_LINE> if ',' in self.cols: <NEW_LINE> <INDENT> self.fieldnames = self.cols.split(',') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise FormatError("no columns to map to credential attributes.") <NEW_LINE> <DEDENT> super().parse() <NEW_LINE> <DEDENT> def exist(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def clean(self, cmdclean, convert): <NEW_LINE> <INDENT> super().clean(cmdclean, convert) <NEW_LINE> fieldnames = set() <NEW_LINE> for entry in self.data: <NEW_LINE> <INDENT> path = entry.pop('path', '') <NEW_LINE> entry['group'] = os.path.join(self.root, os.path.dirname(path)) <NEW_LINE> entry['title'] = os.path.basename(path) <NEW_LINE> fieldnames.update(set(entry.keys())) <NEW_LINE> <DEDENT> if not self.all: <NEW_LINE> <INDENT> fieldnames = self.keyslist <NEW_LINE> <DEDENT> for entry in self.data: <NEW_LINE> <INDENT> for key in fieldnames: <NEW_LINE> <INDENT> if key not in entry: <NEW_LINE> <INDENT> entry[key] = '' <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.writer = csv.DictWriter(self.file, fieldnames=sorted(fieldnames), restval='', extrasaction='raise') <NEW_LINE> self.writer.writeheader() <NEW_LINE> <DEDENT> def insert(self, entry): <NEW_LINE> <INDENT> if self.all: <NEW_LINE> <INDENT> self.writer.writerow(entry) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> res = {} <NEW_LINE> for key in self.keyslist: <NEW_LINE> <INDENT> res[key] = entry.get(key, '') <NEW_LINE> <DEDENT> self.writer.writerow(res) <NEW_LINE> <DEDENT> <DEDENT> def open(self): <NEW_LINE> <INDENT> if self.action is Cap.IMPORT: <NEW_LINE> <INDENT> super().open() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if os.path.isfile(self.prefix): <NEW_LINE> <INDENT> if self.force: <NEW_LINE> <INDENT> 
self.file = open(self.prefix, 'w', encoding=self.encoding) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise PMError(f"{self.prefix} is already a file.") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.file = open(self.prefix, 'w', encoding=self.encoding)
|
Importer & Exporter in generic CSV format.
:usage:
You should use the --cols option to map columns to credential attributes.
The recognized column names by pass-import are the following:
'title', 'password', 'login', 'email', 'url', 'comments',
'otpauth', 'group'
``title`` and ``group`` field are used to generate the password
path. If you have otp data, they should be named as ``otpauth``.
These are the *standard* field names. You can add any other field
you want.
|
62599026ac7a0e7691f7342d
|
class AAAA(dns.rdata.Rdata): <NEW_LINE> <INDENT> __slots__ = ['address'] <NEW_LINE> def __init__(self, rdclass, rdtype, address): <NEW_LINE> <INDENT> super().__init__(rdclass, rdtype) <NEW_LINE> dns.ipv6.inet_aton(address) <NEW_LINE> object.__setattr__(self, 'address', address) <NEW_LINE> <DEDENT> def to_text(self, origin=None, relativize=True, **kw): <NEW_LINE> <INDENT> return self.address <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None): <NEW_LINE> <INDENT> address = tok.get_identifier() <NEW_LINE> tok.get_eol() <NEW_LINE> return cls(rdclass, rdtype, address) <NEW_LINE> <DEDENT> def _to_wire(self, file, compress=None, origin=None, canonicalize=False): <NEW_LINE> <INDENT> file.write(dns.ipv6.inet_aton(self.address)) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): <NEW_LINE> <INDENT> address = dns.ipv6.inet_ntoa(parser.get_remaining()) <NEW_LINE> return cls(rdclass, rdtype, address)
|
AAAA record.
|
6259902621bff66bcd723ba7
|
@enum.unique <NEW_LINE> class Subject(enum.Enum): <NEW_LINE> <INDENT> value = 0 <NEW_LINE> type = 1 <NEW_LINE> frame = 2 <NEW_LINE> inferior = 3 <NEW_LINE> thread = 4 <NEW_LINE> progspace = 5 <NEW_LINE> objfile = 6 <NEW_LINE> block = 7 <NEW_LINE> symbol = 8 <NEW_LINE> symbol_table = 9 <NEW_LINE> line_table = 10 <NEW_LINE> user_breakpoint = 11 <NEW_LINE> user_watchpoint = 12 <NEW_LINE> architecture = 13 <NEW_LINE> macro = 14
|
The list of valid subject codes for messages.
|
62599026d99f1b3c44d065e7
|
class UafWatchpoint(gdb.Breakpoint): <NEW_LINE> <INDENT> def __init__(self, addr): <NEW_LINE> <INDENT> super(UafWatchpoint, self).__init__("*{:#x}".format(addr), gdb.BP_WATCHPOINT, internal=True) <NEW_LINE> self.address = addr <NEW_LINE> self.silent = True <NEW_LINE> self.enabled = True <NEW_LINE> return <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> frame = gdb.selected_frame() <NEW_LINE> if frame.name() in ("_int_malloc", "malloc_consolidate", "__libc_calloc"): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> pc = gdb_get_nth_previous_instruction_address(current_arch.pc, 2) <NEW_LINE> insn = gef_current_instruction(pc) <NEW_LINE> msg = [] <NEW_LINE> msg.append(Color.colorify("Heap-Analysis", "yellow bold")) <NEW_LINE> msg.append("Possible Use-after-Free in '{:s}': pointer {:#x} was freed, but is attempted to be used at {:#x}" .format(get_filepath(), self.address, pc)) <NEW_LINE> msg.append("{:#x} {:s} {:s}".format(insn.address, insn.mnemonic, Color.yellowify(", ".join(insn.operands)))) <NEW_LINE> push_context_message("warn", "\n".join(msg)) <NEW_LINE> return True
|
Custom watchpoints set TraceFreeBreakpoint() to monitor free()d pointers being used.
|
62599026796e427e5384f6c1
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.