code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
|---|---|---|
class Dog(Animal): <NEW_LINE> <INDENT> def run(self): <NEW_LINE> <INDENT> print('Dog is running')
|
docstring for Dog
|
6259902e6fece00bbaccc9f9
|
class Tool(benchexec.tools.template.BaseTool): <NEW_LINE> <INDENT> def executable(self): <NEW_LINE> <INDENT> executable = util.find_executable('predatorHP.py') <NEW_LINE> executableDir = os.path.dirname(executable) <NEW_LINE> if not os.path.isfile(os.path.join(executableDir, "predator-build-ok")): <NEW_LINE> <INDENT> self._buildPredatorHp(executableDir) <NEW_LINE> <DEDENT> return executable <NEW_LINE> <DEDENT> def _buildPredatorHp(self, executableDir): <NEW_LINE> <INDENT> proc = subprocess.Popen([os.path.join(executableDir, 'build-all.sh')], cwd=executableDir) <NEW_LINE> proc.communicate() <NEW_LINE> if proc.returncode: <NEW_LINE> <INDENT> sys.exit('Failed to build Predator-HP, please fix the build first.') <NEW_LINE> <DEDENT> <DEDENT> def name(self): <NEW_LINE> <INDENT> return 'Predator-HP' <NEW_LINE> <DEDENT> def cmdline(self, executable, options, tasks, propertyfile=None, rlimits={}): <NEW_LINE> <INDENT> spec = ["--propertyfile", propertyfile] if propertyfile is not None else [] <NEW_LINE> return [executable] + options + spec + tasks <NEW_LINE> <DEDENT> def determine_result(self, returncode, returnsignal, output, isTimeout): <NEW_LINE> <INDENT> output = '\n'.join(output) <NEW_LINE> status = "ERROR" <NEW_LINE> if "UNKNOWN" in output: <NEW_LINE> <INDENT> status = result.RESULT_UNKNOWN <NEW_LINE> <DEDENT> elif "TRUE" in output: <NEW_LINE> <INDENT> status = result.RESULT_TRUE_PROP <NEW_LINE> <DEDENT> elif "FALSE(valid-memtrack)" in output: <NEW_LINE> <INDENT> status = result.RESULT_FALSE_MEMTRACK <NEW_LINE> <DEDENT> elif "FALSE(valid-deref)" in output: <NEW_LINE> <INDENT> status = result.RESULT_FALSE_DEREF <NEW_LINE> <DEDENT> elif "FALSE(valid-free)" in output: <NEW_LINE> <INDENT> status = result.RESULT_FALSE_FREE <NEW_LINE> <DEDENT> elif "FALSE" in output: <NEW_LINE> <INDENT> status = result.RESULT_FALSE_REACH <NEW_LINE> <DEDENT> if (status == "ERROR" and isTimeout): <NEW_LINE> <INDENT> status = "TIMEOUT" <NEW_LINE> <DEDENT> return status <NEW_LINE> <DEDENT> def 
program_files(self, executable): <NEW_LINE> <INDENT> executableDir = os.path.dirname(executable) <NEW_LINE> dependencies = [ "predator-repo", "build-all.sh" ] <NEW_LINE> return [executable] + util.flatten(util.expand_filename_pattern(dep, installDir) for dep in dependencies) <NEW_LINE> <DEDENT> def working_directory(self, executable): <NEW_LINE> <INDENT> return os.path.dirname(executable)
|
Wrapper for a Predator - Hunting Party
http://www.fit.vutbr.cz/research/groups/verifit/tools/predator-hp/
|
6259902e0a366e3fb87dda32
|
class SerializingManager(models.Manager): <NEW_LINE> <INDENT> def get_query_set(self): <NEW_LINE> <INDENT> return SerializingQuerySet(self.model, using=self._db)
|
Applies the SerializingQuerySet
|
6259902e507cdc57c63a5df3
|
class Tweet(DB.Model): <NEW_LINE> <INDENT> id = DB.Column(DB.BigInteger, primary_key=True) <NEW_LINE> text = DB.Column(DB.Unicode(500)) <NEW_LINE> embedding = DB.Column(DB.PickleType, nullable=False) <NEW_LINE> user_id = DB.Column(DB.BigInteger, DB.ForeignKey('user.id'), nullable=False) <NEW_LINE> user = DB.relationship('User', backref=DB.backref('tweets', lazy=True)) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return '<Tweet {}>'.format(self.text)
|
Tweets
|
6259902e73bcbd0ca4bcb2dc
|
class ready(Event): <NEW_LINE> <INDENT> pass
|
ready Event
|
6259902ea4f1c619b294f641
|
class Startstate(object): <NEW_LINE> <INDENT> board = [] <NEW_LINE> def __init__(self, teams, simulator): <NEW_LINE> <INDENT> counter = 0 <NEW_LINE> max_count = len(teams)/6; <NEW_LINE> for i in range(0, math.ceil(max_count)): <NEW_LINE> <INDENT> self.board.append([]) <NEW_LINE> <DEDENT> for i in sorted(teams, key=lambda x: x[1], reverse = True): <NEW_LINE> <INDENT> self.board[counter].append({'name' : i[0], 'won': 0, 'score' : 0}) <NEW_LINE> counter += 1 <NEW_LINE> if(counter == max_count): counter = 0 <NEW_LINE> <DEDENT> self.simulator = simulator <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> for group in self.board: <NEW_LINE> <INDENT> for j in group: <NEW_LINE> <INDENT> print(str(j), end='\n') <NEW_LINE> <DEDENT> print('\n') <NEW_LINE> <DEDENT> return qs.QualificationState(self.board, self.simulator) <NEW_LINE> <DEDENT> def has_next(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def print_table(self): <NEW_LINE> <INDENT> for group in self.board: <NEW_LINE> <INDENT> for j in group: <NEW_LINE> <INDENT> print(str(j), end='\n') <NEW_LINE> <DEDENT> print('\n')
|
UEFA - počiatočný stav. Tu sa udeje vygenerovanie tabuľky
|
6259902e96565a6dacd2d7b4
|
class CharNullField(models.CharField): <NEW_LINE> <INDENT> description = "CharField that stores NULL but returns ''" <NEW_LINE> def to_python(self, value): <NEW_LINE> <INDENT> if isinstance(value, models.CharField): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> if value is None: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> <DEDENT> def get_db_prep_value(self, value, connection, prepared=False): <NEW_LINE> <INDENT> if value == "": <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return value
|
Courtesy of https://code.djangoproject.com/ticket/9590.
|
6259902e6e29344779b0169b
|
class Stock_Yahoo(Stock): <NEW_LINE> <INDENT> def __init__(self, symbol, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.data = query(symbol)["query"]["results"]["quote"] <NEW_LINE> self.name = symbol
|
Creates a share-object from the date from Yahoo. You will need internet access, since the data are pulled live for yahoo.
|
6259902e8a43f66fc4bf31d1
|
class QuotaReport(InitDict): <NEW_LINE> <INDENT> required_arguments = [ "disk_limit", "file_limit", "threshold", "soft_disk_limit", "soft_file_limit", "quota_target", "files_used", "disk_used", "tree" ]
|
Data object representing a quota report.
|
6259902e15baa72349462fe4
|
class _PortPool: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._port_queue = collections.deque() <NEW_LINE> self.ports_checked_for_last_request = 0 <NEW_LINE> <DEDENT> def num_ports(self): <NEW_LINE> <INDENT> return len(self._port_queue) <NEW_LINE> <DEDENT> def get_port_for_process(self, pid): <NEW_LINE> <INDENT> if not self._port_queue: <NEW_LINE> <INDENT> raise RuntimeError('No ports being managed.') <NEW_LINE> <DEDENT> check_count = 0 <NEW_LINE> max_ports_to_test = len(self._port_queue) <NEW_LINE> while check_count < max_ports_to_test: <NEW_LINE> <INDENT> candidate = self._port_queue.pop() <NEW_LINE> self._port_queue.appendleft(candidate) <NEW_LINE> check_count += 1 <NEW_LINE> if (candidate.start_time == 0 or candidate.start_time != _get_process_start_time(candidate.pid)): <NEW_LINE> <INDENT> if _is_port_free(candidate.port): <NEW_LINE> <INDENT> candidate.pid = pid <NEW_LINE> candidate.start_time = _get_process_start_time(pid) <NEW_LINE> if not candidate.start_time: <NEW_LINE> <INDENT> logging.info('Can\'t read start time for pid %d.', pid) <NEW_LINE> <DEDENT> self.ports_checked_for_last_request = check_count <NEW_LINE> return candidate.port <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logging.info( 'Port %d unexpectedly in use, last owning pid %d.', candidate.port, candidate.pid) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> logging.info('All ports in use.') <NEW_LINE> self.ports_checked_for_last_request = check_count <NEW_LINE> return 0 <NEW_LINE> <DEDENT> def add_port_to_free_pool(self, port): <NEW_LINE> <INDENT> if port < 1 or port > 65535: <NEW_LINE> <INDENT> raise ValueError( 'Port must be in the [1, 65535] range, not %d.' % port) <NEW_LINE> <DEDENT> port_info = _PortInfo(port=port) <NEW_LINE> self._port_queue.append(port_info)
|
Manage available ports for processes.
Ports are reclaimed when the reserving process exits and the reserved port
is no longer in use. Only ports which are free for both TCP and UDP will be
handed out. It is easier to not differentiate between protocols.
The pool must be pre-seeded with add_port_to_free_pool() calls
after which get_port_for_process() will allocate and reclaim ports.
The len() of a _PortPool returns the total number of ports being managed.
Attributes:
ports_checked_for_last_request: int. The number of ports examined
in order to return from the most recent get_port_for_process()
request. A high number here likely means the number of
available ports with no active process using them is getting
low.
|
6259902e73bcbd0ca4bcb2de
|
class Flickr(models.Source): <NEW_LINE> <INDENT> FAST_POLL = datetime.timedelta(minutes=60) <NEW_LINE> GR_CLASS = gr_flickr.Flickr <NEW_LINE> SHORT_NAME = 'flickr' <NEW_LINE> URL_CANONICALIZER = util.UrlCanonicalizer( domain=GR_CLASS.DOMAIN, approve=r'https://www\.flickr\.com/(photos|people)/[^/?]+/([^/?]+/)?$', reject=r'https://login\.yahoo\.com/.*', subdomain='www', trailing_slash=True, headers=util.USER_AGENT_HEADER) <NEW_LINE> username = ndb.StringProperty() <NEW_LINE> @staticmethod <NEW_LINE> def new(handler, auth_entity=None, **kwargs): <NEW_LINE> <INDENT> person = json.loads(auth_entity.user_json).get('person', {}) <NEW_LINE> return Flickr( id=person.get('nsid'), auth_entity=auth_entity.key, name=person.get('realname', {}).get('_content'), username=(person.get('path_alias') or person.get('username', {}).get('_content')), picture='https://farm{}.staticflickr.com/{}/buddyicons/{}.jpg' .format( person.get('iconfarm'), person.get('iconserver'), person.get('nsid')), url=person.get('profileurl', {}).get('_content'), **kwargs) <NEW_LINE> <DEDENT> def silo_url(self): <NEW_LINE> <INDENT> return self.url <NEW_LINE> <DEDENT> def user_tag_id(self): <NEW_LINE> <INDENT> return self.gr_source.tag_uri(self.username) <NEW_LINE> <DEDENT> def get_activities_response(self, *args, **kwargs): <NEW_LINE> <INDENT> kwargs.setdefault('group_id', SELF) <NEW_LINE> if 'min_id' in kwargs: <NEW_LINE> <INDENT> del kwargs['min_id'] <NEW_LINE> <DEDENT> return self.gr_source.get_activities_response(*args, **kwargs) <NEW_LINE> <DEDENT> def canonicalize_url(self, url, activity=None, **kwargs): <NEW_LINE> <INDENT> if not url.endswith('/'): <NEW_LINE> <INDENT> url = url + '/' <NEW_LINE> <DEDENT> if self.username: <NEW_LINE> <INDENT> url = url.replace('flickr.com/photos/%s/' % self.username, 'flickr.com/photos/%s/' % self.key.id()) <NEW_LINE> url = url.replace('flickr.com/people/%s/' % self.username, 'flickr.com/people/%s/' % self.key.id()) <NEW_LINE> <DEDENT> return super(Flickr, 
self).canonicalize_url(url, **kwargs)
|
A flickr account.
The key name is the nsid
|
6259902e287bf620b6272c33
|
class TestTextPageDataRuleMetaData(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testTextPageDataRuleMetaData(self): <NEW_LINE> <INDENT> pass
|
TextPageDataRuleMetaData unit test stubs
|
6259902e1d351010ab8f4b65
|
class HOdlcoin(Bitcoin): <NEW_LINE> <INDENT> name = 'hodlcoin' <NEW_LINE> symbols = ('HODL', ) <NEW_LINE> seeds = ("westcoast.hodlcoin.com", "eastcoast.hodlcoin.com", "europe.hodlcoin.com", "asia.hodlcoin.com", "seed.hodlcoin.oo.fi", "seed.hodlcoin.dk", "seed.hodlcoin.com") <NEW_LINE> port = 1989 <NEW_LINE> message_start = b'\xf9\xbc\xb5\xd9' <NEW_LINE> base58_prefixes = { 'PUBKEY_ADDR': 40, 'SCRIPT_ADDR': 5, 'SECRET_KEY': 168 }
|
Class with all the necessary HOdlcoin network information based on
https://github.com/HOdlcoin/HOdlcoin/blob/HODLCoin0.11.3/src/chainparams.cpp
(date of access: 02/15/2018)
|
6259902ed4950a0f3b111664
|
class ManagedClusterSKU(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'tier': {'key': 'tier', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, name: Optional[Union[str, "ManagedClusterSKUName"]] = None, tier: Optional[Union[str, "ManagedClusterSKUTier"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(ManagedClusterSKU, self).__init__(**kwargs) <NEW_LINE> self.name = name <NEW_LINE> self.tier = tier
|
ManagedClusterSKU.
:ivar name: Name of a managed cluster SKU. Possible values include: "Basic".
:vartype name: str or ~azure.mgmt.containerservice.v2020_11_01.models.ManagedClusterSKUName
:ivar tier: Tier of a managed cluster SKU. Possible values include: "Paid", "Free".
:vartype tier: str or ~azure.mgmt.containerservice.v2020_11_01.models.ManagedClusterSKUTier
|
6259902e6e29344779b0169d
|
class COMTrans(object): <NEW_LINE> <INDENT> def Recv(): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> count = ser.inWaiting() <NEW_LINE> if count!=0: <NEW_LINE> <INDENT> recv = ser.read(count) <NEW_LINE> currenttime = strftime("%Y-%m-%d %H:%M:%S",localtime(time())) <NEW_LINE> print(currenttime + '<< ' + recv.decode('utf-8')) <NEW_LINE> <DEDENT> ser.flushInput() <NEW_LINE> sleep(0.5) <NEW_LINE> <DEDENT> <DEDENT> def RunRecv(): <NEW_LINE> <INDENT> t1 = threading.Thread(target=COMTrans.Recv) <NEW_LINE> t1.start() <NEW_LINE> <DEDENT> def Send(data): <NEW_LINE> <INDENT> ser.write(data) <NEW_LINE> <DEDENT> def RunSend(data): <NEW_LINE> <INDENT> t2 = threading.Thread(target=COMTrans.Send,args=[data,]) <NEW_LINE> t2.start() <NEW_LINE> t2.join()
|
定义串口发送和接收功能;
RunRecv()函数没有参数,打印接收的串口数据;
RunSend(data),参数data:需要发送的数据;如发送字符串"root",则data='root'.encode('utf-8')
|
6259902e15baa72349462fe6
|
class DHCPEncodeError(DHCPException) : <NEW_LINE> <INDENT> pass
|
There was an error with the parameters while building the DHCP packet.
|
6259902e56b00c62f0fb390f
|
class ReadCounts(HighDimBase): <NEW_LINE> <INDENT> def _validate_header_extensions(self): <NEW_LINE> <INDENT> self._check_header_extensions() <NEW_LINE> <DEDENT> def remap_to(self, destination=None): <NEW_LINE> <INDENT> return self._remap_to_chromosomal_regions(destination) <NEW_LINE> <DEDENT> @property <NEW_LINE> def samples(self): <NEW_LINE> <INDENT> return [h.rsplit('.', 1)[0] for h in self.header[1:]] <NEW_LINE> <DEDENT> @property <NEW_LINE> def allowed_header(self): <NEW_LINE> <INDENT> return ['readcount', 'normalizedreadcount']
|
Subclass for ReadCounts.
|
6259902e66673b3332c3143e
|
class MarketMonitor(BasicMonitor): <NEW_LINE> <INDENT> def __init__(self, eventEngine, parent=None): <NEW_LINE> <INDENT> super(MarketMonitor, self).__init__(eventEngine, parent) <NEW_LINE> d = OrderedDict() <NEW_LINE> d['symbol'] = {'chinese': u'合约代码', 'cellType': ""} <NEW_LINE> d['vtSymbol'] = {'chinese': u'名称', 'cellType': ""} <NEW_LINE> d['lastPrice'] = {'chinese': u'最新价', 'cellType': ""} <NEW_LINE> d['volume'] = {'chinese': u'成交量', 'cellType': ""} <NEW_LINE> d['openInterest'] = {'chinese': u'持仓量', 'cellType': ""} <NEW_LINE> d['openPrice'] = {'chinese': u'开盘价', 'cellType': ""} <NEW_LINE> d['highPrice'] = {'chinese': u'最高价', 'cellType': ""} <NEW_LINE> d['lowPrice'] = {'chinese': u'最低价', 'cellType': ""} <NEW_LINE> d['bidPrice1'] = {'chinese': u'买一价', 'cellType': ""} <NEW_LINE> d['bidVolume1'] = {'chinese': u'买一量', 'cellType': ""} <NEW_LINE> d['askPrice1'] = {'chinese': u'卖一价', 'cellType': ""} <NEW_LINE> d['askVolume1'] = {'chinese': u'卖一量', 'cellType': ""} <NEW_LINE> d['time'] = {'chinese': u'时间', 'cellType': ""} <NEW_LINE> d['gatewayName'] = {'chinese': u'接口', 'cellType': ""} <NEW_LINE> self.setHeaderDict(d) <NEW_LINE> self.setDataKey('vtSymbol') <NEW_LINE> self.setEventType(EVENT_TICK) <NEW_LINE> self.registerEvent()
|
市场监控组件
|
6259902e23e79379d538d559
|
class OrderedDict(dict): <NEW_LINE> <INDENT> def __init__(self, d=None, **kwargs): <NEW_LINE> <INDENT> self._order = [] <NEW_LINE> self.data = {} <NEW_LINE> if d is not None: <NEW_LINE> <INDENT> if hasattr(d, 'keys'): <NEW_LINE> <INDENT> self.update(d) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for k,v in d: <NEW_LINE> <INDENT> self[k] = v <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if len(kwargs): <NEW_LINE> <INDENT> self.update(kwargs) <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '{'+', '.join([('%r: %r' % item) for item in self.items()])+'}' <NEW_LINE> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> if not self.has_key(key): <NEW_LINE> <INDENT> self._order.append(key) <NEW_LINE> <DEDENT> dict.__setitem__(self, key, value) <NEW_LINE> <DEDENT> def copy(self): <NEW_LINE> <INDENT> return self.__class__(self) <NEW_LINE> <DEDENT> def __delitem__(self, key): <NEW_LINE> <INDENT> dict.__delitem__(self, key) <NEW_LINE> self._order.remove(key) <NEW_LINE> <DEDENT> def iteritems(self, reverse = False): <NEW_LINE> <INDENT> if not reverse: <NEW_LINE> <INDENT> for item in self._order: <NEW_LINE> <INDENT> yield (item, self[item]) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for item in reversed(self._order): <NEW_LINE> <INDENT> yield (item, self[item]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def items(self): <NEW_LINE> <INDENT> return list(self.iteritems()) <NEW_LINE> <DEDENT> def itervalues(self, reverse = False): <NEW_LINE> <INDENT> if not reverse: <NEW_LINE> <INDENT> for item in self._order: <NEW_LINE> <INDENT> yield self[item] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for item in reversed(self._order): <NEW_LINE> <INDENT> yield self[item] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def values(self): <NEW_LINE> <INDENT> return list(self.itervalues()) <NEW_LINE> <DEDENT> def iterkeys(self): <NEW_LINE> <INDENT> return iter(self._order) <NEW_LINE> <DEDENT> def keys(self): <NEW_LINE> <INDENT> return list(self._order) 
<NEW_LINE> <DEDENT> def popitem(self): <NEW_LINE> <INDENT> key = self._order[-1] <NEW_LINE> value = self[key] <NEW_LINE> del self[key] <NEW_LINE> return (key, value) <NEW_LINE> <DEDENT> def setdefault(self, item, default): <NEW_LINE> <INDENT> if self.has_key(item): <NEW_LINE> <INDENT> return self[item] <NEW_LINE> <DEDENT> self[item] = default <NEW_LINE> return default <NEW_LINE> <DEDENT> def update(self, d): <NEW_LINE> <INDENT> for k, v in d.items(): <NEW_LINE> <INDENT> self[k] = v
|
A UserDict that preserves insert order whenever possible.
|
6259902e287bf620b6272c35
|
@parser(Specs.ip_neigh_show) <NEW_LINE> class IpNeighShow(IpNeighParser): <NEW_LINE> <INDENT> pass
|
Class to parse ``ip neigh show`` or ``ip -s -s neigh show`` command output.
|
6259902e4e696a045264e649
|
class AssemblingError(Exception): <NEW_LINE> <INDENT> pass
|
Raised if the parser could not be configured due to malformed
or conflicting command declarations.
|
6259902ee76e3b2f99fd9a5b
|
class Status(models.Model): <NEW_LINE> <INDENT> verbatim = models.ForeignKey(Verbatim) <NEW_LINE> country = models.ForeignKey(Country) <NEW_LINE> status = models.CharField(max_length=5, db_index=True) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return u"%s" % self.status <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> unique_together=(("verbatim", "country"), )
|
MODEL
instances of status determined by a verbatim and a country (association model)
|
6259902ed164cc6175821fc0
|
class New(EndPoint): <NEW_LINE> <INDENT> argparse_noflag = "compose_version" <NEW_LINE> schema = { "$schema": "http://json-schema.org/draft-07/schema#", "type": "object", "properties": properties }
|
创建一个docker-compose文件.
当指定的dockercompose文件存在时创建全新内容并覆盖原来老的compose文件,老的会被重新保存为`原名.{timestamp}_bak`;
当指定的dockercompose文件不存在时创建新的compose文件.
更新操作只能更新如下内容:
1. service
2. 外部networks声明
3. 外部volumes声明
4. 外部configs声明
5. 外部secrits声明
|
6259902e287bf620b6272c36
|
class sale_agent(orm.Model): <NEW_LINE> <INDENT> _name = "sale.agent" <NEW_LINE> _description = "Sale agent" <NEW_LINE> _columns = { 'name': fields.char('Saleagent Name', size=125, required=True), 'type': fields.selection((('asesor', 'Adviser'), ('comercial', 'Commercial')), 'Type', required=True), 'partner_id': fields.many2one('res.partner', 'Partner', ondelete='cascade', help='Associated partner, is necessary for income invoices.'), 'code': fields.related('partner_id', 'ref', string='Code', readonly=True, type='char', help='Se obtiene del código de la empresa relacionada'), 'employee_id': fields.many2one('hr.employee', 'Associated Employee', help='Employee associated to agent, is necessary for set an employee ' 'to settle commissions in wage.'), 'customer': fields.one2many('res.partner.agent', 'agent_id', 'Customer', readonly=True), 'commission': fields.many2one('commission', 'Commission by default', required=True), 'settlement': fields.selection((('m', 'Monthly'), ('t', 'Quarterly'), ('s', 'Semiannual'), ('a', 'Annual')), 'Period settlement', required=True), 'active': fields.boolean('Active'), 'retention_id': fields.many2one('account.tax', 'Applied retention'), 'settlement_ids': fields.one2many('settlement.agent', 'agent_id', 'Settlements executed', readonly=True) } <NEW_LINE> _defaults = { 'active': True, 'type': 'asesor', } <NEW_LINE> def calcula_tramos(self, cr, uid, ids, base, context=None): <NEW_LINE> <INDENT> if context is None: <NEW_LINE> <INDENT> context = {} <NEW_LINE> <DEDENT> agente = self.browse(cr, uid, ids, context=context)[0] <NEW_LINE> return agente.commission.calcula_tramos(base)
|
Agente de ventas
|
6259902e5166f23b2e244426
|
class MetricsResponse(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'date_time_begin': {'key': 'dateTimeBegin', 'type': 'iso-8601'}, 'date_time_end': {'key': 'dateTimeEnd', 'type': 'iso-8601'}, 'granularity': {'key': 'granularity', 'type': 'str'}, 'series': {'key': 'series', 'type': '[MetricsResponseSeriesItem]'}, } <NEW_LINE> def __init__( self, *, date_time_begin: Optional[datetime.datetime] = None, date_time_end: Optional[datetime.datetime] = None, granularity: Optional[Union[str, "MetricsResponseGranularity"]] = None, series: Optional[List["MetricsResponseSeriesItem"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(MetricsResponse, self).__init__(**kwargs) <NEW_LINE> self.date_time_begin = date_time_begin <NEW_LINE> self.date_time_end = date_time_end <NEW_LINE> self.granularity = granularity <NEW_LINE> self.series = series
|
Metrics Response.
:param date_time_begin:
:type date_time_begin: ~datetime.datetime
:param date_time_end:
:type date_time_end: ~datetime.datetime
:param granularity: Possible values include: "PT5M", "PT1H", "P1D".
:type granularity: str or ~azure.mgmt.cdn.models.MetricsResponseGranularity
:param series:
:type series: list[~azure.mgmt.cdn.models.MetricsResponseSeriesItem]
|
6259902e1d351010ab8f4b67
|
class VerseIter(object): <NEW_LINE> <INDENT> def __init__(self, start, end='Revelation of John 22:21'): <NEW_LINE> <INDENT> start, end = sorted([Verse(start), Verse(end)]) <NEW_LINE> self._verse_iter = iter(VerseRange(start, end)) <NEW_LINE> self._verse_ref = '' <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> return str(next(self._verse_iter)) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> return self.__next__()
|
Iterator of verse references.
|
6259902e50485f2cf55dbfcb
|
class UidPublish: <NEW_LINE> <INDENT> def __init__(self, signal, raise_if_disconnected=False, **put_kw): <NEW_LINE> <INDENT> self._uid = None <NEW_LINE> self.last_start = None <NEW_LINE> self.uid_signal = signal <NEW_LINE> self.put_kw = put_kw <NEW_LINE> self.raise_if_disconnected = raise_if_disconnected <NEW_LINE> <DEDENT> @property <NEW_LINE> def uid(self): <NEW_LINE> <INDENT> return self._uid <NEW_LINE> <DEDENT> @uid.setter <NEW_LINE> def uid(self, uid): <NEW_LINE> <INDENT> self._uid = uid <NEW_LINE> if uid is None: <NEW_LINE> <INDENT> uid = '' <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.uid_signal.put(uid, **self.put_kw) <NEW_LINE> <DEDENT> except (DisconnectedError, TimeoutError): <NEW_LINE> <INDENT> logger.error('UID signal disconnected. Is the IOC running?') <NEW_LINE> if self.raise_if_disconnected: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def clear(self): <NEW_LINE> <INDENT> self.uid = None <NEW_LINE> <DEDENT> def __call__(self, name, doc): <NEW_LINE> <INDENT> if name == 'start': <NEW_LINE> <INDENT> self.last_start = doc <NEW_LINE> <DEDENT> if self.last_start and name in ('start', 'stop'): <NEW_LINE> <INDENT> self.uid = self.last_start['uid']
|
Publishes current run start document UID to a given signal
Processed on every start/end document.
Note: If used with an EpicsSignal, it's recommended to use a waveform in
place of a stringin record on the EPICS side, as the start document UID
will be published both on run start and run completion. A stringin record
will only process on change and monitor events will only be received on run
start.
Sample EPICS record definition:
record(waveform, "$(Sys)$(Dev)UID-I") {
# Using waveform here as it always reprocesses, so you'll get a
# monitor event on start/stop of the run
field(DESC, "Last run UID")
field(FTVL, "STRING")
field(MPST, "Always")
info(autosaveFields_pass0, "VAL")
}
Parameters
----------
uid_signal : Signal
The signal to publish to
raise_if_disconnected : bool, optional
Fail if the UID signal is disconnected
put_kw : kwargs, optional
Keyword arguments to send to uid_signal.put()
|
6259902e96565a6dacd2d7b6
|
class TwoPortElement(Element): <NEW_LINE> <INDENT> def __init__(self, name, node_plus, node_minus, *args, **kwargs): <NEW_LINE> <INDENT> pins = (Pin(self, 'plus', node_plus), Pin(self, 'minus', node_minus)) <NEW_LINE> super(TwoPortElement, self).__init__(name, pins, *args, **kwargs) <NEW_LINE> <DEDENT> @property <NEW_LINE> def plus(self): <NEW_LINE> <INDENT> return self.pins[0] <NEW_LINE> <DEDENT> @property <NEW_LINE> def minus(self): <NEW_LINE> <INDENT> return self.pins[1]
|
This class implements a base class for a two-port element.
|
6259902e50485f2cf55dbfcc
|
class GCodeParserSpecialCharacter(GCodeParserElementBase): <NEW_LINE> <INDENT> pass
|
G-code parser special character element
|
6259902e5e10d32532ce412b
|
class GitLabUser(GitLabMixin, User): <NEW_LINE> <INDENT> def __init__(self, token: Union[GitLabPrivateToken, GitLabOAuthToken], identifier: Optional[Union[str, int]]=None): <NEW_LINE> <INDENT> self._token = token <NEW_LINE> self._url = '/user' <NEW_LINE> self._id = identifier <NEW_LINE> if identifier: <NEW_LINE> <INDENT> if isinstance(identifier, str): <NEW_LINE> <INDENT> params = {'username': identifier} <NEW_LINE> resp = get(self._token, self.absolute_url('/users'), params) <NEW_LINE> if resp: <NEW_LINE> <INDENT> self._id = resp[0]['id'] <NEW_LINE> self._url = '/users/' + str(resp[0]['id']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ElementDoesntExistError('This username does not ' 'exist.') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self._url = '/users/' + str(identifier) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @property <NEW_LINE> def username(self) -> str: <NEW_LINE> <INDENT> return self.data['username'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def identifier(self) -> int: <NEW_LINE> <INDENT> return self._id or self.data['id'] <NEW_LINE> <DEDENT> def installed_repositories(self, installation_id: int): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def get_installations(self, jwt): <NEW_LINE> <INDENT> raise NotImplementedError
|
A GitLab user, e.g. sils :)
|
6259902e8e05c05ec3f6f683
|
class Anova(object): <NEW_LINE> <INDENT> def __init__(self, model_a, model_b, acquisition_directions, tensor): <NEW_LINE> <INDENT> super(Anova, self).__init__() <NEW_LINE> self.__model_a = model_a <NEW_LINE> self.__model_b = model_b <NEW_LINE> self.__acquisition_directions = acquisition_directions <NEW_LINE> self.__tensor = tensor <NEW_LINE> <DEDENT> def equivalent(self, significance_level): <NEW_LINE> <INDENT> f_denominator = ((self.__free_parameters(self.__model_b) - self.__free_parameters(self.__model_a))*self.__mean_squared_error(self.__model_b)) <NEW_LINE> if f_denominator == 0.0: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> f = ( ((len(self.__acquisition_directions) - self.__free_parameters(self.__model_b))*(self.__variance(self.__model_b) - self.__variance(self.__model_a)))/ f_denominator ) <NEW_LINE> d1 = len(self.__acquisition_directions) - self.__free_parameters(self.__model_b) - 1 <NEW_LINE> d2 = self.__free_parameters(self.__model_b) - self.__free_parameters(self.__model_a) <NEW_LINE> distribution = f_dist(d1, d2) <NEW_LINE> return f <= distribution.ppf(significance_level) <NEW_LINE> <DEDENT> <DEDENT> def __mean(self, model): <NEW_LINE> <INDENT> accum = complex(0,0) <NEW_LINE> for acquisition_direction in self.__acquisition_directions: <NEW_LINE> <INDENT> accum += model.value(acquisition_direction) <NEW_LINE> <DEDENT> return accum/len(self.__acquisition_directions) <NEW_LINE> <DEDENT> def __variance(self, model): <NEW_LINE> <INDENT> accum = complex(0,0) <NEW_LINE> mean = self.__mean(model) <NEW_LINE> for acquisition_direction in self.__acquisition_directions: <NEW_LINE> <INDENT> accum += (model.value(acquisition_direction) - mean)**2 <NEW_LINE> <DEDENT> return accum/len(self.__acquisition_directions) <NEW_LINE> <DEDENT> def __mean_squared_error(self, model): <NEW_LINE> <INDENT> accum = complex(0,0) <NEW_LINE> for acquisition_direction in self.__acquisition_directions: <NEW_LINE> <INDENT> accum += 
(model.value(acquisition_direction) - adc(self.__tensor, acquisition_direction))**2 <NEW_LINE> <DEDENT> return accum/len(self.__acquisition_directions) <NEW_LINE> <DEDENT> def __free_parameters(self, model): <NEW_LINE> <INDENT> accum = 0 <NEW_LINE> for n in range(model.order + 1): <NEW_LINE> <INDENT> accum += (2*n + 1) <NEW_LINE> <DEDENT> return accum
|
Analysis of Variance
|
6259902e30c21e258be9985c
|
class BinResponse(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.swagger_types = { 'id': 'int' } <NEW_LINE> self.attribute_map = { 'id': 'id' } <NEW_LINE> self._id = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def id(self): <NEW_LINE> <INDENT> return self._id <NEW_LINE> <DEDENT> @id.setter <NEW_LINE> def id(self, id): <NEW_LINE> <INDENT> self._id = id <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
|
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
|
6259902ed99f1b3c44d066f3
|
class Investment(BaseObject):
    """Investment in a financial market."""

    CODE_TYPE_ISIN = u'ISIN'
    CODE_TYPE_AMF = u'AMF'

    label = StringField('Label of stocks')
    code = StringField('Identifier of the stock')
    code_type = StringField('Type of stock code (ISIN or AMF)')
    description = StringField('Short description of the stock')
    quantity = DecimalField('Quantity of stocks')
    unitprice = DecimalField('Buy price of one stock')
    unitvalue = DecimalField('Current value of one stock')
    valuation = DecimalField('Total current valuation of the Investment')
    vdate = DateField('Value date of the valuation amount')
    diff = DecimalField('Difference between the buy cost and the current valuation')
    diff_ratio = DecimalField('Difference in ratio (1 meaning 100%) between the buy cost and the current valuation')
    portfolio_share = DecimalField('Ratio (1 meaning 100%) of the current amount relative to the total')
    performance_history = Field('History of the performances of the stock (key=years, value=diff_ratio)', dict)
    srri = IntField('Synthetic Risk and Reward Indicator of the stock (from 1 to 7)')
    asset_category = StringField('Category of the stock')
    recommended_period = StringField('Recommended investment period of the stock')

    # Amounts expressed in the stock's original (foreign) currency.
    original_currency = StringField('Currency of the original amount')
    original_valuation = DecimalField('Original valuation (in another currency)')
    original_unitvalue = DecimalField('Original unitvalue (in another currency)')
    original_unitprice = DecimalField('Original unitprice (in another currency)')
    original_diff = DecimalField('Original diff (in another currency)')

    def __repr__(self):
        return '<Investment label=%r code=%r valuation=%r>' % (
            self.label, self.code, self.valuation)

    @property
    def diff_percent(self):
        # Alias for diff_ratio, kept for backward compatibility.
        return self.diff_ratio

    @diff_percent.setter
    def diff_percent(self, value):
        self.diff_ratio = value
|
Investment in a financial market.
|
6259902e56b00c62f0fb3911
|
class PreviewScreen(Screen):
    """Shows the list in progress."""

    def on_enter(self, *args):
        # Turn the toolbar action into a "back to picker" button;
        # walk_toolbar(disabled=False) presumably re-enables it — TODO confirm.
        toolbar = walk_toolbar(disabled=False)
        toolbar.icon = 'arrow-left-bold-outline'
        toolbar.on_release = lambda: setattr(self.parent, 'current', 'picker')
        self.parent.transition.direction = 'right'
|
Shows list in progress
|
6259902e287bf620b6272c38
|
class RecipeSerializer(serializers.ModelSerializer):
    """Serializer for recipe objects."""

    # Related objects are exposed by primary key only.
    ingredients = serializers.PrimaryKeyRelatedField(
        queryset=Ingredient.objects.all(), many=True
    )
    tags = serializers.PrimaryKeyRelatedField(
        queryset=Tag.objects.all(), many=True
    )

    class Meta:
        model = Recipe
        fields = ('id', 'title', 'ingredients', 'tags',
                  'time_minutes', 'price', 'link')
        read_only_fields = ('id',)
|
Serialize for recipe objects
|
6259902e5166f23b2e244428
|
class SublocationAPIViewSet(viewsets.ReadOnlyModelViewSet):
    """Read-only list of locations with basic information.

    By default every location is returned.  Pass a ``pk`` GET parameter to
    fetch only the direct children of that location, e.g.
    ``/api-locations/sublocations/pk=1``.
    """
    queryset = Location.objects.all()
    serializer_class = LocationListSerializer
    permission_classes = (rest_permissions.AllowAny,)
    paginate_by = None

    def get_queryset(self):
        lang = translation.get_language()
        pk = self.request.QUERY_PARAMS.get('pk', None)
        if not pk:
            # No parent requested: full, locale-sorted list.
            return sort_by_locale(Location.objects.all(),
                                  lambda x: x.name, lang)
        try:
            location = Location.objects.get(pk=pk)
        except Location.DoesNotExist:
            # Unknown pk falls back to the full list (original behaviour).
            queryset = Location.objects.all()
        else:
            # Children are cached per (slug, language).
            key = "{}_{}_{}".format(location.slug, lang, 'sub')
            cached_qs = redis_cache.get(key, None)
            if cached_qs is None or not settings.USE_CACHE:
                queryset = location.location_set.all()
                redis_cache.set(key, queryset)
            else:
                queryset = cached_qs
        return sort_by_locale(queryset, lambda x: x.__unicode__(), lang)
|
Prosty widok umożliwiający pobranie listy lokalizacji z podstawowymi informacjami.
Domyślnie prezentowana jest lista wszystkich lokalizacji. Do parametrów GET
możemy dodać `pk` lokalizacji, której bezpośrednie "dzieci" chcemy pobrać, np.:
```/api-locations/sublocations/pk=1```
|
6259902e30c21e258be9985e
|
class CollectorService(os_service.Service):
    """Listener for the collector service."""

    def start(self):
        """Start the batch listeners and, if configured, the UDP receive loop."""
        dispatcher_managers = dispatcher.load_dispatcher_manager()
        (self.meter_manager, self.event_manager) = dispatcher_managers
        self.sample_listener = None
        self.event_listener = None
        super(CollectorService, self).start()
        if cfg.CONF.collector.udp_address:
            self.tg.add_thread(self.start_udp)
        transport = messaging.get_transport(optional=True)
        if transport:
            if list(self.meter_manager):
                sample_target = oslo_messaging.Target(
                    topic=cfg.CONF.publisher_notifier.metering_topic)
                self.sample_listener = (
                    messaging.get_batch_notification_listener(
                        transport, [sample_target],
                        [SampleEndpoint(self.meter_manager)],
                        allow_requeue=True,
                        batch_size=cfg.CONF.collector.batch_size,
                        batch_timeout=cfg.CONF.collector.batch_timeout))
                self.sample_listener.start()
            if cfg.CONF.notification.store_events and list(self.event_manager):
                event_target = oslo_messaging.Target(
                    topic=cfg.CONF.publisher_notifier.event_topic)
                self.event_listener = (
                    messaging.get_batch_notification_listener(
                        transport, [event_target],
                        [EventEndpoint(self.event_manager)],
                        allow_requeue=True,
                        batch_size=cfg.CONF.collector.batch_size,
                        batch_timeout=cfg.CONF.collector.batch_timeout))
                self.event_listener.start()
            if not cfg.CONF.collector.udp_address:
                # No UDP thread: add a weekly no-op timer to keep the
                # thread group alive.
                self.tg.add_timer(604800, lambda: None)

    def start_udp(self):
        """Receive metering samples over UDP until stop() clears udp_run."""
        address_family = socket.AF_INET
        if netutils.is_valid_ipv6(cfg.CONF.collector.udp_address):
            address_family = socket.AF_INET6
        udp = socket.socket(address_family, socket.SOCK_DGRAM)
        udp.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        udp.bind((cfg.CONF.collector.udp_address,
                  cfg.CONF.collector.udp_port))
        self.udp_run = True
        while self.udp_run:
            data, source = udp.recvfrom(64 * units.Ki)
            try:
                sample = msgpack.loads(data, encoding='utf-8')
            except Exception:
                LOG.warning(_("UDP: Cannot decode data sent by %s"), source)
            else:
                try:
                    LOG.debug("UDP: Storing %s", sample)
                    self.meter_manager.map_method('record_metering_data',
                                                  sample)
                except Exception:
                    LOG.exception(_("UDP: Unable to store meter"))

    def stop(self):
        """Stop the UDP loop and kill any running listeners."""
        self.udp_run = False
        if self.sample_listener:
            utils.kill_listeners([self.sample_listener])
        if self.event_listener:
            utils.kill_listeners([self.event_listener])
        super(CollectorService, self).stop()

    def record_metering_data(self, context, data):
        """Dispatch metering data to every configured meter dispatcher."""
        self.meter_manager.map_method('record_metering_data', data=data)
|
Listener for the collector service.
|
6259902e1f5feb6acb163c42
|
class Body37(InstanceCreateRequest):
    """Swagger model wrapping an InstanceCreateRequest body.

    NOTE: originally auto generated by the swagger code generator program.
    """

    # Inherit the parent's swagger metadata when it defines any.
    swagger_types = {}
    if hasattr(InstanceCreateRequest, "swagger_types"):
        swagger_types.update(InstanceCreateRequest.swagger_types)

    attribute_map = {}
    if hasattr(InstanceCreateRequest, "attribute_map"):
        attribute_map.update(InstanceCreateRequest.attribute_map)

    def __init__(self, *args, **kwargs):
        self.discriminator = None
        InstanceCreateRequest.__init__(self, *args, **kwargs)

    def to_dict(self):
        """Return the model's properties as a dict, recursing into nested models."""
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [x.to_dict() if hasattr(x, "to_dict") else x
                                for x in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {k: v.to_dict() if hasattr(v, "to_dict") else v
                                for k, v in value.items()}
            else:
                result[attr] = value
        if issubclass(Body37, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return a pretty-printed string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        return self.to_str()

    def __eq__(self, other):
        if not isinstance(other, Body37):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
|
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
|
6259902e796e427e5384f7ce
|
class FastHttpUser(User):
    """HTTP user backed by geventhttpclient instead of python-requests.

    Significantly faster than HttpUser, but not as capable.  Behaviour is
    defined by the user's tasks; a session-keeping ``client`` attribute is
    created on instantiation.
    """

    client: FastHttpSession = None
    # Connection/behaviour knobs forwarded verbatim to FastHttpSession.
    network_timeout: float = 60.0
    connection_timeout: float = 60.0
    max_redirects: int = 5
    max_retries: int = 1
    insecure: bool = True
    abstract = True

    def __init__(self, environment):
        super().__init__(environment)
        host = self.host
        if host is None:
            raise LocustError(
                "You must specify the base host. Either in the host attribute in the User class, or on the command line using the --host option."
            )
        if re.match(r"^https?://[^/]+", host, re.I) is None:
            raise LocustError("Invalid host (`%s`), must be a valid base URL. E.g. http://example.com" % host)
        self.client = FastHttpSession(
            self.environment,
            base_url=host,
            network_timeout=self.network_timeout,
            connection_timeout=self.connection_timeout,
            max_redirects=self.max_redirects,
            max_retries=self.max_retries,
            insecure=self.insecure,
        )
|
FastHttpUser uses a different HTTP client (geventhttpclient) compared to HttpUser (python-requests).
It's significantly faster, but not as capable.
The behaviour of this user is defined by its tasks. Tasks can be declared either directly on the
class by using the :py:func:`@task decorator <locust.task>` on the methods, or by setting
the :py:attr:`tasks attribute <locust.User.tasks>`.
This class creates a *client* attribute on instantiation which is an HTTP client with support
for keeping a user session between requests.
|
6259902e507cdc57c63a5dfb
|
class StatMain(tk.Frame):
    """Frame that displays the user's statistics and offers a reset button."""

    def __init__(self, parent):
        tk.Frame.__init__(self, parent)
        top_bar = tk.Frame(self)
        top_bar.pack(side=tk.TOP, fill=tk.X, pady=20, padx=5)
        self.btn_home = tk.Button(top_bar, text='Home')
        self.btn_home.pack(side=tk.LEFT)
        self.repo = Repository(DB_PATH)
        self.listbox = MultiColumnListBox(self, headers=['Stat', 'Value'])
        self.listbox.pack(fill=tk.X, expand=True, anchor=tk.N,
                          padx=10, pady=10)
        self.btn_reset = tk.Button(self.listbox, text='Reset Stat',
                                   command=self.clean_stat)
        self.btn_reset.grid(pady=10)
        self._build_tree()

    def _build_tree(self):
        """Insert one (name, value) row per field of the stats record."""
        stats = self.repo.get_stat()
        for attr in stats._fields:
            self.listbox.tree.insert(
                '', tk.END,
                values=[attr.replace('_', ' '), getattr(stats, attr)])

    def clean_stat(self):
        """Ask for confirmation, then wipe the stats and rebuild the list."""
        answer = msg.askquestion(
            'Warning',
            'Are you sure you want to reset your statistics?\nYou data will be lost.')
        if answer == 'yes':
            self.listbox.tree.delete(*self.listbox.tree.get_children())
            self.repo.del_stat()
            self._build_tree()
|
StatMain is a frame that displays the user's statistics and allows resetting them.
|
6259902e66673b3332c31443
|
class Storage(object):
    """Abstract base class of the DB backend abstraction."""

    def __init__(self, name, for_sync):
        self.is_new = not os.path.exists(name)
        # A brand-new database is always opened in sync mode.
        self.for_sync = for_sync or self.is_new
        self.open(name, create=self.is_new)

    @classmethod
    def import_module(cls):
        """Import and return the backend's module (subclass responsibility)."""
        raise NotImplementedError

    def open(self, name, create):
        """Open (and optionally create) the underlying database."""
        raise NotImplementedError

    def close(self):
        raise NotImplementedError

    def get(self, key):
        raise NotImplementedError

    def put(self, key, value):
        raise NotImplementedError

    def write_batch(self):
        # NOTE(review): backends presumably return a batching write object
        # here — confirm against a concrete subclass.
        raise NotImplementedError

    def iterator(self, prefix=b'', reverse=False):
        raise NotImplementedError
|
Abstract base class of the DB backend abstraction.
|
6259902e8c3a8732951f75ab
|
class PowerManagerAdapter(SuspendAdapter):
    """Adapter for the org.freedesktop.PowerManagement.Inhibit interface.

    This is the default adapter, implemented by most desktop sessions; some
    sessions expose this interface under different bus names with other
    small variances.
    """

    def __init__(self,
                 bus_name='org.freedesktop.PowerManagement',
                 object_name='/org/freedesktop/PowerManagement/Inhibit',
                 interface_name='org.freedesktop.PowerManagement.Inhibit'):
        SuspendAdapter.__init__(self, bus_name, object_name, interface_name)

    def _dbus_inhibit_call(self):
        # Inhibit() returns a cookie that must be handed back to UnInhibit().
        self.cookie = self.iface.Inhibit(self.PROGRAM, self.ACTIVITY)

    def _dbus_uninhibit_call(self):
        self.iface.UnInhibit(self.cookie)
|
Default Adapter, implemented by most desktop sessions
Adapter for org.freedesktop.PowerManagement.Inhibit Interface
Some desktop sessions use different bus names for this interface
and have other small variances
|
6259902ee76e3b2f99fd9a5f
|
class test_schema01(wttest.WiredTigerTestCase):
    """Test various tree types becoming empty."""

    basename = 'test_schema01'
    tablename = 'table:' + basename
    cgname = 'colgroup:' + basename

    def __init__(self, *args, **kwargs):
        wttest.WiredTigerTestCase.__init__(self, *args, **kwargs)
        self.reconcile = False

    def create_table(self):
        """Create the population table with two column groups."""
        self.pr('create table')
        self.session.create(
            self.tablename,
            'key_format=5s,value_format=HQ,' +
            'columns=(country,year,population),' +
            'colgroups=(year,population)')
        self.session.create(self.cgname + ':year', 'columns=(year)')
        self.session.create(self.cgname + ':population',
                            'columns=(population)')

    def drop_table(self):
        self.pr('drop table')
        self.dropUntilSuccess(self.session, self.tablename)

    def cursor(self, config=None):
        self.pr('open cursor')
        return self.session.open_cursor(self.tablename, None, config)

    def test_populate(self):
        """Populate, verify (with and without reopening), then drop."""
        for reopen in (False, True):
            self.create_table()
            c = self.cursor('overwrite')
            try:
                for record in pop_data:
                    c[record[0]] = record[1:]
            finally:
                c.close()
            if reopen:
                self.reopen_conn()
            c = self.cursor()
            for expectpos, record in enumerate(c):
                self.assertEqual(str(record), expected_out[expectpos])
            c.close()
            self.drop_table()
|
Test various tree types becoming empty
|
6259902ea4f1c619b294f649
|
class PyPyprof2html(PythonPackage):
    """Python cProfile and hotshot profile's data to HTML converter."""

    pypi = "pyprof2html/pyprof2html-0.3.1.tar.gz"

    version('0.3.1',
            sha256='db2d37e21d8c76f2fd25fb1ba9273c9b3ff4a98a327e37d943fed1ea225a6720')

    patch('version_0.3.1.patch', when="@0.3.1")

    depends_on('py-setuptools', type='build')
    depends_on('py-jinja2', type=('build', 'run'))
|
Python cProfile and hotshot profile's data to HTML Converter
|
6259902eec188e330fdf98e6
|
class V1PolyaxonSidecarContainer(BaseConfig, polyaxon_sdk.V1PolyaxonSidecarContainer):
    """Configuration of the Polyaxon sidecar helper container.

    The sidecar collects outputs, artifacts, and metadata about the main
    container.  Supported fields: image, image_tag, image_pull_policy,
    resources, sleep_interval, sync_interval.
    """

    SCHEMA = PolyaxonSidecarContainerSchema
    IDENTIFIER = "polyaxon_sidecar"
    REDUCED_ATTRIBUTES = [
        "imageTag",
        "imagePullPolicy",
        "sleepInterval",
        "resources",
        "syncInterval",
        "monitorLogs",
    ]

    def get_image(self, default_version: str):
        """Return ``image:tag``, using defaults for any unset part."""
        base = self.image or "polyaxon/polyaxon-sidecar"
        tag = default_version if self.image_tag is None else self.image_tag
        return "{}:{}".format(base, tag) if tag else base

    def get_resources(self):
        """Return configured resources, or the default sidecar resources."""
        return self.resources or get_sidecar_resources()
|
Polyaxon sidecar is a helper container that collects outputs, artifacts,
and metadata about the main container.
Polyaxon CE and Polyaxon Agent are deployed with default values for the sidecar container,
however if you need to control or update one or several aspects
of how the sidecar container that gets injected, this guide walks through the possible options.
Args:
image: str, optional.
image_tag: str, optional.
image_pull_policy: str, optional.
resources: V1ResourceRequirements, optional.
sleep_interval: int, optional.
sync_interval: int, optional.
## YAML usage
```yaml
>>> sidecar:
>>> image: polyaxon/polyaxon-sidecar
>>> imageTag: v1.x
>>> imagePullPolicy: IfNotPresent
>>> resources: requests:
>>> memory: "64Mi"
>>> cpu: "50m"
>>> sleepInterval: 5
>>> syncInterval: 60
```
## Fields
### image
The container image to use.
```yaml
>>> sidecar:
>>> image: polyaxon/polyaxon-sidecar
```
### imageTag
The container image tag to use.
```yaml
>>> sidecar:
>>> imageTag: dev
```
### imagePullPolicy
The image pull policy to use, it must be a valid policy supported by Kubernetes.
```yaml
>>> sidecar:
>>> imagePullPolicy: Always
```
### resources
The resources requirements to allocate to the container.
```yaml
>>> sidecar:
>>> resources:
>>> memory: "64Mi"
>>> cpu: "50m"
```
### sleepInterval
The interval between two consecutive checks, default 10s.
> **N.B.1**: It's possible to alter this behaviour on per operation level
> using the sidecar plugin.
> **N.B.2**: be careful of the trade-off between a large sleep interval and a short interval,
> you don't want the sidecar to overwhelm the API and Kubernetes API,
> and you don't want also the sidecar to penalize your workload with additional latency.
```yaml
>>> sidecar:
>>> sleepInterval: 5
```
### syncInterval
The interval between two consecutive archiving checks. default 10s.
> **N.B.1**: It's possible to alter this behaviour on per operation level
> using the sidecar plugin.
> **N.B.2**: Only changed files since a previous check are synced.
> **N.B.3**: If you don't need to access intermediate artifacts while the workload is running,
> you might set this field to a high value, or `-1` to only trigger
> this behavior when the workload is done.
```yaml
>>> sidecar:
>>> syncInterval: 5
```
|
6259902ed4950a0f3b111667
|
class itkInPlaceImageFilterIF2ICVF22(itkImageToImageFilterBPython.itkImageToImageFilterIF2ICVF22):
    """Proxy of C++ itkInPlaceImageFilterIF2ICVF22 class (SWIG generated)."""

    thisown = _swig_property(lambda x: x.this.own(),
                             lambda x, v: x.this.own(v),
                             doc='The membership flag')

    def __init__(self, *args, **kwargs):
        # Instances must be created through New(); direct construction is
        # not supported.
        raise AttributeError("No constructor defined")

    __repr__ = _swig_repr

    InputImageDimension = _itkInPlaceImageFilterBPython.itkInPlaceImageFilterIF2ICVF22_InputImageDimension
    OutputImageDimension = _itkInPlaceImageFilterBPython.itkInPlaceImageFilterIF2ICVF22_OutputImageDimension

    def SetInPlace(self, *args):
        return _itkInPlaceImageFilterBPython.itkInPlaceImageFilterIF2ICVF22_SetInPlace(self, *args)

    def GetInPlace(self):
        return _itkInPlaceImageFilterBPython.itkInPlaceImageFilterIF2ICVF22_GetInPlace(self)

    def InPlaceOn(self):
        return _itkInPlaceImageFilterBPython.itkInPlaceImageFilterIF2ICVF22_InPlaceOn(self)

    def InPlaceOff(self):
        return _itkInPlaceImageFilterBPython.itkInPlaceImageFilterIF2ICVF22_InPlaceOff(self)

    def CanRunInPlace(self):
        return _itkInPlaceImageFilterBPython.itkInPlaceImageFilterIF2ICVF22_CanRunInPlace(self)

    __swig_destroy__ = _itkInPlaceImageFilterBPython.delete_itkInPlaceImageFilterIF2ICVF22

    @staticmethod
    def cast(*args):
        return _itkInPlaceImageFilterBPython.itkInPlaceImageFilterIF2ICVF22_cast(*args)

    def GetPointer(self):
        return _itkInPlaceImageFilterBPython.itkInPlaceImageFilterIF2ICVF22_GetPointer(self)

    @staticmethod
    def New(*args, **kargs):
        """Create, initialize via itkTemplate.New, and return a new instance."""
        obj = itkInPlaceImageFilterIF2ICVF22.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
|
Proxy of C++ itkInPlaceImageFilterIF2ICVF22 class
|
6259902e30c21e258be99860
|
class TestQueueResult(unittest.TestCase):
    """QueueResult unit test stubs."""

    def setUp(self):
        """No fixtures required yet."""

    def tearDown(self):
        """No teardown required yet."""

    def testQueueResult(self):
        """Placeholder for the QueueResult test."""
|
QueueResult unit test stubs
|
6259902e925a0f43d25e909c
|
class JSONField(with_metaclass(models.SubfieldBase, models.TextField)):
    """TextField that stores python structures as JSON strings in the DB.

    Borrowed from django-social-auth:
    https://github.com/omab/django-social-auth/blob/master/social_auth/fields.py
    """

    def __init__(self, *args, **kwargs):
        # Default to an empty JSON object unless the caller set one.
        if kwargs.get('default', None) is None:
            kwargs['default'] = '{}'
        models.TextField.__init__(self, *args, **kwargs)

    def to_python(self, value):
        """Decode the stored JSON string into a python value."""
        if isinstance(value, dict):
            return value
        if self.blank and not value:
            return None
        if isinstance(value, string_types):
            try:
                return json.loads(value)
            except Exception as e:
                raise ValidationError(str(e))
        return value

    def validate(self, value, model_instance):
        """Reject strings that are not valid JSON."""
        if isinstance(value, string_types):
            super(JSONField, self).validate(value, model_instance)
            try:
                json.loads(value)
            except Exception as e:
                raise ValidationError(str(e))

    def get_prep_value(self, value):
        """Encode *value* as JSON for storage."""
        try:
            return json.dumps(value, cls=DjangoJSONEncoder)
        except Exception as e:
            raise ValidationError(str(e))

    def value_to_string(self, obj):
        return smart_text(self.get_prep_value(self._get_val_from_obj(obj)))
|
Simple JSON field that stores python structures as JSON strings
on database.
Borrowed from django-social-auth :):
https://github.com/omab/django-social-auth/blob/master/social_auth/fields.py
|
6259902e8a349b6b4368728f
|
class NcpdqError(ExternalCommandError):
    """Raised when an ncpdq invocation fails."""

    def __init__(self, class_name, filename, command, traceback_text):
        # The fixed tool name 'ncpdq' is inserted into the generic
        # ExternalCommandError argument list.
        super().__init__(class_name, 'ncpdq', filename, command,
                         traceback_text)
|
When ncpdq fails.
|
6259902ed10714528d69eeb5
|
class ScoreService(Service):
    """Service class interacting with the Score datastore."""

    __model__ = Score

    def new_score(self, winner, loser, first_user_score, second_user_score):
        """Create and persist a Score for a finished game.

        Raises endpoints.BadRequestException when the game was a draw.
        """
        if first_user_score == second_user_score:
            raise endpoints.BadRequestException('Score cannot be created, game was a draw')
        score = super(ScoreService, self).new()
        data = {
            "date": date.today(),
            "winner": winner,
            "loser": loser,
            # max()/min() replace the duplicated conditional expressions,
            # which computed exactly these values.
            "winner_score": max(first_user_score, second_user_score),
            "loser_score": min(first_user_score, second_user_score),
        }
        super(ScoreService, self).update(score, **data)
        return score

    def get_user_scores(self, user):
        """Return all scores where the user was either winner or loser."""
        return Score.query(
            ndb.OR(Score.winner == user.key, Score.loser == user.key)).fetch()

    def to_form(self, score):
        """Convert a Score entity into a ScoreForm message."""
        return ScoreForm(date=str(score.date),
                         winner=score.winner.get().name,
                         loser=score.loser.get().name,
                         winner_score=score.winner_score,
                         loser_score=score.loser_score)
|
Service class interacting with the Score datastore
|
6259902e507cdc57c63a5dfd
|
class UpdateFleetNameRequest(AbstractModel):
    """Request parameters for the UpdateFleetName API call."""

    def __init__(self):
        # FleetId: identifier of the fleet to rename.
        # Name: the new fleet name.
        self.FleetId = None
        self.Name = None

    def _deserialize(self, params):
        """Populate attributes from *params*, warning about unknown keys."""
        self.FleetId = params.get("FleetId")
        self.Name = params.get("Name")
        # Set difference replaces the manual removal loop; also fixes the
        # 'memeber_set' / 'fileds' typos of the original.
        member_set = set(params.keys()) - set(vars(self))
        if member_set:
            warnings.warn("%s fields are useless." % ",".join(member_set))
|
UpdateFleetName请求参数结构体
|
6259902e66673b3332c31445
|
@dataclass
class Synchronizer:
    """A user database synchronizer."""

    src: Database
    dst: Database
    user: UserSynchronizer_ = field(init=False, repr=False)
    group: GroupSynchronizer_ = field(init=False, repr=False)
    # Synchronizer classes are overridable on subclasses.
    UserSynchronizer: ClassVar[Type[UserSynchronizer_]] = UserSynchronizer
    GroupSynchronizer: ClassVar[Type[GroupSynchronizer_]] = GroupSynchronizer

    def __post_init__(self) -> None:
        self.user = self.UserSynchronizer(self.src.User, self.dst.User)
        self.group = self.GroupSynchronizer(self.src.Group, self.dst.Group)

    def entry(self, src, syncids=None, strict=False):
        """Synchronize one source entry into the destination database."""
        syncid = SyncId(uuid=src.uuid)
        if syncids is not None:
            syncids |= {syncid}
        if isinstance(src, User):
            syncer, DstEntry = self.user, self.dst.User
        else:
            syncer, DstEntry = self.group, self.dst.Group
        dst = DstEntry.find_syncid(syncid)
        if dst is None and not strict:
            logger.info("guessing matching entry for %s", src)
            dst = DstEntry.find_match(src)
        if dst is None:
            logger.info("creating new entry for %s", src)
            dst = DstEntry.create()
        logger.info("synchronizing entry %s", src)
        syncer.sync(src, dst)

    def delete(self, syncids, invert=False, delete=False):
        """Delete (or merely disable) destination entries by sync id."""
        for dst in self.dst.find_syncids(syncids, invert=invert):
            if delete:
                logger.info("deleting entry %s", dst)
                dst.delete()
            elif dst.enabled:
                logger.info("disabling entry %s", dst)
                dst.enabled = False

    def sync(self, persist=True, strict=False, delete=False):
        """Run the synchronization loop over the source change feed."""
        self.dst.prepare()
        # syncids accumulates seen ids during a refresh; None afterwards.
        syncids = set()
        for src in self.src.watch(cookie=self.dst.state.cookie,
                                  persist=persist):
            if isinstance(src, Entry):
                self.entry(src, syncids=syncids, strict=strict)
                if not syncids:
                    self.dst.commit()
            elif isinstance(src, UnchangedSyncIds):
                if syncids is not None:
                    syncids |= set(src)
            elif isinstance(src, DeletedSyncIds):
                self.delete(src, invert=False, delete=delete)
            elif isinstance(src, RefreshComplete):
                if syncids is not None and src.autodelete:
                    logger.info("deleting unmentioned entries")
                    self.delete(SyncIds(syncids), invert=True, delete=delete)
                syncids = None
                logger.info("refresh complete")
                self.dst.commit()
            elif isinstance(src, SyncCookie):
                self.dst.state.cookie = src
                if not syncids:
                    self.dst.commit()
            else:
                raise TypeError(src)
|
A user database synchronizer
|
6259902e1d351010ab8f4b6c
|
class ModelNameIdentBase:
    """Manages the naming of model components.

    An identifier is composed of a "root name" (a concise summary of the
    model type) and a "component counter" used to distinguish components
    sharing the same root name in composed models.

    Parameters
    ----------
    rootName : string, optional
        A concise name for the model (default "").
    """

    def __init__(self, rootName=""):
        self._root = rootName
        self.componentCounter = 0

    def setComponentCounter(self, c):
        """Assign a new component counter."""
        self.componentCounter = c

    def getComponentCounter(self):
        """Return the current component counter."""
        return self.componentCounter

    def setRootName(self, root):
        """Set the root name, rejecting the forbidden characters '_()'."""
        if re.match("^[^_^\(^\)]*$", root) is None:
            raise(PE.PyAValError("The proposed root '"+root+"' contains forbidden characters: '_()'."))
        self._root = root

    def identifier(self):
        """Return the root name, with "(counter)" appended when nonzero."""
        ident = self._root
        if self.componentCounter != 0:
            ident += "(" + str(self.componentCounter) + ")"
        return ident

    def getRoot(self):
        """Return the root name."""
        return self._root

    def composeVariableName(self, property, rootName=None, counter=None):
        """Build a variable name of the form ``property[_root[(counter)]]``.

        Missing *rootName*/*counter* default to this instance's values.
        """
        root = self._root if rootName is None else rootName
        cnt = self.componentCounter if counter is None else counter
        if root == "" and cnt == 0:
            return property
        name = property + "_" + root
        if cnt != 0:
            name += "(" + str(cnt) + ")"
        return name

    def convertSpecifier(self, specifier):
        """Normalize a specifier into ``(name, property, component, counter)``.

        A specifier is either a plain property string or a tuple of the
        form ``(property[, component[, counter]])``.
        """
        comp, cnt = "", 0
        if isinstance(specifier, tuple):
            prop = specifier[0]
            if len(specifier) >= 2:
                comp = specifier[1]
            if len(specifier) == 3:
                cnt = specifier[2]
        else:
            prop = specifier
        return (self.composeVariableName(prop, comp, cnt), prop, comp, cnt)

    def specifierToName(self, input):
        """Convert a specifier, or a list/dict of specifiers, into name(s)."""
        if isinstance(input, list):
            return [self.convertSpecifier(item)[0] for item in input]
        if isinstance(input, dict):
            return {self.convertSpecifier(key)[0]: val
                    for key, val in six.iteritems(input)}
        return self.convertSpecifier(input)[0]

    def decomposeVariableName(self, name):
        """Split a composed variable name into (property, root, counter)."""
        m = re.match("([^_]+)(_([^\(]*)(\(([0-9]+)\))?)?", name)
        return m.group(1), m.group(3), m.group(5)
|
Managing the naming of model components.
This class handles the names of models or model components.
Individual names or identifiers are composed of a "root name"
and a "component counter". The root name is supposed to be a
concise string summarizing the type of model, while the component
counter is used to distinguish between components
with the same root name in composed models.
Parameters
----------
rootName : string, optional
A concise name for the model (default="").
Notes
-----
The term "specifier" is used to indicate either a string
containing a variable name or a tuple composed of
(property, root name, component counter), which
specifies a variable. In some cases, parts of the specifier
tuple may be left out.
|
6259902ea4f1c619b294f64b
|
class PluginContext(Context): <NEW_LINE> <INDENT> def __init__(self, request, dict=None, current_app=None): <NEW_LINE> <INDENT> if current_app is None: <NEW_LINE> <INDENT> Context.__init__(self, dict) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> Context.__init__(self, dict, current_app=current_app) <NEW_LINE> <DEDENT> for processor in _STANDARD_REQUEST_CONTEXT_PROCESSORS: <NEW_LINE> <INDENT> self.update(processor(request))
|
A template Context class similar to :class:`~django.template.context.RequestContext`, that enters some pre-filled data.
This ensures that variables such as ``STATIC_URL`` and ``request`` are available in the plugin templates.
|
6259902eb57a9660fecd2ad8
|
class ConfirmDialogWithInput(ConfirmDialog): <NEW_LINE> <INDENT> def __init__(self, *args): <NEW_LINE> <INDENT> super(ConfirmDialogWithInput, self).__init__(*args) <NEW_LINE> self.keyboard = input.Keyboard.create() <NEW_LINE> <DEDENT> def enter_text(self, text, clear=True): <NEW_LINE> <INDENT> text_field = self._select_text_field() <NEW_LINE> text_field.write(text, clear) <NEW_LINE> <DEDENT> def _select_text_field(self): <NEW_LINE> <INDENT> return self.select_single( ubuntuuitoolkit.TextField, objectName='inputField')
|
ConfirmDialogWithInput Autopilot emulator.
|
6259902e5166f23b2e24442c
|
class AbstractChemenvError(Exception): <NEW_LINE> <INDENT> def __init__(self, cls, method, msg): <NEW_LINE> <INDENT> self.cls = cls <NEW_LINE> self.method = method <NEW_LINE> self.msg = msg <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.cls) + ": " + self.method + "\n" + repr(self.msg)
|
Abstract class for Chemenv errors.
|
6259902ed99f1b3c44d066f9
|
class TextGenericDropForSectionTV(TextGenericDropForPropertyTV): <NEW_LINE> <INDENT> targets = [BaseProperty, BaseSection]
|
Can drop Properties and Sections; the capability to only drop
Properties into Sections (not into Documents) is inherited.
|
6259902e8c3a8732951f75ae
|
class S3Config(BaseConfig): <NEW_LINE> <INDENT> targets = ( ('buckets', 'Buckets', 'list_buckets', {}, False), ) <NEW_LINE> def __init__(self, thread_config): <NEW_LINE> <INDENT> self.buckets = {} <NEW_LINE> self.buckets_count = 0 <NEW_LINE> super(S3Config, self).__init__(thread_config) <NEW_LINE> <DEDENT> def parse_buckets(self, bucket, params): <NEW_LINE> <INDENT> bucket['name'] = bucket.pop('Name') <NEW_LINE> api_client = params['api_clients'][get_s3_list_region(params['api_clients'].keys()[0])] <NEW_LINE> bucket['CreationDate'] = str(bucket['CreationDate']) <NEW_LINE> bucket['region'] = get_s3_bucket_location(api_client, bucket['name']) <NEW_LINE> if bucket['region'] == 'EU': <NEW_LINE> <INDENT> bucket['region'] = 'eu-west-1' <NEW_LINE> <DEDENT> if bucket['region'] not in params['api_clients']: <NEW_LINE> <INDENT> printInfo('Skipping bucket %s (region %s outside of scope)' % (bucket['name'], bucket['region'])) <NEW_LINE> self.buckets_count -= 1 <NEW_LINE> return <NEW_LINE> <DEDENT> api_client = params['api_clients'][bucket['region']] <NEW_LINE> get_s3_bucket_logging(api_client, bucket['name'], bucket) <NEW_LINE> get_s3_bucket_versioning(api_client, bucket['name'], bucket) <NEW_LINE> get_s3_bucket_webhosting(api_client, bucket['name'], bucket) <NEW_LINE> bucket['grantees'] = get_s3_acls(api_client, bucket['name'], bucket) <NEW_LINE> get_s3_bucket_policy(api_client, bucket['name'], bucket) <NEW_LINE> bucket['id'] = self.get_non_aws_id(bucket['name']) <NEW_LINE> self.buckets[bucket['id']] = bucket
|
S3 configuration for all AWS regions
:cvar targets: Tuple with all S3 resource names that may be fetched
|
6259902ebe8e80087fbc00d1
|
class SpectrumParameterWidget(ChoiceParameterWidget): <NEW_LINE> <INDENT> def __init__(self, ds, **kwargs): <NEW_LINE> <INDENT> self.choice_dict = {"{}".format(str(s)):s for s in ds.get_all_spectra().values()} <NEW_LINE> if len(self.choice_dict) == 0: <NEW_LINE> <INDENT> self.choice_dict = {"No spectra loaded": None} <NEW_LINE> <DEDENT> super().__init__(list(self.choice_dict.keys()), **kwargs) <NEW_LINE> <DEDENT> def get_value(self): <NEW_LINE> <INDENT> return self.choice_dict[self.field.text] <NEW_LINE> <DEDENT> def set_value(self, new): <NEW_LINE> <INDENT> self.field.text = str(new)
|
A widget for a dropdown menu of spectra.
|
6259902e0a366e3fb87dda3e
|
class TGSTTADataset(TGSDataset): <NEW_LINE> <INDENT> def __init__(self, postproc=None, **kwargs): <NEW_LINE> <INDENT> super().__init__(**kwargs) <NEW_LINE> self.postproc = postproc <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return 2 * super().__len__() <NEW_LINE> <DEDENT> def __getitem__(self, idx): <NEW_LINE> <INDENT> tp = idx // len(self.data) <NEW_LINE> idx = idx % len(self.data) <NEW_LINE> record = deepcopy(self.data[idx]) <NEW_LINE> record = self._apply(record, tp) <NEW_LINE> return record <NEW_LINE> <DEDENT> def _apply(self, record, tp): <NEW_LINE> <INDENT> if tp == 1: <NEW_LINE> <INDENT> record["image"] = record["image"][:,::-1] <NEW_LINE> if "mask" in record: <NEW_LINE> <INDENT> record["mask"] = record["mask"][:,::-1] <NEW_LINE> <DEDENT> record["id"] += "_flipped" <NEW_LINE> <DEDENT> if self.postproc is not None: <NEW_LINE> <INDENT> if "mask" in record: <NEW_LINE> <INDENT> record["image"], record["mask"] = self.postproc(record["image"], record["mask"]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> record["image"], _ = self.postproc(record["image"], record["image"]) <NEW_LINE> <DEDENT> <DEDENT> return record
|
Dataset with test time augmentations
Parameters:
postproc: function
receives (image, mask) and returns postprocessed versions of these two
Returns a dict with keys:
id, image, mask, depth
|
6259902ed6c5a102081e317d
|
class ChanceScheduler(driver.Scheduler):
    """Implements Scheduler as a random node selector."""

    def _filter_hosts(self, request_spec, hosts, **kwargs):
        """Drop hosts listed in filter_properties['ignore_hosts']."""
        filter_properties = kwargs.get('filter_properties', {})
        ignore_hosts = filter_properties.get('ignore_hosts', [])
        hosts = [host for host in hosts if host not in ignore_hosts]
        return hosts

    def _schedule(self, context, topic, request_spec, **kwargs):
        """Pick a random host among those up for *topic*.

        Raises NoValidHost when no host remains after filtering.
        """
        elevated = context.elevated()
        hosts = self.hosts_up(elevated, topic)
        if not hosts:
            msg = _("Is the appropriate service running?")
            raise exception.NoValidHost(reason=msg)
        hosts = self._filter_hosts(request_spec, hosts, **kwargs)
        if not hosts:
            msg = _("Could not find another compute")
            raise exception.NoValidHost(reason=msg)
        # Uniform random choice among the remaining hosts.
        return hosts[int(random.random() * len(hosts))]

    def schedule(self, context, topic, method, *_args, **kwargs):
        """Cast *method* to a randomly chosen host for *topic*."""
        host = self._schedule(context, topic, None, **kwargs)
        driver.cast_to_host(context, topic, host, method, **kwargs)

    def schedule_run_instance(self, context, request_spec, *_args, **kwargs):
        """Create DB entries and cast run_instance for each requested instance."""
        num_instances = request_spec.get('num_instances', 1)
        instances = []
        for num in xrange(num_instances):
            host = self._schedule(context, 'compute', request_spec, **kwargs)
            instance = self.create_instance_db_entry(context, request_spec)
            driver.cast_to_compute_host(context, host, 'run_instance', instance_uuid=instance['uuid'], **kwargs)
            instances.append(driver.encode_instance(instance))
            # Drop the uuid so the next iteration creates a fresh DB entry.
            del request_spec['instance_properties']['uuid']
        return instances

    def schedule_prep_resize(self, context, request_spec, *args, **kwargs):
        """Cast prep_resize to a randomly chosen compute host."""
        host = self._schedule(context, 'compute', request_spec, **kwargs)
        driver.cast_to_compute_host(context, host, 'prep_resize', **kwargs)
|
Implements Scheduler as a random node selector.
|
6259902e9b70327d1c57fddb
|
class TestAppDirectoriesFinder(StaticFilesTestCase, FinderTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(TestAppDirectoriesFinder, self).setUp() <NEW_LINE> self.finder = finders.AppDirectoriesFinder() <NEW_LINE> test_file_path = os.path.join(django_settings.TEST_ROOT, 'apps/test/static/test/file1.txt') <NEW_LINE> self.find_first = ("test/file1.txt", test_file_path) <NEW_LINE> self.find_all = ("test/file1.txt", [test_file_path])
|
Test AppDirectoriesFinder.
|
6259902e6e29344779b016a7
|
class Type3RecurrenceModel(BaseRecurrenceModel): <NEW_LINE> <INDENT> def cumulative_value(self, slip_moment, mmax, mag_value, bbar, dbar): <NEW_LINE> <INDENT> moment_ratio = slip_moment / _scale_moment(mmax) <NEW_LINE> delta_m = mmax - mag_value <NEW_LINE> rhs_1 = (dbar * (dbar - bbar)) / bbar <NEW_LINE> rhs_3 = (1. / bbar) * (np.exp(bbar * delta_m) - 1.) - delta_m <NEW_LINE> return rhs_1 * moment_ratio * rhs_3
|
Calculate N(M > mag_value) using the Anderson & Luco Type 3 formula, as the
inverse of formula III.5 of Table 4 in Anderson & Luco (1993).
|
6259902ed99f1b3c44d066fb
|
class RFPDupeFilter(BaseDupeFilter): <NEW_LINE> <INDENT> logger = logger <NEW_LINE> def __init__(self, server, key, debug=False): <NEW_LINE> <INDENT> self.server = server <NEW_LINE> self.key = key <NEW_LINE> self.debug = debug <NEW_LINE> self.logdupes = True <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_settings(cls, settings): <NEW_LINE> <INDENT> server = get_redis_from_settings(settings) <NEW_LINE> key = defaults.DUPEFILTER_KEY % {'timestamp': int(time.time())} <NEW_LINE> debug = settings.getbool('DUPEFILTER_DEBUG') <NEW_LINE> return cls(server, key=key, debug=debug) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_crawler(cls, crawler): <NEW_LINE> <INDENT> return cls.from_settings(crawler.settings) <NEW_LINE> <DEDENT> def request_seen(self, request): <NEW_LINE> <INDENT> fp = self.request_fingerprint(request) <NEW_LINE> added = self.server.sadd(self.key, fp) <NEW_LINE> return added == 0 <NEW_LINE> <DEDENT> def request_fingerprint(self, request): <NEW_LINE> <INDENT> return request_fingerprint(request) <NEW_LINE> <DEDENT> def close(self, reason=''): <NEW_LINE> <INDENT> self.clear() <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> self.server.delete(self.key) <NEW_LINE> <DEDENT> def log(self, request, spider): <NEW_LINE> <INDENT> if self.debug: <NEW_LINE> <INDENT> msg = "Filtered duplicate request: %(request)s" <NEW_LINE> self.logger.debug(msg, {'request': request}, extra={'spider': spider}) <NEW_LINE> <DEDENT> elif self.logdupes: <NEW_LINE> <INDENT> msg = ("Filtered duplicate request %(request)s" " - no more duplicates will be shown" " (see DUPEFILTER_DEBUG to show all duplicates)") <NEW_LINE> self.logger.debug(msg, {'request': request}, extra={'spider': spider}) <NEW_LINE> self.logdupes = False
|
Redis-based request duplicates filter.
This class can also be used with default Scrapy's scheduler.
|
6259902e8e05c05ec3f6f687
|
class ParentLogHandler(ChildLogHandler):
    """Collects all logs from child processes and sends them to a set of
    handlers at the parent.

    Note: this flushes the logs at exit, but be sure not to register any
    other atexit calls which might make logging calls BEFORE creating this
    log handler, otherwise there is no guarantee that the messages will be
    written!
    """

    def __init__(self, handlers, queue):
        ChildLogHandler.__init__(self, queue)
        self._handlers = handlers
        for handler in self._handlers:
            handler.setFormatter(memdam.FORMATTER)
        # Daemon thread draining the queue and fanning records out.
        self._thread = threading.Thread(target=self.receive)
        self._thread.daemon = True
        self._thread.start()
        atexit.register(self._shutdown)

    def _shutdown(self):
        # Stop the receive thread with a poison pill, then close the queue.
        if self._thread != None:
            self.queue.put(memdam.common.poisonpill.PoisonPill())
            self._thread.join()
            self.queue.close()
            self.queue.join_thread()
            self._thread = None

    def receive(self):
        """Drain the queue until an empty read (poison pill) arrives."""
        while True:
            try:
                records = memdam.common.parallel.read_next_from_queue(self.queue)
                # An empty result signals shutdown.
                if len(records) <= 0:
                    break
                assert len(records) == 1
                for handler in self._handlers:
                    handler.emit(records[0])
            except (KeyboardInterrupt, SystemExit):
                raise
            except:
                # Logging must never crash the process; report and continue.
                traceback.print_exc(file=sys.stderr)

    def flush(self):
        """Synchronously drain everything currently in the queue."""
        try:
            records = memdam.common.parallel.read_all_from_queue(self.queue)
            for record in records:
                for handler in self._handlers:
                    handler.emit(record)
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            traceback.print_exc(file=sys.stderr)

    def close(self):
        for handler in self._handlers:
            handler.close()
        logging.Handler.close(self)

    def emit(self, record):
        # Enqueue via the child handler, then flush immediately so the
        # record reaches the real handlers without waiting on the thread.
        ChildLogHandler.emit(self, record)
        self.flush()
|
Collects all logs from child processes and sends them to a set of handlers at the parent.
Note: this flushes the logs at exit, but be sure not to register any other atexit calls which
might make logging calls BEFORE creating this log handler, otherwise there is no guarantee that
the messages will be written!
|
6259902e8c3a8732951f75b0
|
class wb_standard_slave_sequencer(UVMSequencer): <NEW_LINE> <INDENT> def __init__(self, name, parent=None): <NEW_LINE> <INDENT> super().__init__(name, parent) <NEW_LINE> self.seq_item_export = UVMBlockingPeekPort("seq_item_export", self)
|
Class: Memory Interface Read Slave Sequencer
Definition: Contains functions, tasks and methods of this agent's sequencer.
|
6259902e91af0d3eaad3ae83
|
class GadgetType(object): <NEW_LINE> <INDENT> NoOperation = 0 <NEW_LINE> Jump = 1 <NEW_LINE> MoveRegister = 2 <NEW_LINE> LoadConstant = 3 <NEW_LINE> Arithmetic = 4 <NEW_LINE> LoadMemory = 5 <NEW_LINE> StoreMemory = 6 <NEW_LINE> ArithmeticLoad = 7 <NEW_LINE> ArithmeticStore = 8 <NEW_LINE> Undefined = 9 <NEW_LINE> @staticmethod <NEW_LINE> def to_string(gadget_type): <NEW_LINE> <INDENT> strings = { GadgetType.NoOperation : "No Operation", GadgetType.Jump : "Jump", GadgetType.MoveRegister : "Move Register", GadgetType.LoadConstant : "Load Constant", GadgetType.Arithmetic : "Arithmetic", GadgetType.LoadMemory : "Load Memory", GadgetType.StoreMemory : "Store Memory", GadgetType.ArithmeticLoad : "Arithmetic Load", GadgetType.ArithmeticStore : "Arithmetic Store", GadgetType.Undefined : "Undefined", } <NEW_LINE> return strings[gadget_type]
|
Enumeration of Gadget Types.
|
6259902eb57a9660fecd2adc
|
class notify_around(ContextDecorator): <NEW_LINE> <INDENT> def __init__(self, event, *args, **kwargs): <NEW_LINE> <INDENT> self.event = event <NEW_LINE> self.args = args <NEW_LINE> self.kwargs = kwargs <NEW_LINE> if 'uuid' not in kwargs: <NEW_LINE> <INDENT> kwargs['uuid'] = str(uuid.uuid4()) <NEW_LINE> <DEDENT> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> notify(self.event, when=NotifyType.BEFORE, *self.args, **self.kwargs) <NEW_LINE> return self <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc, exc_tb): <NEW_LINE> <INDENT> if exc_type is not None: <NEW_LINE> <INDENT> kwargs = self.kwargs.copy() <NEW_LINE> kwargs["exc_args"] = (exc_type, exc, exc_tb) <NEW_LINE> notify(self.event, *self.args, when=NotifyType.EXCEPTION, **self.kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> notify(self.event, *self.args, when=NotifyType.AFTER, **self.kwargs) <NEW_LINE> <DEDENT> return False
|
This class is both a decorator and a context manager.
In order to match up BEFORE and AFTER events, a uuid field is included in
the kwargs for loggers/etc.
If an exception occurs in the wrapped function, the
NotifyType.EXCEPTION type is used instead.
|
6259902e63f4b57ef008659f
|
class ParsedConfig(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.pragmas = [] <NEW_LINE> self.pipelines = [] <NEW_LINE> self.jobs = [] <NEW_LINE> self.project_templates = [] <NEW_LINE> self.projects = [] <NEW_LINE> self.projects_by_regex = {} <NEW_LINE> self.nodesets = [] <NEW_LINE> self.secrets = [] <NEW_LINE> self.semaphores = [] <NEW_LINE> <DEDENT> def copy(self): <NEW_LINE> <INDENT> r = ParsedConfig() <NEW_LINE> r.pragmas = self.pragmas[:] <NEW_LINE> r.pipelines = self.pipelines[:] <NEW_LINE> r.jobs = self.jobs[:] <NEW_LINE> r.project_templates = self.project_templates[:] <NEW_LINE> r.projects = self.projects[:] <NEW_LINE> r.projects_by_regex = copy.copy(self.projects_by_regex) <NEW_LINE> r.nodesets = self.nodesets[:] <NEW_LINE> r.secrets = self.secrets[:] <NEW_LINE> r.semaphores = self.semaphores[:] <NEW_LINE> return r <NEW_LINE> <DEDENT> def extend(self, conf): <NEW_LINE> <INDENT> if isinstance(conf, ParsedConfig): <NEW_LINE> <INDENT> self.pragmas.extend(conf.pragmas) <NEW_LINE> self.pipelines.extend(conf.pipelines) <NEW_LINE> self.jobs.extend(conf.jobs) <NEW_LINE> self.project_templates.extend(conf.project_templates) <NEW_LINE> self.projects.extend(conf.projects) <NEW_LINE> self.nodesets.extend(conf.nodesets) <NEW_LINE> self.secrets.extend(conf.secrets) <NEW_LINE> self.semaphores.extend(conf.semaphores) <NEW_LINE> for regex, projects in conf.projects_by_regex.items(): <NEW_LINE> <INDENT> self.projects_by_regex.setdefault(regex, []).extend(projects) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError()
|
A collection of parsed config objects.
|
6259902e8a43f66fc4bf31df
|
class _FakeStuffCreator(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> from ..gui.plotter import digital, analog <NEW_LINE> from io import BytesIO <NEW_LINE> self.BytesIO = BytesIO <NEW_LINE> self.mod_D, self.mod_A = digital, analog <NEW_LINE> D, A = self.mod_D.example_signals, self.mod_A.example_signals <NEW_LINE> self.channels = _create_fake_channels( D, A ) <NEW_LINE> self.signals = _create_fake_signals(D, A) <NEW_LINE> self.clocks = _create_fake_dev_clocks(D, A) <NEW_LINE> self.transitions = _create_fake_transitions(D, A) <NEW_LINE> <DEDENT> def print_fake_stuff(self): <NEW_LINE> <INDENT> print('type: ', type(self.signals)) <NEW_LINE> print('clocks: ', self.clocks) <NEW_LINE> print('channels: ', self.channels) <NEW_LINE> print('transitions: ', self.transitions) <NEW_LINE> print('name_map: ', create_channel_name_map(self.channels, self.clocks)) <NEW_LINE> A = self.signals.to_arrays(self.transitions, self.clocks, self.channels) <NEW_LINE> print('array:\n', A) <NEW_LINE> <DEDENT> def test_gnuplot(self): <NEW_LINE> <INDENT> A = self.signals.to_arrays(self.transitions, self.clocks, self.channels) <NEW_LINE> f = self.BytesIO() <NEW_LINE> A.save( f ) <NEW_LINE> return f.getvalue().decode()
|
class to create fake data for testing Signals(Set) classes
|
6259902e66673b3332c3144b
|
class DocumentsMetadataConfiguration(AWSProperty): <NEW_LINE> <INDENT> props: PropsDictType = { "S3Prefix": (str, False), }
|
`DocumentsMetadataConfiguration <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kendra-datasource-documentsmetadataconfiguration.html>`__
|
6259902e4e696a045264e64f
|
class UserFollowingGroup(domain_object.DomainObject): <NEW_LINE> <INDENT> def __init__(self, follower_id, object_id): <NEW_LINE> <INDENT> self.follower_id = follower_id <NEW_LINE> self.object_id = object_id <NEW_LINE> self.datetime = datetime.datetime.now() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get(self, follower_id, object_id): <NEW_LINE> <INDENT> query = meta.Session.query(UserFollowingGroup) <NEW_LINE> query = query.filter(UserFollowingGroup.follower_id == follower_id) <NEW_LINE> query = query.filter(UserFollowingGroup.object_id == object_id) <NEW_LINE> return query.first() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def is_following(cls, follower_id, object_id): <NEW_LINE> <INDENT> return UserFollowingGroup.get(follower_id, object_id) is not None <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def followee_count(cls, follower_id): <NEW_LINE> <INDENT> return meta.Session.query(UserFollowingGroup).filter( UserFollowingGroup.follower_id == follower_id).count() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def followee_list(cls, follower_id): <NEW_LINE> <INDENT> return meta.Session.query(UserFollowingGroup).filter( UserFollowingGroup.follower_id == follower_id).all() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def follower_count(cls, object_id): <NEW_LINE> <INDENT> return meta.Session.query(UserFollowingGroup).filter( UserFollowingGroup.object_id == object_id).count() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def follower_list(cls, object_id): <NEW_LINE> <INDENT> return meta.Session.query(UserFollowingGroup).filter( UserFollowingGroup.object_id == object_id).all()
|
A many-many relationship between users and groups.
A relationship between a user (the follower) and a group (the object),
that means that the user is currently following the group.
|
6259902e287bf620b6272c42
|
class Rectangle: <NEW_LINE> <INDENT> number_of_instances = 0 <NEW_LINE> print_symbol = '#' <NEW_LINE> def __init__(self, width=0, height=0): <NEW_LINE> <INDENT> self.width = width <NEW_LINE> self.height = height <NEW_LINE> type(self).number_of_instances += 1 <NEW_LINE> <DEDENT> @property <NEW_LINE> def width(self): <NEW_LINE> <INDENT> return self.__width <NEW_LINE> <DEDENT> @width.setter <NEW_LINE> def width(self, value): <NEW_LINE> <INDENT> if type(value) is not int: <NEW_LINE> <INDENT> raise TypeError("width must be an integer") <NEW_LINE> <DEDENT> if value < 0: <NEW_LINE> <INDENT> raise ValueError("width must be >= 0") <NEW_LINE> <DEDENT> self.__width = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def height(self): <NEW_LINE> <INDENT> return self.__height <NEW_LINE> <DEDENT> @height.setter <NEW_LINE> def height(self, value): <NEW_LINE> <INDENT> if type(value) is not int: <NEW_LINE> <INDENT> raise TypeError("height must be an integer") <NEW_LINE> <DEDENT> if value < 0: <NEW_LINE> <INDENT> raise ValueError("height must be >= 0") <NEW_LINE> <DEDENT> self.__height = value <NEW_LINE> <DEDENT> def area(self): <NEW_LINE> <INDENT> return self.__width * self.__height <NEW_LINE> <DEDENT> def perimeter(self): <NEW_LINE> <INDENT> if self.__width == 0 or self.__height == 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return 2 * (self.__width + self.__height) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> s = "" <NEW_LINE> for i in range(self.__height): <NEW_LINE> <INDENT> s = s + (str(self.print_symbol) * self.__width) <NEW_LINE> if i != self.__height - 1: <NEW_LINE> <INDENT> s = s + '\n' <NEW_LINE> <DEDENT> <DEDENT> return s <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Rectangle(" + str(self.__width) + ", " + str(self.__height) + ")" <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> print("Bye rectangle...") <NEW_LINE> type(self).number_of_instances -= 1
|
Class that defines a rectangle
|
6259902e1d351010ab8f4b73
|
class Solution: <NEW_LINE> <INDENT> def inorderTraversal(self, root): <NEW_LINE> <INDENT> if root is None: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> return self.inorderTraversal(root.left)+[root.val]+self.inorderTraversal(root.right)
|
@param root: A Tree
@return: Inorder in ArrayList which contains node values.
|
6259902ebe8e80087fbc00d7
|
class LibdepsGraph(networkx.DiGraph):
    """Class for analyzing the graph."""

    def __init__(self, graph=networkx.DiGraph()):
        # NOTE(review): mutable default argument — the same DiGraph instance
        # is shared across calls; confirm callers never mutate it.
        super().__init__(incoming_graph_data=graph)
        self._progressbar = None
        self._deptypes = None

    def get_deptype(self, deptype):
        """Return the numeric value for a dependency-type name.

        Lazily loads the mapping from the graph's 'deptypes' attribute and,
        for schema version 1, fills in default values.
        """
        if not self._deptypes:
            self._deptypes = json.loads(self.graph.get('deptypes', "{}"))
            if self.graph['graph_schema_version'] == 1:
                # Schema v1 predates stored deptypes; supply the defaults.
                self._deptypes['Global'] = self._deptypes.get('Global', 0)
                self._deptypes['Public'] = self._deptypes.get('Public', 1)
                self._deptypes['Private'] = self._deptypes.get('Private', 2)
                self._deptypes['Interface'] = self._deptypes.get('Interface', 3)
                self._deptypes['Typeinfo'] = self._deptypes.get('Typeinfo', 4)
        return self._deptypes[deptype]

    def _strip_build_dir(self, node):
        # Make node paths relative to the graph's recorded build directory.
        return str(Path(node).resolve().relative_to(Path(self.graph['build_dir']).resolve()))

    def get_direct_nonprivate_graph(self):
        """Return a subgraph view keeping only direct Public/Interface edges."""
        def filter_direct_nonprivate_edges(n1, n2):
            return (self[n1][n2].get(EdgeProps.direct.name)
                    and (self[n1][n2].get(EdgeProps.visibility.name) == self.get_deptype('Public')
                         or self[n1][n2].get(EdgeProps.visibility.name) == self.get_deptype('Interface')))
        return networkx.subgraph_view(self, filter_edge=filter_direct_nonprivate_edges)

    def get_node_tree(self, node):
        """Return the direct non-private dependency tree rooted at *node*."""
        direct_nonprivate_graph = self.get_direct_nonprivate_graph()
        substree_set = networkx.descendants(direct_nonprivate_graph, node)
        def subtree(n1):
            # Keep the root itself plus everything reachable from it.
            return n1 in substree_set or n1 == node
        return networkx.subgraph_view(direct_nonprivate_graph, filter_node=subtree)

    def get_progress(self, value=None):
        """Return (and cache) a progress-bar factory.

        Falls back to a no-op factory when *value* is falsy or the optional
        progressbar module was not imported.
        """
        if value is None:
            # Default to using progressbar only if the module is available.
            value = ('progressbar' in globals())
        if self._progressbar:
            return self._progressbar
        if value:
            def get_progress_bar(title, *args):
                custom_bar = progressbar.ProgressBar(widgets=[
                    title,
                    progressbar.Counter(format='[%(value)d/%(max_value)d]'),
                    progressbar.Timer(format=" Time: %(elapsed)s "),
                    progressbar.Bar(marker='>', fill=' ', left='|', right='|')
                ])
                return custom_bar(*args)
            self._progressbar = get_progress_bar
        else:
            self._progressbar = null_progressbar
        return self._progressbar
|
Class for analyzing the graph.
|
6259902e8e05c05ec3f6f689
|
class LoggingMiddleware(object): <NEW_LINE> <INDENT> def __init__(self, logger_name='cf.falcon.logger'): <NEW_LINE> <INDENT> self._logger_name = logger_name <NEW_LINE> <DEDENT> def process_request(self, request, response): <NEW_LINE> <INDENT> framework = cf_logging.FRAMEWORK <NEW_LINE> cid = framework.request_reader.get_correlation_id(request) <NEW_LINE> framework.context.set_correlation_id(cid, request) <NEW_LINE> framework.context.set('request_started_at', datetime.utcnow(), request) <NEW_LINE> <DEDENT> def process_response(self, request, response, resource, req_succeeded): <NEW_LINE> <INDENT> cf_logging.FRAMEWORK.context.set( 'response_sent_at', datetime.utcnow(), request) <NEW_LINE> extra = {REQUEST_KEY: request, RESPONSE_KEY: response} <NEW_LINE> logging.getLogger(self._logger_name).info('', extra=extra)
|
Falcon logging middleware
|
6259902e30c21e258be99868
|
class TestTaskCreate(APITestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.user1 = create_user(1) <NEW_LINE> self.skill1 = create_skill("Python") <NEW_LINE> self.skill2 = create_skill("PHP") <NEW_LINE> <DEDENT> def test_task_create(self): <NEW_LINE> <INDENT> token = api_login(self.user1) <NEW_LINE> url = reverse('task-create') <NEW_LINE> data = { 'title' : 'Task 1', 'description' : 'Desc 1', 'offer' : 50, 'location' : 'Loc 1', 'is_remote' : True, 'skills': [self.skill1.code, self.skill2.code] } <NEW_LINE> response = self.client.post(url, data, format="json", HTTP_AUTHORIZATION='Token {}'.format(token)) <NEW_LINE> task = Task.objects.get(title="Task 1") <NEW_LINE> self.assertEqual(Task.objects.count(), 1) <NEW_LINE> self.assertEqual(task.title, 'Task 1') <NEW_LINE> self.assertEqual(task.description, 'Desc 1') <NEW_LINE> self.assertEqual(task.offer, 50) <NEW_LINE> self.assertEqual(task.location, 'Loc 1') <NEW_LINE> self.assertEqual(task.is_remote, True) <NEW_LINE> self.assertEqual(task.skills.count(), 2) <NEW_LINE> self.assertEqual(task.skills.get(code="Py").code, 'Py') <NEW_LINE> self.assertEqual(task.skills.get(code="Py").title, 'Python') <NEW_LINE> self.assertEqual(task.skills.get(title="PHP").code, 'PH') <NEW_LINE> self.assertEqual(task.skills.get(title="PHP").title, 'PHP')
|
Model tests for create tasks
|
6259902e8c3a8732951f75b4
|
class IsAuthenticatedWithPermission(IsAuthenticated): <NEW_LINE> <INDENT> def has_object_permission(self, request, view, obj): <NEW_LINE> <INDENT> return obj.has_permission(request.user)
|
Implements `has_object_permission` to check for object level
permission
Author: Himanshu Shankar (https://himanshus.com)
|
6259902e15baa72349462ff4
|
class TargetReference(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'type': {'required': True, 'constant': True}, 'id': {'required': True, 'pattern': r'^\/[Ss][Uu][Bb][Ss][Cc][Rr][Ii][Pp][Tt][Ii][Oo][Nn][Ss]\/[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}\/[Rr][Ee][Ss][Oo][Uu][Rr][Cc][Ee][Gg][Rr][Oo][Uu][Pp][Ss]\/[a-zA-Z0-9_\-\.\(\)]*[a-zA-Z0-9_\-\(\)]\/[Pp][Rr][Oo][Vv][Ii][Dd][Ee][Rr][Ss]\/[a-zA-Z0-9]+\.[a-zA-Z0-9]+\/[a-zA-Z0-9_\-\.]+\/[a-zA-Z0-9_\-\.]+\/[Pp][Rr][Oo][Vv][Ii][Dd][Ee][Rr][Ss]\/[Mm][Ii][Cc][Rr][Oo][Ss][Oo][Ff][Tt]\.[Cc][Hh][Aa][Oo][Ss]\/[Tt][Aa][Rr][Gg][Ee][Tt][Ss]\/[a-zA-Z0-9_\-\.]+$'}, } <NEW_LINE> _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, } <NEW_LINE> type = "ChaosTarget" <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(TargetReference, self).__init__(**kwargs) <NEW_LINE> self.id = kwargs['id']
|
Model that represents a reference to a Target in the selector.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar type: Enum of the Target reference type. Has constant value: "ChaosTarget".
:vartype type: str
:param id: Required. String of the resource ID of a Target resource.
:type id: str
|
6259902e796e427e5384f7d8
|
class InlineQueryResultGif(InlineQueryResult): <NEW_LINE> <INDENT> type: base.String = fields.Field(alias='type', default='gif') <NEW_LINE> gif_url: base.String = fields.Field() <NEW_LINE> gif_width: base.Integer = fields.Field() <NEW_LINE> gif_height: base.Integer = fields.Field() <NEW_LINE> gif_duration: base.Integer = fields.Field() <NEW_LINE> thumb_url: base.String = fields.Field() <NEW_LINE> title: base.String = fields.Field() <NEW_LINE> caption: base.String = fields.Field() <NEW_LINE> input_message_content: InputMessageContent = fields.Field(base=InputMessageContent) <NEW_LINE> def __init__(self, *, id: base.String, gif_url: base.String, gif_width: typing.Optional[base.Integer] = None, gif_height: typing.Optional[base.Integer] = None, gif_duration: typing.Optional[base.Integer] = None, thumb_url: typing.Optional[base.String] = None, title: typing.Optional[base.String] = None, caption: typing.Optional[base.String] = None, reply_markup: typing.Optional[InlineKeyboardMarkup] = None, input_message_content: typing.Optional[InputMessageContent] = None): <NEW_LINE> <INDENT> super(InlineQueryResultGif, self).__init__(id=id, gif_url=gif_url, gif_width=gif_width, gif_height=gif_height, gif_duration=gif_duration, thumb_url=thumb_url, title=title, caption=caption, reply_markup=reply_markup, input_message_content=input_message_content)
|
Represents a link to an animated GIF file.
By default, this animated GIF file will be sent by the user with optional caption.
Alternatively, you can use input_message_content to send a message with the specified content
instead of the animation.
https://core.telegram.org/bots/api#inlinequeryresultgif
|
6259902ed10714528d69eeb9
|
class GuessMachine(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.number_to_guess=random.randint(MIN,MAX) <NEW_LINE> self.number_of_attempt=0 <NEW_LINE> <DEDENT> def guess (self,num): <NEW_LINE> <INDENT> self.number_of_attempt +=1 <NEW_LINE> if num < self.number_to_guess: <NEW_LINE> <INDENT> return 'too low' <NEW_LINE> <DEDENT> elif num> self.number_to_guess: <NEW_LINE> <INDENT> return 'too high' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 'found'
|
I have a number in mind,
you have to guess it !!
+self.number_to_guess is generated during creation of the object
+ use'guess(num)' method to make a guess
+I'll count the number of attempt in self.number_of_attempt
|
6259902e4e696a045264e650
|
class RelationValidateError(ValidateError): <NEW_LINE> <INDENT> pass
|
A base validate error for RelationValidator
|
6259902e26238365f5fadbaf
|
class TestSocket(): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def sendall(str_to_send): <NEW_LINE> <INDENT> pprint.pprint(str_to_send) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def recv(len_header): <NEW_LINE> <INDENT> read = click.prompt( 'This is a simulation as no socket is here, please input the string you want to pass to the socket') <NEW_LINE> return read
|
This emulates the mysql socket even though we dont actually have one
|
6259902e6fece00bbaccca0c
|
class ItemListView(ListView): <NEW_LINE> <INDENT> model = Item <NEW_LINE> template_name = 'salesmanager/item/item_list.html' <NEW_LINE> context_object_name = 'items'
|
List all the items
|
6259902e6e29344779b016ad
|
class Model(Resource): <NEW_LINE> <INDENT> DATABASE_DATA = { 'database': 'triage', 'user': 'model_handler', 'password': 'password', 'host': 'db', 'port': '5432' } <NEW_LINE> arg_schema_post = { 'clinic_id': fields.Int(required=True), 'model_weights': fields.Field(), 'severity': fields.Int(required=True), 'accuracy': fields.Float(required=True), 'make_in_use': fields.Boolean(), } <NEW_LINE> def post(self): <NEW_LINE> <INDENT> args = parser.parse(self.arg_schema_post, request, location='json_or_form') <NEW_LINE> data_file = request.files['model_weights'] <NEW_LINE> if not data_file: <NEW_LINE> <INDENT> return 'Unprocessable Entity', 422 <NEW_LINE> <DEDENT> file_path = self.save_weight_file_locally(data_file, args['clinic_id'], args['severity']) <NEW_LINE> model_id = self.save_model_file_path_to_db(file_path, args['clinic_id'], args['severity'], args['accuracy'], False) <NEW_LINE> if 'make_in_use' in args and args['make_in_use']: <NEW_LINE> <INDENT> Models().set_active_model(args['clinic_id'], model_id) <NEW_LINE> <DEDENT> <DEDENT> def save_model_file_path_to_db(self, file_path, clinic_id, severity, accuracy, in_use): <NEW_LINE> <INDENT> db = DataBase(self.DATABASE_DATA) <NEW_LINE> query = "INSERT INTO triagedata.models (file_path, clinic_id, severity, accuracy, in_use) " <NEW_LINE> query += "VALUES ('%s', %s, %s, %s, %s) " % (file_path, clinic_id, severity, accuracy, in_use) <NEW_LINE> query += "RETURNING id" <NEW_LINE> return db.insert(query, returning=True) <NEW_LINE> <DEDENT> def save_weight_file_locally(self, data_file, clinic_id, severity): <NEW_LINE> <INDENT> upload_dir = FILE_STORAGE_PATH + "%s/%s" % (clinic_id, severity) <NEW_LINE> file_name = uuid.uuid4().hex + '.h5' <NEW_LINE> file_path = os.path.join(upload_dir, file_name) <NEW_LINE> self.create_directory_if_not_exists(upload_dir) <NEW_LINE> data_file.save(file_path) <NEW_LINE> return file_path <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def create_directory_if_not_exists(directory_path): <NEW_LINE> 
<INDENT> os.makedirs(directory_path, exist_ok=True)
|
The `Model` class handles all of the requests relative to new model data uploads for the API.
|
6259902ebe8e80087fbc00d9
|
class DirectionWrapper(NumGridWrapper, gym.ActionWrapper): <NEW_LINE> <INDENT> def __init__(self, env, distance=1): <NEW_LINE> <INDENT> super().__init__(env) <NEW_LINE> self.distance = distance <NEW_LINE> self.direction_space = spaces.Direction() <NEW_LINE> self.action_space = gym.spaces.Tuple((self.digit_space, self.direction_space)) <NEW_LINE> <DEDENT> def _action(self, action): <NEW_LINE> <INDENT> digit, direction = action <NEW_LINE> pos = self.cursor_move(direction, self.distance) <NEW_LINE> return (digit, pos) <NEW_LINE> <DEDENT> def cursor_move(self, direction, distance): <NEW_LINE> <INDENT> return self.env.cursor_pos + np.array(direction) * distance
|
An action wrapper for NumGrid converting directions into positions.
Since it needs access to the cursor position, which is not saved in
the wrapper stack, it must be used first in the stack.
|
6259902e796e427e5384f7da
|
class CapabilityStatementImplementation(backboneelement.BackboneElement): <NEW_LINE> <INDENT> resource_type = "CapabilityStatementImplementation" <NEW_LINE> def __init__(self, jsondict=None, strict=True): <NEW_LINE> <INDENT> self.custodian = None <NEW_LINE> self.description = None <NEW_LINE> self.url = None <NEW_LINE> super(CapabilityStatementImplementation, self).__init__(jsondict=jsondict, strict=strict) <NEW_LINE> <DEDENT> def elementProperties(self): <NEW_LINE> <INDENT> js = super(CapabilityStatementImplementation, self).elementProperties() <NEW_LINE> js.extend([ ("custodian", "custodian", fhirreference.FHIRReference, False, None, False), ("description", "description", str, False, None, True), ("url", "url", str, False, None, False), ]) <NEW_LINE> return js
|
If this describes a specific instance.
Identifies a specific implementation instance that is described by the
capability statement - i.e. a particular installation, rather than the
capabilities of a software program.
|
6259902e507cdc57c63a5e07
|
class SoftwareDeployment(BASE, HeatBase, StateAware): <NEW_LINE> <INDENT> __tablename__ = 'software_deployment' <NEW_LINE> __table_args__ = ( sqlalchemy.Index('ix_software_deployment_created_at', 'created_at'),) <NEW_LINE> id = sqlalchemy.Column('id', sqlalchemy.String(36), primary_key=True, default=lambda: str(uuid.uuid4())) <NEW_LINE> config_id = sqlalchemy.Column( 'config_id', sqlalchemy.String(36), sqlalchemy.ForeignKey('software_config.id'), nullable=False) <NEW_LINE> config = relationship(SoftwareConfig, backref=backref('deployments')) <NEW_LINE> server_id = sqlalchemy.Column('server_id', sqlalchemy.String(36), nullable=False, index=True) <NEW_LINE> input_values = sqlalchemy.Column('input_values', types.Json) <NEW_LINE> output_values = sqlalchemy.Column('output_values', types.Json) <NEW_LINE> tenant = sqlalchemy.Column( 'tenant', sqlalchemy.String(64), nullable=False, index=True) <NEW_LINE> stack_user_project_id = sqlalchemy.Column(sqlalchemy.String(64)) <NEW_LINE> updated_at = sqlalchemy.Column(sqlalchemy.DateTime)
|
Represents applying a software configuration resource to a
single server resource.
|
6259902e0a366e3fb87dda46
|
class check_required_params: <NEW_LINE> <INDENT> def __init__(self, required_params: list) -> None: <NEW_LINE> <INDENT> assert required_params <NEW_LINE> self.required_params = required_params <NEW_LINE> <DEDENT> def __call__(self, function: Callable) -> Callable: <NEW_LINE> <INDENT> @wraps(function) <NEW_LINE> def wrapped_view_function(*args, **kwargs) -> Any: <NEW_LINE> <INDENT> request = args[0] <NEW_LINE> request_args = request.args <NEW_LINE> missing_params = [ param for param in self.required_params if not request_args.get(param) ] <NEW_LINE> if missing_params: <NEW_LINE> <INDENT> return response.json({ 'status': 'ERROR', 'description': f'Following request params are required: {missing_params}.' }, status=400) <NEW_LINE> <DEDENT> return function(request, **kwargs) <NEW_LINE> <DEDENT> return wrapped_view_function
|
This will return 400 from a view if a required param/params are missing.
|
6259902e287bf620b6272c46
|
class CryptoManager: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.cwd = os.getcwd() <NEW_LINE> self.now = datetime.datetime.now() <NEW_LINE> self.dfop = pd.DataFrame() <NEW_LINE> self.summary_table = pd.DataFrame() <NEW_LINE> logger.info("CryptoManager class created") <NEW_LINE> <DEDENT> def save_crypto_operations_pkl(self): <NEW_LINE> <INDENT> logger.info("CryptoManager - save_crypto_operations_pkl") <NEW_LINE> if not self.dfop.empty: <NEW_LINE> <INDENT> self.dfop.to_pickle(os.path.join(self.cwd, "pd_raw_data", "crypto_operations.pkl")) <NEW_LINE> <DEDENT> <DEDENT> def load_crypto_operations_pkl(self): <NEW_LINE> <INDENT> logger.info("CryptoManager - load_crypto_operations_pkl") <NEW_LINE> self.dfop = pd.read_pickle(os.path.join(self.cwd, "pd_raw_data", "crypto_operations.pkl")) <NEW_LINE> <DEDENT> def save_crypto_operations_csv(self): <NEW_LINE> <INDENT> logger.info("CryptoManager - save_crypto_operations_csv") <NEW_LINE> if not self.dfop.empty: <NEW_LINE> <INDENT> self.dfop.to_csv(os.path.join(self.cwd, "pd_raw_data", "crypto_operations.csv"), index=False, sep=';') <NEW_LINE> <DEDENT> <DEDENT> def add_info(self, **kwargs): <NEW_LINE> <INDENT> logger.info("BrStockManager - add_info - kwargs %s", kwargs) <NEW_LINE> broker = kwargs["broker"] <NEW_LINE> ticker = kwargs["ticker"] <NEW_LINE> date = kwargs["date"] <NEW_LINE> price = kwargs["price"] <NEW_LINE> quantity = kwargs["quantity"] <NEW_LINE> comments = kwargs.get("comments", "") <NEW_LINE> new_row = {"Broker": broker, "Ticker": ticker, "Quantity": quantity, "Price": price, "Comments": comments} <NEW_LINE> new_row["Date"] = pd.to_datetime(date, errors='coerce', format="%Y-%m-%d").date() <NEW_LINE> new_row["Crypto Value"] = price/quantity <NEW_LINE> self.dfop = self.dfop.append(new_row, ignore_index=True)
|
Manager for Crypto operations
|
6259902e9b70327d1c57fde3
|
class Authorization(object): <NEW_LINE> <INDENT> def __get__(self, instance, owner): <NEW_LINE> <INDENT> self.resource_meta = instance <NEW_LINE> return self <NEW_LINE> <DEDENT> def read_list(self, object_list, bundle): <NEW_LINE> <INDENT> return object_list <NEW_LINE> <DEDENT> def read_detail(self, object_list, bundle): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def create_list(self, object_list, bundle): <NEW_LINE> <INDENT> raise NotImplementedError("BoxMeAPI has no way to determine if all objects should be allowed to be created.") <NEW_LINE> <DEDENT> def create_detail(self, object_list, bundle): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def update_list(self, object_list, bundle): <NEW_LINE> <INDENT> return object_list <NEW_LINE> <DEDENT> def update_detail(self, object_list, bundle): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def delete_list(self, object_list, bundle): <NEW_LINE> <INDENT> return object_list <NEW_LINE> <DEDENT> def delete_detail(self, object_list, bundle): <NEW_LINE> <INDENT> return True
|
A base class that provides no permissions checking.
|
6259902eac7a0e7691f73548
|
class AdaptiveDetrend(CtrlNode): <NEW_LINE> <INDENT> nodeName = 'AdaptiveDetrend' <NEW_LINE> uiTemplate = [ ('threshold', 'doubleSpin', {'value': 3.0, 'min': 0, 'max': 1000000}) ] <NEW_LINE> def processData(self, data): <NEW_LINE> <INDENT> return functions.adaptiveDetrend(data, threshold=self.ctrls['threshold'].value())
|
Removes baseline from data, ignoring anomalous events
|
6259902e63f4b57ef00865a2
|
class StabilityShares(Bitcoin): <NEW_LINE> <INDENT> name = 'stability_shares' <NEW_LINE> symbols = ('XSS', ) <NEW_LINE> nodes = ("80.112.144.84", "82.139.127.205", "23.253.82.83", "27.33.1.58", "87.147.43.53", "174.108.122.202", "204.195.130.236", "92.55.41.212", "88.127.170.75", "94.23.196.92") <NEW_LINE> port = 7711 <NEW_LINE> message_start = b'\x4d\xd2\xf5\xc6' <NEW_LINE> base58_prefixes = { 'PUBKEY_ADDR': 76, 'SCRIPT_ADDR': 5, 'SECRET_KEY': 204 }
|
Class with all the necessary Stability Shares network information based on
https://bitcointalk.org/index.php?topic=490529.0
(date of access: 02/16/2018)
|
6259902e6fece00bbaccca0e
|
class Variable(NameBlock): <NEW_LINE> <INDENT> KIND = "variable" <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return "#<%s %s %s>" % (self.KIND, self.name, self.body.get("default", None))
|
``variable`` block. Exposes `.name` as a property.
If you're defining a variable without a type or a description (90%
of the cases in practice), it is probably easier to use
:func:`variables <p10s.terraform.variables>`.
|
6259902ed4950a0f3b11166d
|
class Container: <NEW_LINE> <INDENT> def __init__(self,saveprefix,savefolder,plotformat,overwrite): <NEW_LINE> <INDENT> self.data = {'saveprefix':saveprefix, 'savefolder':savefolder, 'plotformat':plotformat, 'overwrite':overwrite } <NEW_LINE> self._prep_container() <NEW_LINE> <DEDENT> def _prep_container(self): <NEW_LINE> <INDENT> tmp = glob.glob('*') <NEW_LINE> if self.data['savefolder'] in tmp: <NEW_LINE> <INDENT> sentinel = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sentinel = False <NEW_LINE> <DEDENT> if not sentinel: <NEW_LINE> <INDENT> os.mkdir(self.data['savefolder']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not self.data['overwrite']: <NEW_LINE> <INDENT> string = 'Folder {0} already exists. To create a fresh folder, set overwrite = True'.format(self.data['savefolder']) <NEW_LINE> warnings.warn(string) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> os.system('rm -r ./{0}'.format(self.data['savefolder'])) <NEW_LINE> os.mkdir(self.data['savefolder'])
|
Container is a class facilitating how the output files would be named, and packaged in a folder.
Name convention of HSTPHOT files is ./savefolder/saveprefix_suffix.ext.
suffix and ext would be chosen internally in the pipeline. If it is a graphic file such as plots, plotformat determines its ext.
overwrite, if set True, an existing folder with the same name would be removed at the beginning.
|
6259902e8c3a8732951f75b7
|
class Smoke1(module_framework.AvocadoTest): <NEW_LINE> <INDENT> def test_uname(self): <NEW_LINE> <INDENT> self.start() <NEW_LINE> self.run("uname | grep Linux") <NEW_LINE> <DEDENT> def test_echo(self): <NEW_LINE> <INDENT> self.start() <NEW_LINE> self.runHost("echo test | grep test")
|
:avocado: enable
|
6259902ed99f1b3c44d06703
|
@with_author <NEW_LINE> class PriceLevel(TimeStampedModel, models.Model): <NEW_LINE> <INDENT> pricable = models.ForeignKey( settings.PRICE_LEVEL_MODEL, verbose_name=_("Pricable"), on_delete=models.CASCADE, ) <NEW_LINE> name = models.CharField( verbose_name=_("Name"), max_length=127, ) <NEW_LINE> price = models.FloatField( verbose_name=_("Price"), validators=[ MinValueValidator(1), ], default=100, ) <NEW_LINE> category = models.CharField( verbose_name=_("Category"), max_length=20, choices=settings.PRICE_LEVEL_CATEGORY_CHOICES, default=settings.PRICE_LEVEL_CATEGORY_DEFAULT, ) <NEW_LINE> takes_effect_on = models.DateTimeField( verbose_name=_("Date, when this takes effect"), ) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name
|
Stores price levels.
|
6259902e6fece00bbaccca0f
|
class TestenvConfig: <NEW_LINE> <INDENT> def __init__(self, envname, config, factors, reader): <NEW_LINE> <INDENT> self.envname = envname <NEW_LINE> self.config = config <NEW_LINE> self.factors = factors <NEW_LINE> self._reader = reader <NEW_LINE> <DEDENT> def get_envbindir(self): <NEW_LINE> <INDENT> if (sys.platform == "win32" and "jython" not in self.basepython and "pypy" not in self.basepython): <NEW_LINE> <INDENT> return self.envdir.join("Scripts") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.envdir.join("bin") <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def envbindir(self): <NEW_LINE> <INDENT> return self.get_envbindir() <NEW_LINE> <DEDENT> @property <NEW_LINE> def envpython(self): <NEW_LINE> <INDENT> return self.get_envpython() <NEW_LINE> <DEDENT> def get_envpython(self): <NEW_LINE> <INDENT> if "jython" in str(self.basepython): <NEW_LINE> <INDENT> name = "jython" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> name = "python" <NEW_LINE> <DEDENT> return self.envbindir.join(name) <NEW_LINE> <DEDENT> def get_envsitepackagesdir(self): <NEW_LINE> <INDENT> x = self.config.interpreters.get_sitepackagesdir( info=self.python_info, envdir=self.envdir) <NEW_LINE> return x <NEW_LINE> <DEDENT> @property <NEW_LINE> def python_info(self): <NEW_LINE> <INDENT> return self.config.interpreters.get_info(envconfig=self) <NEW_LINE> <DEDENT> def getsupportedinterpreter(self): <NEW_LINE> <INDENT> if sys.platform == "win32" and self.basepython and "jython" in self.basepython: <NEW_LINE> <INDENT> raise tox.exception.UnsupportedInterpreter( "Jython/Windows does not support installing scripts") <NEW_LINE> <DEDENT> info = self.config.interpreters.get_info(envconfig=self) <NEW_LINE> if not info.executable: <NEW_LINE> <INDENT> raise tox.exception.InterpreterNotFound(self.basepython) <NEW_LINE> <DEDENT> if not info.version_info: <NEW_LINE> <INDENT> raise tox.exception.InvocationError( 'Failed to get version_info for %s: %s' % (info.name, info.err)) <NEW_LINE> <DEDENT> if 
info.version_info < (2, 6): <NEW_LINE> <INDENT> raise tox.exception.UnsupportedInterpreter( "python2.5 is not supported anymore, sorry") <NEW_LINE> <DEDENT> return info.executable
|
Testenv Configuration object.
In addition to some core attributes/properties this config object holds all
per-testenv ini attributes as attributes, see "tox --help-ini" for an overview.
|
6259902e796e427e5384f7dc
|
class PlayerState: <NEW_LINE> <INDENT> def __init__(self, world, player_index, cards=[]): <NEW_LINE> <INDENT> self.world = world <NEW_LINE> self.player_index = player_index <NEW_LINE> self.cards = cards.copy() <NEW_LINE> self.world.n_cards[self.player_index] = len(self.cards) <NEW_LINE> <DEDENT> def _add_cards(self, cards_to_add): <NEW_LINE> <INDENT> self.cards += cards_to_add <NEW_LINE> self.world.n_cards[self.player_index] = len(self.cards) <NEW_LINE> <DEDENT> def _remove_cards(self, cards_to_remove): <NEW_LINE> <INDENT> for card in cards_to_remove: <NEW_LINE> <INDENT> self.cards.remove(card) <NEW_LINE> <DEDENT> self.world.n_cards[self.player_index] = len(self.cards) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> my_territories = self.my_territories <NEW_LINE> my_armies = sum(self.world.armies[t] for t in my_territories) <NEW_LINE> return 'PlayerState[index={}, territories={}/{}, armies={}/{}, cards={}]'.format( self.player_index, len(my_territories), self.map.n_territories, my_armies, sum(self.world.armies), len(self.cards), ) <NEW_LINE> <DEDENT> def _repr_svg_(self): <NEW_LINE> <INDENT> return _View.to_svg(_View.world_to_graph(self.world, player_index=self.player_index)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def map(self): <NEW_LINE> <INDENT> return self.world.map <NEW_LINE> <DEDENT> @property <NEW_LINE> def my_territories(self): <NEW_LINE> <INDENT> return self.world.territories_belonging_to(self.player_index)
|
The current world's state, as viewed by a specific player.
|
6259902e0a366e3fb87dda48
|
class PropertiesDifference(Difference): <NEW_LINE> <INDENT> modified_properties = DiffResultDescriptor("diff_properties") <NEW_LINE> def diff_properties(self): <NEW_LINE> <INDENT> self.modified_properties = [] <NEW_LINE> if self.left_policy.handle_unknown != self.right_policy.handle_unknown: <NEW_LINE> <INDENT> self.modified_properties.append( modified_properties_record("handle_unknown", self.right_policy.handle_unknown, self.left_policy.handle_unknown)) <NEW_LINE> <DEDENT> if self.left_policy.mls != self.right_policy.mls: <NEW_LINE> <INDENT> self.modified_properties.append( modified_properties_record("MLS", self.right_policy.mls, self.left_policy.mls)) <NEW_LINE> <DEDENT> if self.left_policy.version != self.right_policy.version: <NEW_LINE> <INDENT> self.modified_properties.append( modified_properties_record("version", self.right_policy.version, self.left_policy.version)) <NEW_LINE> <DEDENT> <DEDENT> def _reset_diff(self): <NEW_LINE> <INDENT> self.log.debug("Resetting property differences") <NEW_LINE> self.modified_properties = None
|
Determine the difference in policy properties
(unknown permissions, MLS, etc.) between two policies.
|
6259902ec432627299fa4055
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.