code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
|---|---|---|
class AppErr(Exception): <NEW_LINE> <INDENT> pass
|
An exception for all application errors.
|
625990410fa83653e46f617f
|
class PathError(FileSysObjectsError): <NEW_LINE> <INDENT> pass
|
Path error.
|
62599041b57a9660fecd2d20
|
class DurationEdit(QtWidgets.QFormLayout): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self.hours_widget = QtWidgets.QSpinBox() <NEW_LINE> self.addRow("Hours", self.hours_widget) <NEW_LINE> self.minutes_widget = QtWidgets.QSpinBox() <NEW_LINE> self.minutes_widget.setRange(0, 59) <NEW_LINE> self.addRow("Minutes", self.minutes_widget) <NEW_LINE> self.seconds_widget = QtWidgets.QDoubleSpinBox() <NEW_LINE> self.seconds_widget.setRange(0, 59.99) <NEW_LINE> self.addRow("Seconds", self.seconds_widget) <NEW_LINE> <DEDENT> def value(self): <NEW_LINE> <INDENT> return timedelta( hours=self.hours_widget.value(), minutes=self.minutes_widget.value(), seconds=self.seconds_widget.value(), ) <NEW_LINE> <DEDENT> def set_value(self, new: timedelta): <NEW_LINE> <INDENT> hours, minutes, seconds = times.hours_minutes_seconds(new) <NEW_LINE> self.hours_widget.setValue(hours) <NEW_LINE> self.minutes_widget.setValue(minutes) <NEW_LINE> self.seconds_widget.setValue(seconds)
|
A widget to allow editing an hours minute seconds duration.
|
6259904121bff66bcd723f0f
|
class OUNoise: <NEW_LINE> <INDENT> def __init__( self, size: int, mu: float = 0.0, theta: float = 0.15, sigma: float = 0.2 ): <NEW_LINE> <INDENT> self.state = np.float64(0.0) <NEW_LINE> self.mu = mu * np.ones(size) <NEW_LINE> self.theta = theta <NEW_LINE> self.sigma = sigma <NEW_LINE> self.reset() <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.state = copy.copy(self.mu) <NEW_LINE> <DEDENT> def sample(self) -> float: <NEW_LINE> <INDENT> x = self.state <NEW_LINE> dx = self.theta * (self.mu - x) + self.sigma * np.array( [random.random() for _ in range(len(x))] ) <NEW_LINE> self.state = x + dx <NEW_LINE> return self.state
|
Ornstein-Uhlenbeck process.
Taken from Udacity deep-reinforcement-learning github repository:
https://github.com/udacity/deep-reinforcement-learning/blob/master/
ddpg-pendulum/ddpg_agent.py
|
625990411f5feb6acb163e98
|
class LongGapAGNMetric(BaseMetric): <NEW_LINE> <INDENT> def __init__(self, metricName='longGapAGNMetric', mjdcol='observationStartMJD', units='days', xgaps=10, badval=-666, **kwargs): <NEW_LINE> <INDENT> cols = [mjdcol] <NEW_LINE> super(LongGapAGNMetric, self).__init__(cols, metricName, units=units, **kwargs) <NEW_LINE> self.badval = badval <NEW_LINE> self.mjdcol = mjdcol <NEW_LINE> self.xgaps = xgaps <NEW_LINE> self.units = units <NEW_LINE> <DEDENT> def run(self, dataslice, slicePoint=None): <NEW_LINE> <INDENT> metricval = np.diff(dataslice[self.mjdcol]) <NEW_LINE> return metricval <NEW_LINE> <DEDENT> def reduceMaxGap(self, metricval): <NEW_LINE> <INDENT> if metricval.size > 0: <NEW_LINE> <INDENT> result = np.max(metricval) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = self.badval <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def reduceAverageLongestXGaps(self, metricval): <NEW_LINE> <INDENT> if np.size(metricval)-self.xgaps > 0: <NEW_LINE> <INDENT> return np.average(np.sort(metricval)[np.size(metricval)-self.xgaps:]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.badval
|
max delta-t and average of the top-10 longest gaps.
|
6259904130c21e258be99ab2
|
class FuelTypes (models.Model): <NEW_LINE> <INDENT> class Meta (object): <NEW_LINE> <INDENT> db_table = 'fuel_types' <NEW_LINE> <DEDENT> id = models.AutoField (primary_key = True) <NEW_LINE> name = models.CharField (max_length = 30, null = False) <NEW_LINE> comment = models.CharField (max_length = 100) <NEW_LINE> def __str__ (self): <NEW_LINE> <INDENT> return self.name
|
Типы топлива
|
6259904124f1403a9268621f
|
class EdgeError(GraphError): <NEW_LINE> <INDENT> def __init__(self, edge): <NEW_LINE> <INDENT> self.edge = edge
|
Consider raising any child of :class:`EdgeError` instead.
|
625990418da39b475be04493
|
class Binding(object): <NEW_LINE> <INDENT> def __init__(self, variable, constant): <NEW_LINE> <INDENT> super(Binding, self).__init__() <NEW_LINE> self.variable = variable <NEW_LINE> self.constant = constant <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'Binding({!r}, {!r})'.format(self.variable, self.constant) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.variable.element.upper() + " : " + self.constant.element
|
Represents a binding of a constant to a variable, e.g. 'Nosliw' might be
bound to '?d'.
Attributes:
variable (Variable): The name of the variable associated with this binding
constant (Constant): The value of the variable
|
6259904123e79379d538d7a4
|
class GetAccountInfoResponse(object): <NEW_LINE> <INDENT> swagger_types = { 'request_id': 'str', 'took': 'float', 'data': 'AccountInfo' } <NEW_LINE> attribute_map = { 'request_id': 'requestId', 'took': 'took', 'data': 'data' } <NEW_LINE> def __init__(self, request_id=None, took=0.0, data=None): <NEW_LINE> <INDENT> self._request_id = None <NEW_LINE> self._took = None <NEW_LINE> self._data = None <NEW_LINE> self.discriminator = None <NEW_LINE> self.request_id = request_id <NEW_LINE> self.took = took <NEW_LINE> if data is not None: <NEW_LINE> <INDENT> self.data = data <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def request_id(self): <NEW_LINE> <INDENT> return self._request_id <NEW_LINE> <DEDENT> @request_id.setter <NEW_LINE> def request_id(self, request_id): <NEW_LINE> <INDENT> if request_id is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `request_id`, must not be `None`") <NEW_LINE> <DEDENT> self._request_id = request_id <NEW_LINE> <DEDENT> @property <NEW_LINE> def took(self): <NEW_LINE> <INDENT> return self._took <NEW_LINE> <DEDENT> @took.setter <NEW_LINE> def took(self, took): <NEW_LINE> <INDENT> if took is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `took`, must not be `None`") <NEW_LINE> <DEDENT> self._took = took <NEW_LINE> <DEDENT> @property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> return self._data <NEW_LINE> <DEDENT> @data.setter <NEW_LINE> def data(self, data): <NEW_LINE> <INDENT> self._data = data <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = 
dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, GetAccountInfoResponse): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
|
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
|
6259904107f4c71912bb06d7
|
class StdOut: <NEW_LINE> <INDENT> def __init__(self, stream, modulenames): <NEW_LINE> <INDENT> self.stdout = stream <NEW_LINE> self.modulenames = modulenames <NEW_LINE> <DEDENT> def __getattr__(self, attribute): <NEW_LINE> <INDENT> if attribute not in self.__dict__ or attribute == '__doc__': <NEW_LINE> <INDENT> return getattr(self.stdout, attribute) <NEW_LINE> <DEDENT> return self.__dict__[attribute] <NEW_LINE> <DEDENT> def flush(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def write(self, inline): <NEW_LINE> <INDENT> frame = inspect.currentframe().f_back <NEW_LINE> if frame: <NEW_LINE> <INDENT> mod = frame.f_globals.get('__name__') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mod = sys._getframe(0).f_globals.get('__name__') <NEW_LINE> <DEDENT> if not mod in self.modulenames: <NEW_LINE> <INDENT> self.stdout.write(inline) <NEW_LINE> <DEDENT> <DEDENT> def writelines(self, inline): <NEW_LINE> <INDENT> for line in inline: <NEW_LINE> <INDENT> self.write(line)
|
Filter stdout or stderr from specific modules
So far this is just used for pkg_resources
|
62599041d164cc617582221c
|
class Region(MPTTModel): <NEW_LINE> <INDENT> parent = TreeForeignKey( to='self', on_delete=models.CASCADE, related_name='children', blank=True, null=True, db_index=True ) <NEW_LINE> name = models.CharField( max_length=50, unique=True ) <NEW_LINE> slug = models.SlugField( unique=True ) <NEW_LINE> class MPTTMeta: <NEW_LINE> <INDENT> order_insertion_by = ['name'] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def get_absolute_url(self): <NEW_LINE> <INDENT> return "{}?region={}".format(reverse('organisation:location_list'), self.slug)
|
Country/Region/City where a place is located. Every instance of this class can be a parent for another one.
|
625990414e696a045264e774
|
class GetVideoByReadableIDResultSet(ResultSet): <NEW_LINE> <INDENT> def get_Response(self): <NEW_LINE> <INDENT> return self._output.get('Response', None)
|
Retrieve the value for the "Response" output from this choreography execution. ((json) The response from Khan Academy.)
|
62599041b5575c28eb71361c
|
class AlphaBetaAgent(MultiAgentSearchAgent): <NEW_LINE> <INDENT> def getAction(self, gameState): <NEW_LINE> <INDENT> alpha = float("-inf") <NEW_LINE> beta = float("inf") <NEW_LINE> def max_value(gameState, depth, alpha, beta): <NEW_LINE> <INDENT> actions = gameState.getLegalActions(0) <NEW_LINE> if not actions or depth < 0: <NEW_LINE> <INDENT> return (self.evaluationFunction(gameState), None) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> v = float("-inf") <NEW_LINE> best_action = None <NEW_LINE> for action in actions: <NEW_LINE> <INDENT> temp = min_value(1, gameState.generateSuccessor(0, action), depth, alpha, beta)[0] <NEW_LINE> if v < temp: <NEW_LINE> <INDENT> v = temp <NEW_LINE> best_action = action <NEW_LINE> <DEDENT> if v > beta: <NEW_LINE> <INDENT> return (v, best_action) <NEW_LINE> <DEDENT> alpha = max(alpha, v) <NEW_LINE> <DEDENT> return (v, best_action) <NEW_LINE> <DEDENT> <DEDENT> def min_value(agentIndex, gameState, depth, alpha, beta): <NEW_LINE> <INDENT> actions = gameState.getLegalActions(agentIndex) <NEW_LINE> if not actions or depth < 0: <NEW_LINE> <INDENT> return (self.evaluationFunction(gameState), None) <NEW_LINE> <DEDENT> next_agent = (agentIndex + 1) % gameState.getNumAgents() <NEW_LINE> v = float("inf") <NEW_LINE> best_action = None <NEW_LINE> for action in actions: <NEW_LINE> <INDENT> temp = next_value(next_agent, gameState.generateSuccessor(agentIndex, action), depth, alpha, beta)[0] <NEW_LINE> if v > temp: <NEW_LINE> <INDENT> v = temp <NEW_LINE> best_action = action <NEW_LINE> <DEDENT> if v < alpha: <NEW_LINE> <INDENT> return (v, best_action) <NEW_LINE> <DEDENT> beta = min(beta, v) <NEW_LINE> <DEDENT> return (v, best_action) <NEW_LINE> <DEDENT> def next_value(agentIndex, gameState, depth, alpha, beta): <NEW_LINE> <INDENT> if agentIndex == 0: <NEW_LINE> <INDENT> return max_value(gameState, depth - 1, alpha, beta) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return min_value(agentIndex, gameState, depth, alpha, beta) <NEW_LINE> <DEDENT> 
<DEDENT> return next_value(self.index, gameState, self.depth, alpha, beta)[1]
|
Your minimax agent with alpha-beta pruning (question 3)
|
6259904163b5f9789fe86411
|
class KMP: <NEW_LINE> <INDENT> def __init__(self,pattern,text): <NEW_LINE> <INDENT> self.pattern = pattern <NEW_LINE> self.text = text <NEW_LINE> self._prefix = [] <NEW_LINE> <DEDENT> def build_prefix(self): <NEW_LINE> <INDENT> pattern = self.pattern <NEW_LINE> m = len(pattern) <NEW_LINE> p = [None]*m <NEW_LINE> p[0] = 0 <NEW_LINE> k = 0 <NEW_LINE> for i in range(1,m): <NEW_LINE> <INDENT> while k > 0 and pattern[i] != pattern[k]: <NEW_LINE> <INDENT> k = p[k-1] <NEW_LINE> <DEDENT> if pattern[k] == pattern[i]: <NEW_LINE> <INDENT> k = k+1 <NEW_LINE> <DEDENT> p[i] = k <NEW_LINE> <DEDENT> self._prefix = p <NEW_LINE> <DEDENT> def match(self): <NEW_LINE> <INDENT> results = [] <NEW_LINE> pattern = self.pattern <NEW_LINE> text = self.text <NEW_LINE> m = len(self.pattern) <NEW_LINE> n = len(self.text) <NEW_LINE> p = self._prefix <NEW_LINE> k = 0 <NEW_LINE> for i in range(n): <NEW_LINE> <INDENT> while k > 0 and text[i] != pattern[k]: <NEW_LINE> <INDENT> k = p[k-1] <NEW_LINE> <DEDENT> if pattern[k] == text[i]: <NEW_LINE> <INDENT> k = k+1 <NEW_LINE> <DEDENT> if k == m: <NEW_LINE> <INDENT> results.append(i-m+1) <NEW_LINE> k = p[k-1] <NEW_LINE> <DEDENT> <DEDENT> return results
|
KMP(pattern, text) -> find all pattern matches in text
Attributes:
text Text
pattern Pattern
prefix Prefix function for pattern
|
6259904123849d37ff852360
|
class SleepIQData(object): <NEW_LINE> <INDENT> def __init__(self, client): <NEW_LINE> <INDENT> self._client = client <NEW_LINE> self.beds = {} <NEW_LINE> self.update() <NEW_LINE> <DEDENT> @Throttle(MIN_TIME_BETWEEN_UPDATES) <NEW_LINE> def update(self): <NEW_LINE> <INDENT> self._client.login() <NEW_LINE> beds = self._client.beds_with_sleeper_status() <NEW_LINE> self.beds = {bed.bed_id: bed for bed in beds}
|
Gets the latest data from SleepIQ.
|
62599041507cdc57c63a6042
|
class HelperTests(TestCase): <NEW_LINE> <INDENT> skip = cryptSkip or dependencySkip <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> self.mockos = MockOS() <NEW_LINE> <DEDENT> def test_verifyCryptedPassword(self): <NEW_LINE> <INDENT> password = 'secret string' <NEW_LINE> salt = 'salty' <NEW_LINE> crypted = crypt.crypt(password, salt) <NEW_LINE> self.assertTrue( checkers.verifyCryptedPassword(crypted, password), '%r supposed to be valid encrypted password for %r' % ( crypted, password)) <NEW_LINE> <DEDENT> def test_verifyCryptedPasswordMD5(self): <NEW_LINE> <INDENT> password = 'password' <NEW_LINE> salt = '$1$salt' <NEW_LINE> crypted = crypt.crypt(password, salt) <NEW_LINE> self.assertTrue( checkers.verifyCryptedPassword(crypted, password), '%r supposed to be valid encrypted password for %s' % ( crypted, password)) <NEW_LINE> <DEDENT> def test_refuteCryptedPassword(self): <NEW_LINE> <INDENT> password = 'string secret' <NEW_LINE> wrong = 'secret string' <NEW_LINE> crypted = crypt.crypt(password, password) <NEW_LINE> self.assertFalse( checkers.verifyCryptedPassword(crypted, wrong), '%r not supposed to be valid encrypted password for %s' % ( crypted, wrong)) <NEW_LINE> <DEDENT> def test_pwdGetByName(self): <NEW_LINE> <INDENT> userdb = UserDatabase() <NEW_LINE> userdb.addUser( 'alice', 'secrit', 1, 2, 'first last', '/foo', '/bin/sh') <NEW_LINE> self.patch(checkers, 'pwd', userdb) <NEW_LINE> self.assertEqual( checkers._pwdGetByName('alice'), userdb.getpwnam('alice')) <NEW_LINE> <DEDENT> def test_pwdGetByNameWithoutPwd(self): <NEW_LINE> <INDENT> self.patch(checkers, 'pwd', None) <NEW_LINE> self.assertIsNone(checkers._pwdGetByName('alice')) <NEW_LINE> <DEDENT> def test_shadowGetByName(self): <NEW_LINE> <INDENT> userdb = ShadowDatabase() <NEW_LINE> userdb.addUser('bob', 'passphrase', 1, 2, 3, 4, 5, 6, 7) <NEW_LINE> self.patch(checkers, 'spwd', userdb) <NEW_LINE> self.mockos.euid = 2345 <NEW_LINE> self.mockos.egid = 1234 <NEW_LINE> self.patch(util, 'os', self.mockos) 
<NEW_LINE> self.assertEqual( checkers._shadowGetByName('bob'), userdb.getspnam('bob')) <NEW_LINE> self.assertEqual(self.mockos.seteuidCalls, [0, 2345]) <NEW_LINE> self.assertEqual(self.mockos.setegidCalls, [0, 1234]) <NEW_LINE> <DEDENT> def test_shadowGetByNameWithoutSpwd(self): <NEW_LINE> <INDENT> self.patch(checkers, 'spwd', None) <NEW_LINE> self.assertIsNone(checkers._shadowGetByName('bob')) <NEW_LINE> self.assertEqual(self.mockos.seteuidCalls, []) <NEW_LINE> self.assertEqual(self.mockos.setegidCalls, [])
|
Tests for helper functions L{verifyCryptedPassword}, L{_pwdGetByName} and
L{_shadowGetByName}.
|
625990411d351010ab8f4dc4
|
class TemporaryFilter(QObject): <NEW_LINE> <INDENT> def __init__(self, filepath): <NEW_LINE> <INDENT> super(TemporaryFilter, self).__init__() <NEW_LINE> filepath = os.path.abspath(filepath) <NEW_LINE> if not os.path.isfile(filepath): <NEW_LINE> <INDENT> raise IOError("Assertion Error: os.path.isfile(filepath)") <NEW_LINE> <DEDENT> self.filepath = filepath <NEW_LINE> <DEDENT> def eventFilter(self, obj, event): <NEW_LINE> <INDENT> def is_colors_dialog(): <NEW_LINE> <INDENT> return isinstance( obj, QDialog) and 'IDA Colors' in obj.windowTitle() <NEW_LINE> <DEDENT> if isinstance(event, QShowEvent) and is_colors_dialog(): <NEW_LINE> <INDENT> qApp.removeEventFilter(self) <NEW_LINE> obj.windowHandle().setOpacity(0) <NEW_LINE> buttons = [widget for widget in obj.children() if isinstance( widget, QDialogButtonBox)][0] <NEW_LINE> button = [widget for widget in buttons.buttons() if widget.text() == '&Import'][0] <NEW_LINE> with NativeHook(ask_file=self.ask_file_handler): <NEW_LINE> <INDENT> button.click() <NEW_LINE> <DEDENT> QTimer.singleShot(0, lambda: obj.accept()) <NEW_LINE> return 1 <NEW_LINE> <DEDENT> return 0 <NEW_LINE> <DEDENT> def ask_file_handler(self): <NEW_LINE> <INDENT> return create_string_buffer(self.filepath)
|
Temporary event filter installed at qApp to catch events
while executing QDialog::exec.
The filter automatically clicks &Import button,
and automatically selects file by using native ui hooks.
|
62599041cad5886f8bdc59d0
|
class NotInstalled(PloneSite): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUp(cls): <NEW_LINE> <INDENT> fiveconfigure.debug_mode = True <NEW_LINE> import quintagroup.captcha.core <NEW_LINE> import quintagroup.plonecaptchas <NEW_LINE> zcml.load_config('configure.zcml', quintagroup.captcha.core) <NEW_LINE> zcml.load_config('configure.zcml', quintagroup.plonecaptchas) <NEW_LINE> fiveconfigure.debug_mode = False <NEW_LINE> ztc.installPackage('quintagroup.captcha.core') <NEW_LINE> ztc.installPackage('quintagroup.plonecaptchas')
|
Only registers the package, without installing it into the portal.
|
6259904129b78933be26aa16
|
class VcsCfg(A10BaseClass): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.ERROR_MSG = "" <NEW_LINE> self.b_key = "vcs-cfg" <NEW_LINE> self.DeviceProxy = "" <NEW_LINE> self.vcs_status = "" <NEW_LINE> for keys, value in kwargs.items(): <NEW_LINE> <INDENT> setattr(self,keys, value)
|
This class does not support CRUD Operations please use parent.
:param vcs_status: {"default": 0, "type": "number", "description": "Display VCS status in prompt, eg. vMaster, vBlade", "format": "flag"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
|
6259904123e79379d538d7a5
|
class LSTMEncoder(nn.Module): <NEW_LINE> <INDENT> def __init__(self, in_features, hidden_size: int = 200, batch_first: bool = True, bidirectional: bool = True): <NEW_LINE> <INDENT> super(LSTMEncoder, self).__init__() <NEW_LINE> self.lstm = nn.LSTM(in_features, hidden_size, batch_first=batch_first, bidirectional=bidirectional) <NEW_LINE> <DEDENT> def forward(self, x, mask, lengths): <NEW_LINE> <INDENT> packed_sequence = pack_padded_sequence(x, lengths, batch_first=True) <NEW_LINE> outputs, (hx, cx) = self.lstm(packed_sequence) <NEW_LINE> outputs, _ = pad_packed_sequence(outputs, batch_first=True) <NEW_LINE> if self.lstm.bidirectional: <NEW_LINE> <INDENT> final = torch.cat([hx[-2], hx[-1]], dim=-1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> final = hx[-1] <NEW_LINE> <DEDENT> return outputs, final
|
This module encodes a sequence into a single vector using an LSTM.
|
6259904126068e7796d4dbed
|
class EventCRCError(Exception): <NEW_LINE> <INDENT> pass
|
Error for when an event has a mismatched CRC.
|
6259904115baa72349463238
|
class ClusterModule(MultyvacModule): <NEW_LINE> <INDENT> def get(self, id): <NEW_LINE> <INDENT> r = self.multyvac._ask(Multyvac._ASK_GET, '/cluster/%s' % id) <NEW_LINE> c = r['cluster'] <NEW_LINE> if c['requested_at']: <NEW_LINE> <INDENT> c['requested_at'] = MultyvacModule.convert_str_to_datetime(c['requested_at']) <NEW_LINE> <DEDENT> if c['provisioned_at']: <NEW_LINE> <INDENT> c['provisioned_at'] = MultyvacModule.convert_str_to_datetime(c['provisioned_at']) <NEW_LINE> <DEDENT> if c['released_at']: <NEW_LINE> <INDENT> c['released_at'] = MultyvacModule.convert_str_to_datetime(c['released_at']) <NEW_LINE> <DEDENT> return Cluster(multyvac=self.multyvac, **r['cluster']) <NEW_LINE> <DEDENT> def list(self): <NEW_LINE> <INDENT> r = self.multyvac._ask(Multyvac._ASK_GET, '/cluster') <NEW_LINE> for c in r['clusters']: <NEW_LINE> <INDENT> if c['requested_at']: <NEW_LINE> <INDENT> c['requested_at'] = MultyvacModule.convert_str_to_datetime(c['requested_at']) <NEW_LINE> <DEDENT> if c['provisioned_at']: <NEW_LINE> <INDENT> c['provisioned_at'] = MultyvacModule.convert_str_to_datetime(c['provisioned_at']) <NEW_LINE> <DEDENT> if c['released_at']: <NEW_LINE> <INDENT> c['released_at'] = MultyvacModule.convert_str_to_datetime(c['released_at']) <NEW_LINE> <DEDENT> <DEDENT> return [Cluster(multyvac=self.multyvac, **cluster) for cluster in r['clusters']] <NEW_LINE> <DEDENT> def provision(self, core, core_count, max_duration=None): <NEW_LINE> <INDENT> cluster = {'core': core, 'core_count': core_count, 'max_duration': max_duration, } <NEW_LINE> MultyvacModule.clear_null_entries(cluster) <NEW_LINE> payload = {'cluster': cluster} <NEW_LINE> headers = {'content-type': 'application/json'} <NEW_LINE> r = self.multyvac._ask(Multyvac._ASK_POST, '/cluster', data=json.dumps(payload), headers=headers) <NEW_LINE> return r['id']
|
Top-level Cluster module. Use this through ``multyvac.cluster``.
|
62599041a79ad1619776b326
|
class LoanRequestForm(ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = LoanRequest <NEW_LINE> fields = ['dni', 'first_name', 'last_name', 'email', 'gender', 'loan_amount'] <NEW_LINE> <DEDENT> def clean(self): <NEW_LINE> <INDENT> cleaned_data = super(LoanRequestForm, self).clean() <NEW_LINE> email = cleaned_data.get('email') <NEW_LINE> first_name = cleaned_data.get('first_name') <NEW_LINE> last_name = cleaned_data.get('last_name') <NEW_LINE> gender = cleaned_data.get('gender') <NEW_LINE> dni = cleaned_data.get('dni') <NEW_LINE> loan_amount = cleaned_data.get('loan_amount') <NEW_LINE> if not all([email, first_name, last_name, gender, dni, loan_amount]): <NEW_LINE> <INDENT> raise ValidationError('All fields are mandatory!')
|
Loan request form.
|
6259904130c21e258be99ab4
|
class Bundle(FileWrapperMixin): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def from_file(file_obj): <NEW_LINE> <INDENT> if not isinstance(file_obj, File): <NEW_LINE> <INDENT> file_obj = File(file_obj, binary=False) <NEW_LINE> <DEDENT> return Bundle(file_obj) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def fromFile(file_obj): <NEW_LINE> <INDENT> warnings.warn("Use from_file function", DeprecationWarning) <NEW_LINE> return Bundle.from_file(file_obj) <NEW_LINE> <DEDENT> def package_specs(self): <NEW_LINE> <INDENT> self._file.seek(0) <NEW_LINE> bundle = None <NEW_LINE> try: <NEW_LINE> <INDENT> bundle = json.load(self._file) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if bundle is None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> bundle = yaml.safe_load(self._file) <NEW_LINE> <DEDENT> except yaml.error.YAMLError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> if bundle is None or 'Packages' not in bundle: <NEW_LINE> <INDENT> raise ValueError("Can't parse bundle contents") <NEW_LINE> <DEDENT> for package in bundle['Packages']: <NEW_LINE> <INDENT> if 'Name' not in package: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> yield package <NEW_LINE> <DEDENT> <DEDENT> def packages(self, base_url='', path=None): <NEW_LINE> <INDENT> for package in self.package_specs(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> pkg_obj = Package.from_location( package['Name'], version=package.get('Version'), url=package.get('Url'), path=path, base_url=base_url, ) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> LOG.error("Error {0} occurred while obtaining " "package {1}".format(e, package['Name'])) <NEW_LINE> continue <NEW_LINE> <DEDENT> yield pkg_obj
|
Represents murano bundle contents.
|
62599041d99f1b3c44d06944
|
class OperationParametersWidget(QWidget): <NEW_LINE> <INDENT> def __init__(self, parent=None): <NEW_LINE> <INDENT> super(OperationParametersWidget, self).__init__(parent=parent) <NEW_LINE> <DEDENT> @property <NEW_LINE> def params(self): <NEW_LINE> <INDENT> return {}
|
A base class for widgets that specify operation parameters.
|
62599041baa26c4b54d50550
|
class GoogleActionsSyncView(OpenPeerPowerView): <NEW_LINE> <INDENT> url = "/api/cloud/google_actions/sync" <NEW_LINE> name = "api:cloud:google_actions/sync" <NEW_LINE> @_handle_cloud_errors <NEW_LINE> async def post(self, request): <NEW_LINE> <INDENT> opp = request.app["opp"] <NEW_LINE> cloud: Cloud = opp.data[DOMAIN] <NEW_LINE> gconf = await cloud.client.get_google_config() <NEW_LINE> status = await gconf.async_sync_entities(gconf.agent_user_id) <NEW_LINE> return self.json({}, status_code=status)
|
Trigger a Google Actions Smart Home Sync.
|
6259904115baa72349463239
|
class AuditEvent(rdf_structs.RDFProtoStruct): <NEW_LINE> <INDENT> protobuf = jobs_pb2.AuditEvent <NEW_LINE> rdf_deps = [ rdfvalue.RDFDatetime, rdfvalue.RDFURN, ] <NEW_LINE> def __init__(self, initializer=None, **kwargs): <NEW_LINE> <INDENT> super().__init__(initializer=initializer, **kwargs) <NEW_LINE> if not self.id: <NEW_LINE> <INDENT> self.id = random.UInt32() <NEW_LINE> <DEDENT> if not self.timestamp: <NEW_LINE> <INDENT> self.timestamp = rdfvalue.RDFDatetime.Now()
|
An RDF wrapper for the `AuditEvent` protobuf.
|
625990416fece00bbacccc58
|
class SpecialMixerComponent(MixerComponent): <NEW_LINE> <INDENT> def _create_strip(self): <NEW_LINE> <INDENT> return SelectChanStripComponent()
|
Class encompassing several selecting channel strips to form a mixer
|
62599041097d151d1a2c230f
|
class CXRDataset(Dataset): <NEW_LINE> <INDENT> def __init__(self, image_dir, mask_dir,type="train",split_ratio=0.2, transform=None): <NEW_LINE> <INDENT> self.image_dir = image_dir <NEW_LINE> self.mask_dir = mask_dir <NEW_LINE> self.transform = transform <NEW_LINE> self.masks = os.listdir(mask_dir) <NEW_LINE> if type=="train": <NEW_LINE> <INDENT> self.masks = self.masks[:int(len(self.masks)*(1-split_ratio))] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.masks = self.masks[int(len(self.masks)*(1-split_ratio)):] <NEW_LINE> <DEDENT> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.masks) <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> mask_path = os.path.join(self.mask_dir, self.masks[index]) <NEW_LINE> img_path = os.path.join(self.image_dir, self.masks[index].replace("_mask.png", ".png")) <NEW_LINE> image = np.array(Image.open(img_path).convert("RGB")) <NEW_LINE> mask = np.array(Image.open(mask_path).convert("L"), dtype=np.float32) <NEW_LINE> mask[mask == 255.0] = 1.0 <NEW_LINE> if self.transform is not None: <NEW_LINE> <INDENT> augmentations = self.transform(image=image, mask=mask) <NEW_LINE> image = augmentations["image"] <NEW_LINE> mask = augmentations["mask"] <NEW_LINE> <DEDENT> return image, mask
|
CAUTION: Some masks for the images from image_dir are missing. Hence, only those images whose masks are available are processed.
|
62599041d99f1b3c44d06945
|
class I4pProfile(UserenaLanguageBaseProfile, AskbotBaseProfile): <NEW_LINE> <INDENT> GENDER_TYPE = ( ('M', _('male')), ('F', _('female')) ) <NEW_LINE> user = models.ForeignKey(User, related_name='profile') <NEW_LINE> gender = models.CharField(max_length=1, choices=GENDER_TYPE, null=True, blank=True) <NEW_LINE> motto = models.TextField(_("motto"), null=True, blank=True) <NEW_LINE> about = models.TextField(_("about"), null=True, blank=True) <NEW_LINE> birthday = models.DateField(_("birthday"), null=True, blank=True) <NEW_LINE> website = models.URLField(verbose_name=_('website'), max_length=200, blank=True) <NEW_LINE> linkedin = models.URLField(verbose_name=_('linkedin'), max_length=200, blank=True) <NEW_LINE> twitter = models.URLField(verbose_name=_('twitter'), max_length=200, blank=True) <NEW_LINE> facebook = models.URLField(verbose_name=_('facebook'), max_length=200, blank=True) <NEW_LINE> address = models.TextField(_("address"), null=True, blank=True) <NEW_LINE> country = CountryField(_("country"), null=True, blank=True, choices=I4P_COUNTRIES) <NEW_LINE> registration_site = models.ForeignKey(Site, verbose_name=_("registration site"), default=1) <NEW_LINE> location = models.OneToOneField(Location, verbose_name=_('location'), null=True, blank=True) <NEW_LINE> @models.permalink <NEW_LINE> def get_absolute_url(self): <NEW_LINE> <INDENT> return ('userena_profile_detail', [self.user.username])
|
Userena Profile with language switch
|
6259904196565a6dacd2d8de
|
class DrilldownExecutionTest(unittest.TestCase): <NEW_LINE> <INDENT> dir_path = os.path.dirname(os.path.realpath(__file__)) <NEW_LINE> with open("./cpi/events/power8.yaml") as f: <NEW_LINE> <INDENT> clean_output = [] <NEW_LINE> events_list = f.read().replace(" ", "").splitlines() <NEW_LINE> for event in events_list: <NEW_LINE> <INDENT> if 'PM' in event: <NEW_LINE> <INDENT> clean_output.append(event.split(':')[0]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def execution_test(self): <NEW_LINE> <INDENT> self.assertTrue(len(self.clean_output) == 46) <NEW_LINE> if core.cmdexists('cpi'): <NEW_LINE> <INDENT> for event in self.clean_output: <NEW_LINE> <INDENT> status = core.execute('cpi drilldown -e ' + event + " /bin/sleep 1")
|
Class to test drilldown execution
|
625990413c8af77a43b68890
|
@six.add_metaclass(abc.ABCMeta) <NEW_LINE> class Level(): <NEW_LINE> <INDENT> def __init__(self, country_id=0): <NEW_LINE> <INDENT> self.product = self.factory_method() <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def factory_method(self): <NEW_LINE> <INDENT> pass
|
Declare the factory method, which returns an object of type Product.
Creator may also define a default implementation of the factory
method that returns a default ConcreteProduct object.
Call the factory method to create a Product object.
|
6259904163b5f9789fe86413
|
class Station(Producer): <NEW_LINE> <INDENT> key_schema = avro.load(f"{Path(__file__).parents[0]}/schemas/arrival_key.json") <NEW_LINE> value_schema = avro.load(f"{Path(__file__).parents[0]}/schemas/arrival_value.json") <NEW_LINE> def __init__(self, station_id, name, color, direction_a=None, direction_b=None): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> station_name = ( self.name.lower() .replace("/", "_and_") .replace(" ", "_") .replace("-", "_") .replace("'", "") ) <NEW_LINE> topic_name = f"org.chicago.cta.station.arrivals.{station_name}" <NEW_LINE> super().__init__( topic_name, key_schema=Station.key_schema, value_schema=Station.value_schema, num_partitions=1, num_replicas=1, ) <NEW_LINE> self.station_id = int(station_id) <NEW_LINE> self.color = color <NEW_LINE> self.dir_a = direction_a <NEW_LINE> self.dir_b = direction_b <NEW_LINE> self.a_train = None <NEW_LINE> self.b_train = None <NEW_LINE> self.turnstile = Turnstile(self) <NEW_LINE> <DEDENT> def run(self, train, direction, prev_station_id, prev_direction): <NEW_LINE> <INDENT> self.producer.produce( topic=self.topic_name, key={"timestamp": self.time_millis()}, value=dict( station_id=self.station_id, train_id=train.train_id, direction=direction, line=self.color.name, train_status=train.status.name, prev_stations_id=prev_station_id, prev_direction=prev_direction, ), ) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "Station | {:^5} | {:<30} | Direction A: | {:^5} | departing to {:<30} | Direction B: | {:^5} | departing to {:<30} | ".format( self.station_id, self.name, self.a_train.train_id if self.a_train is not None else "---", self.dir_a.name if self.dir_a is not None else "---", self.b_train.train_id if self.b_train is not None else "---", self.dir_b.name if self.dir_b is not None else "---", ) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self) <NEW_LINE> <DEDENT> def arrive_a(self, train, prev_station_id, prev_direction): <NEW_LINE> <INDENT> self.a_train = 
train <NEW_LINE> self.run(train, "a", prev_station_id, prev_direction) <NEW_LINE> <DEDENT> def arrive_b(self, train, prev_station_id, prev_direction): <NEW_LINE> <INDENT> self.b_train = train <NEW_LINE> self.run(train, "b", prev_station_id, prev_direction) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.turnstile.close() <NEW_LINE> super(Station, self).close()
|
Defines a single station
|
6259904123849d37ff852362
|
class StaticShipsProvider(ShipsProvider): <NEW_LINE> <INDENT> def __init__(self, config): <NEW_LINE> <INDENT> ShipsProvider.__init__(self, config) <NEW_LINE> self._ships = dict( (k, entities.Ship( k, ip=v['ip'], endpoint=v.get('endpoint'), docker_port=self._from_ship_or_defaults(v, 'docker_port'), socket_path=self._from_ship_or_defaults(v, 'socket_path'), ssh_tunnel=self._from_ship_or_defaults(v, 'ssh_tunnel'), timeout=self._from_ship_or_defaults(v, 'timeout'), tls=v.get('tls', False), tls_cert=v.get('tls_cert', None), tls_key=v.get('tls_key', None), tls_verify=v.get('tls_verify', False), tls_ca_cert=v.get('tls_ca_cert', None), ssl_version=v.get('ssl_version', None))) for k, v in self._config['ships'].items()) <NEW_LINE> <DEDENT> def ships(self): <NEW_LINE> <INDENT> return self._ships
|
Static ship provider.
Provides a set of ships defined by static configuration in the 'ships' YAML
map. This is the default provider and most commonly used. It is also the
original way of defining ships in Maestro.
|
625990416e29344779b018fa
|
class MeraMusicalEntity(MusicalEntity): <NEW_LINE> <INDENT> def __init__(self, labelled_canonical): <NEW_LINE> <INDENT> if TYPE_SET not in self.__dict__: <NEW_LINE> <INDENT> self.__dict__[TYPE_SET] = set() <NEW_LINE> <DEDENT> if MeraMusicalEntity not in self.__dict__[TYPE_SET]: <NEW_LINE> <INDENT> self.__dict__[TYPE_SET].add(MeraMusicalEntity) <NEW_LINE> super(MeraMusicalEntity, self).__init__(canonical=labelled_canonical[0]) <NEW_LINE> self._labelled_canonical = labelled_canonical <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def canonical_source(self): <NEW_LINE> <INDENT> return self._labelled_canonical[1] <NEW_LINE> <DEDENT> @property <NEW_LINE> def canonical_tuple(self): <NEW_LINE> <INDENT> return self._labelled_canonical <NEW_LINE> <DEDENT> @property <NEW_LINE> def identifying_form_tuples(self): <NEW_LINE> <INDENT> yield self._labelled_canonical <NEW_LINE> <DEDENT> @property <NEW_LINE> def identifying_forms(self): <NEW_LINE> <INDENT> yield self._canonical
|
Abstract. Every musical entity should inherit from this in order to have a canonical name and source
|
6259904176d4e153a661dbc8
|
class MultitenantOrgFilter(admin.RelatedFieldListFilter): <NEW_LINE> <INDENT> multitenant_lookup = 'pk__in' <NEW_LINE> def field_choices(self, field, request, model_admin): <NEW_LINE> <INDENT> if request.user.is_superuser: <NEW_LINE> <INDENT> return super(MultitenantOrgFilter, self).field_choices(field, request, model_admin) <NEW_LINE> <DEDENT> organizations = request.user.organizations_pk <NEW_LINE> return field.get_choices(include_blank=False, limit_choices_to={self.multitenant_lookup: organizations})
|
Admin filter that shows only organizations the current
user is associated with in its available choices
|
62599041cad5886f8bdc59d1
|
class SimpleTextQuestionViewReviewer(QuestionViewReviewer): <NEW_LINE> <INDENT> name = 'simple_text_question' <NEW_LINE> template_name = 'revisor/simple_text_question.html'
|
SimpleTextQuestionViewReviewer class represents the implementation of the reviewer view for a question object
This view is built to be extended from the different question types of the Derb system
By itself, this view shows the simple question created by the template administrator, including
the question text and help set by the user. Additionally, if a responsible user has answered the question,
it shows the answer text and annotations provided. Finally, if one or more reviewer users applied
observations to the question, it shows such observations.
.. note::
* Extends from the Question class, so if you want to take a look at the extended methods and attributes,
you can find it in :mod:`report_builder.Question.QuestionView.Question`
|
6259904129b78933be26aa17
|
class RunMessage(Base): <NEW_LINE> <INDENT> __tablename__ = 'run_log' <NEW_LINE> run_id = Column( String(32), ForeignKey('workflow_run.run_id'), primary_key=True ) <NEW_LINE> pos = Column(Integer, primary_key=True) <NEW_LINE> message = Column(Text, nullable=False) <NEW_LINE> run = relationship('RunObject', back_populates='log')
|
Log for messages created by workflow runs. Primarily used for error
messages for now.
|
6259904123e79379d538d7a7
|
@pytest.mark.ckan_config("ckan.plugins", "example_iresourcecontroller") <NEW_LINE> @pytest.mark.usefixtures("clean_db", "with_plugins", "with_request_context") <NEW_LINE> class TestExampleIResourceController(object): <NEW_LINE> <INDENT> def test_resource_controller_plugin_create(self): <NEW_LINE> <INDENT> user = factories.Sysadmin() <NEW_LINE> package = factories.Dataset(user=user) <NEW_LINE> plugin = ckan.plugins.get_plugin("example_iresourcecontroller") <NEW_LINE> res = helpers.call_action( "resource_create", package_id=package["id"], name="test-resource", url="http://resource.create/", apikey=user["apikey"], ) <NEW_LINE> assert plugin.counter["before_create"] == 1, plugin.counter <NEW_LINE> assert plugin.counter["after_create"] == 1, plugin.counter <NEW_LINE> assert plugin.counter["before_update"] == 0, plugin.counter <NEW_LINE> assert plugin.counter["after_update"] == 0, plugin.counter <NEW_LINE> assert plugin.counter["before_delete"] == 0, plugin.counter <NEW_LINE> assert plugin.counter["after_delete"] == 0, plugin.counter <NEW_LINE> <DEDENT> def test_resource_controller_plugin_update(self): <NEW_LINE> <INDENT> user = factories.Sysadmin() <NEW_LINE> resource = factories.Resource(user=user) <NEW_LINE> plugin = ckan.plugins.get_plugin("example_iresourcecontroller") <NEW_LINE> res = helpers.call_action( "resource_update", id=resource["id"], url="http://resource.updated/", apikey=user["apikey"], ) <NEW_LINE> assert plugin.counter["before_create"] == 1, plugin.counter <NEW_LINE> assert plugin.counter["after_create"] == 1, plugin.counter <NEW_LINE> assert plugin.counter["before_update"] == 1, plugin.counter <NEW_LINE> assert plugin.counter["after_update"] == 1, plugin.counter <NEW_LINE> assert plugin.counter["before_delete"] == 0, plugin.counter <NEW_LINE> assert plugin.counter["after_delete"] == 0, plugin.counter <NEW_LINE> <DEDENT> def test_resource_controller_plugin_delete(self): <NEW_LINE> <INDENT> user = factories.Sysadmin() <NEW_LINE> resource = 
factories.Resource(user=user) <NEW_LINE> plugin = ckan.plugins.get_plugin("example_iresourcecontroller") <NEW_LINE> res = helpers.call_action( "resource_delete", id=resource["id"], apikey=user["apikey"] ) <NEW_LINE> assert plugin.counter["before_create"] == 1, plugin.counter <NEW_LINE> assert plugin.counter["after_create"] == 1, plugin.counter <NEW_LINE> assert plugin.counter["before_update"] == 0, plugin.counter <NEW_LINE> assert plugin.counter["after_update"] == 0, plugin.counter <NEW_LINE> assert plugin.counter["before_delete"] == 1, plugin.counter <NEW_LINE> assert plugin.counter["after_delete"] == 1, plugin.counter <NEW_LINE> <DEDENT> def test_resource_controller_plugin_show(self): <NEW_LINE> <INDENT> user = factories.Sysadmin() <NEW_LINE> package = factories.Dataset(user=user) <NEW_LINE> resource = factories.Resource(user=user, package_id=package["id"]) <NEW_LINE> plugin = ckan.plugins.get_plugin("example_iresourcecontroller") <NEW_LINE> res = helpers.call_action("package_show", name_or_id=package["id"]) <NEW_LINE> assert plugin.counter["before_create"] == 1, plugin.counter <NEW_LINE> assert plugin.counter["after_create"] == 1, plugin.counter <NEW_LINE> assert plugin.counter["before_update"] == 0, plugin.counter <NEW_LINE> assert plugin.counter["after_update"] == 0, plugin.counter <NEW_LINE> assert plugin.counter["before_delete"] == 0, plugin.counter <NEW_LINE> assert plugin.counter["after_delete"] == 0, plugin.counter <NEW_LINE> assert plugin.counter["before_show"] == 5, plugin.counter
|
Tests for the plugin that uses IResourceController.
|
6259904126068e7796d4dbef
|
class Update: <NEW_LINE> <INDENT> __slots__ = ["_dev", "id", "name", "grp", "_fullname", "_sigs", "_func", "_ext_args", "_logger", "_old"] <NEW_LINE> def __init__(self, dev, id_, name, sigs, func, ext_args=None, grp=""): <NEW_LINE> <INDENT> self._dev = dev <NEW_LINE> self.id = id_ <NEW_LINE> self.name = name <NEW_LINE> self.grp = grp <NEW_LINE> if sigs: <NEW_LINE> <INDENT> self._sigs = list(args_to_sequence(sigs)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._sigs = [] <NEW_LINE> <DEDENT> self._func = func <NEW_LINE> if ext_args: <NEW_LINE> <INDENT> self._ext_args = list(args_to_sequence(ext_args)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._ext_args = [] <NEW_LINE> <DEDENT> self._logger = None <NEW_LINE> self._fullname = None <NEW_LINE> self._old = None <NEW_LINE> <DEDENT> def init_logger(self): <NEW_LINE> <INDENT> self._logger = self._dev._logger <NEW_LINE> if self.grp: <NEW_LINE> <INDENT> self._fullname = '.'.join( (self._dev._fullname, self.grp, self.name)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._fullname = '.'.join((self._dev._fullname, self.name)) <NEW_LINE> <DEDENT> <DEDENT> def _log_info(self, msg, **kargs): <NEW_LINE> <INDENT> self._logger.info(self._fullname, msg, **kargs) <NEW_LINE> <DEDENT> def _log_warning(self, msg, **kargs): <NEW_LINE> <INDENT> self._logger.warning(self._fullname, msg, **kargs) <NEW_LINE> <DEDENT> def _log_error(self, msg, **kargs): <NEW_LINE> <INDENT> self._logger.error(self._fullname, msg, **kargs) <NEW_LINE> <DEDENT> def _log_exception(self, msg, **kargs): <NEW_LINE> <INDENT> self._logger.exception(self._fullname, msg, **kargs) <NEW_LINE> <DEDENT> def get_update(self, dev_link): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> new = self._func() <NEW_LINE> if self._sigs: <NEW_LINE> <INDENT> if new == self._old or None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._old = new <NEW_LINE> link = dev_link.links.add() <NEW_LINE> link.id = self.id <NEW_LINE> 
link.args.extend(args_to_sequence(new)) <NEW_LINE> return link <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> except Exception: <NEW_LINE> <INDENT> self._log_exception("Failed to update.") <NEW_LINE> <DEDENT> <DEDENT> def get_full_update(self, dev_link): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> new = self._func() <NEW_LINE> if self._sigs: <NEW_LINE> <INDENT> link = dev_link.links.add() <NEW_LINE> link.id = self.id <NEW_LINE> link.args.extend(args_to_sequence(new)) <NEW_LINE> return link <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> except Exception: <NEW_LINE> <INDENT> self._log_exception("Failed to update.") <NEW_LINE> <DEDENT> <DEDENT> def get_desc(self, dev_link): <NEW_LINE> <INDENT> if self._sigs: <NEW_LINE> <INDENT> link = dev_link.links.add() <NEW_LINE> link.type = Link.UPDATE <NEW_LINE> link.id = self.id <NEW_LINE> link.name = self.name <NEW_LINE> link.group = self.grp <NEW_LINE> link.sigs.extend(self._sigs) <NEW_LINE> link.args.extend(self._ext_args) <NEW_LINE> return link <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None
|
Update is the type of operation executed on a node to display the result on
a control.
|
62599041a79ad1619776b328
|
class SparseFaceReaderAdapter: <NEW_LINE> <INDENT> sparse_reader: SparseFaceReader <NEW_LINE> def __init__(self, sparse_reader: SparseFaceReader): <NEW_LINE> <INDENT> self.sparse_reader = sparse_reader <NEW_LINE> <DEDENT> def __iter__(self) -> Iterator[Dict[str, List[Any]]]: <NEW_LINE> <INDENT> return self.iter_from(0) <NEW_LINE> <DEDENT> def iter_from(self, start_frame: int) -> Iterator[Dict[str, List[Any]]]: <NEW_LINE> <INDENT> sparse_it = peekable(self.sparse_reader) <NEW_LINE> while sparse_it.peek()[0][0] < start_frame: <NEW_LINE> <INDENT> next(sparse_it) <NEW_LINE> <DEDENT> for frame_num in count(start_frame): <NEW_LINE> <INDENT> result: Dict[str, List[Any]] = {} <NEW_LINE> while sparse_it.peek()[0][0] == frame_num: <NEW_LINE> <INDENT> (_, pers_id), face = next(sparse_it) <NEW_LINE> for key, arr in face.items(): <NEW_LINE> <INDENT> if key not in result: <NEW_LINE> <INDENT> result[key] = [] <NEW_LINE> <DEDENT> while len(result[key]) < pers_id: <NEW_LINE> <INDENT> result[key].append(None) <NEW_LINE> <DEDENT> result[key].append(arr) <NEW_LINE> <DEDENT> <DEDENT> yield result
|
Adapts a sparse face embedding dump to behave like a dense one.
Warning: the resulting iterator is infinite.
|
625990418a43f66fc4bf343a
|
class Mutation(graphene.ObjectType): <NEW_LINE> <INDENT> send_model = SendModels.Field()
|
Root mutation.
|
6259904166673b3332c316a2
|
class DescriptorError(SqlAlchemyMediaException): <NEW_LINE> <INDENT> pass
|
A sub-class instance of this exception may raised when an error has
occurred in :class:`.BaseDescriptor` and it's subtypes.
|
62599041b830903b9686edce
|
class UserRoleDetail(generics.GenericAPIView): <NEW_LINE> <INDENT> permission_classes = (IsOwnerOrReadOnly,) <NEW_LINE> def get_user_role_object(self, role_id): <NEW_LINE> <INDENT> return Role.get_object(pk=role_id) <NEW_LINE> <DEDENT> def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> form = UserRoleDetailForm(request.data) <NEW_LINE> if not form.is_valid(): <NEW_LINE> <INDENT> return Response({'Detail': form.errors}, status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> cld = form.cleaned_data <NEW_LINE> instance = self.get_user_role_object(cld['id']) <NEW_LINE> if isinstance(instance, Exception): <NEW_LINE> <INDENT> return Response({'Detail': instance.args}, status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> serializer = UserRoleSerializer(instance) <NEW_LINE> return Response(serializer.data, status=status.HTTP_200_OK)
|
用户角色详情
|
6259904116aa5153ce401796
|
class PersistentLocalsFunction(object): <NEW_LINE> <INDENT> def __init__(self, func): <NEW_LINE> <INDENT> self._locals = {} <NEW_LINE> self._func = new.instancemethod(func, self, PersistentLocalsFunction) <NEW_LINE> signature = inspect.getargspec(func) <NEW_LINE> signature[0].pop(0) <NEW_LINE> signature = inspect.formatargspec(*signature) <NEW_LINE> docprefix = func.func_name + signature <NEW_LINE> default_doc = '<no docstring>' <NEW_LINE> self.__doc__ = (docprefix + '\n\n' + (func.__doc__ or default_doc) + _docpostfix) <NEW_LINE> <DEDENT> def __call__(self, *args, **kwargs): <NEW_LINE> <INDENT> return self._func(*args, **kwargs) <NEW_LINE> <DEDENT> @property <NEW_LINE> def locals(self): <NEW_LINE> <INDENT> return self._locals
|
Wrapper class for the 'persistent_locals' decorator.
Refer to the docstring of instances for help about the wrapped
function.
|
62599041d99f1b3c44d06947
|
@blueprint.route('/<dataset:dataset>/resources/new/upload', endpoint='upload_new_resource') <NEW_LINE> class UploadNewResourceView(ProtectedDatasetView, UploadNewResource): <NEW_LINE> <INDENT> pass
|
Handle upload on POST if authorized.
|
62599041379a373c97d9a2d3
|
@tag('rate-limit-note') <NEW_LINE> class TestNoteViewRateLimit(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> new_project = Project(gitlab_id=747) <NEW_LINE> new_project.save() <NEW_LINE> new_user = UserIdentifier.objects.create( user_identifier = 'duo-atlas-hypnotism-curry-creatable-rubble' ) <NEW_LINE> posted_issue = Issue.objects.create ( title = 'A posted issue', description = 'A posted issue description', linked_project = new_project, linked_user = new_user, gitlab_iid = 1, reviewer_status = 'A', posted_to_GitLab = True ) <NEW_LINE> self.client=Client() <NEW_LINE> self.new_user = new_user <NEW_LINE> self.project = new_project <NEW_LINE> self.issue = posted_issue <NEW_LINE> <DEDENT> def test_note_create_view_POST_RATE_LIMIT(self): <NEW_LINE> <INDENT> url = reverse('create-note', args=[ self.new_user, self.project.slug, self.issue.gitlab_iid]) <NEW_LINE> form_data = { 'body': """A new note body.""" } <NEW_LINE> form=None <NEW_LINE> response = run_rate_limit_test(self, self.client, url, form, form_data) <NEW_LINE> self.assertEqual(response.status_code, 403) <NEW_LINE> self.assertTemplateUsed('anonticket/rate_limit.html') <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> cache.clear()
|
Test the ratelimiting for NoteCreateView.
|
6259904176d4e153a661dbc9
|
class CoreThread(util.KegbotThread): <NEW_LINE> <INDENT> def __init__(self, kb_env, name): <NEW_LINE> <INDENT> util.KegbotThread.__init__(self, name) <NEW_LINE> self._kb_env = kb_env <NEW_LINE> <DEDENT> def PostEvent(self, event): <NEW_LINE> <INDENT> if isinstance(event, kbevent.QuitEvent): <NEW_LINE> <INDENT> self._logger.info('got quit event, quitting') <NEW_LINE> self.Quit()
|
Convenience wrapper around a threading.Thread
|
6259904115baa7234946323c
|
class SCDOM_RESULT(enum.IntEnum): <NEW_LINE> <INDENT> SCDOM_OK = 0 <NEW_LINE> SCDOM_INVALID_HWND = 1 <NEW_LINE> SCDOM_INVALID_HANDLE = 2 <NEW_LINE> SCDOM_PASSIVE_HANDLE = 3 <NEW_LINE> SCDOM_INVALID_PARAMETER = 4 <NEW_LINE> SCDOM_OPERATION_FAILED = 5 <NEW_LINE> SCDOM_OK_NOT_HANDLED = (-1)
|
Result value for Sciter DOM functions.
|
62599041711fe17d825e15f2
|
class ProtocolHandler(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._handlers = dict() <NEW_LINE> self._handlers['PULL-DOC-REQ'] = ServerSession.pull <NEW_LINE> self._handlers['PUSH-DOC'] = ServerSession.push <NEW_LINE> self._handlers['PATCH-DOC'] = ServerSession.patch <NEW_LINE> self._handlers['SERVER-INFO-REQ'] = self._server_info_req <NEW_LINE> self._handlers['EVENT'] = ServerSession.event <NEW_LINE> <DEDENT> @gen.coroutine <NEW_LINE> def handle(self, message, connection): <NEW_LINE> <INDENT> handler = self._handlers.get((message.msgtype, message.revision)) <NEW_LINE> if handler is None: <NEW_LINE> <INDENT> handler = self._handlers.get(message.msgtype) <NEW_LINE> <DEDENT> if handler is None: <NEW_LINE> <INDENT> raise ProtocolError("%s not expected on server" % message) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> work = yield handler(message, connection) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> log.error("error handling message %r: %r", message, e) <NEW_LINE> log.debug(" message header %r content %r", message.header, message.content, exc_info=1) <NEW_LINE> work = connection.error(message, repr(e)) <NEW_LINE> <DEDENT> raise gen.Return(work) <NEW_LINE> <DEDENT> @gen.coroutine <NEW_LINE> def _server_info_req(self, message, connection): <NEW_LINE> <INDENT> raise gen.Return(connection.protocol.create('SERVER-INFO-REPLY', message.header['msgid']))
|
A Bokeh server may be expected to receive any of the following protocol
messages:
* ``EVENT``
* ``PATCH-DOC``
* ``PULL-DOC-REQ``
* ``PUSH-DOC``
* ``SERVER-INFO-REQ``
The job of ``ProtocolHandler`` is to direct incoming messages to the right
specialized handler for each message type. When the server receives a new
message on a connection it will call ``handler`` with the message and the
connection that the message arrived on. Most messages are ultimately
handled by the ``ServerSession`` class, but some simpler messages types
such as ``SERVER-INFO-REQ`` may be handled directly by ``ProtocolHandler``.
Any unexpected messages will result in a ``ProtocolError``.
|
62599041d53ae8145f919708
|
class UserGameProfile(models.Model): <NEW_LINE> <INDENT> game = models.ForeignKey(GameDetail) <NEW_LINE> game_user_name = models.CharField(max_length=100) <NEW_LINE> user = models.ForeignKey(UserProfile, related_name="games") <NEW_LINE> region = models.CharField(max_length=10) <NEW_LINE> external_user_id = models.IntegerField(null=True, blank=True) <NEW_LINE> is_in_error_state = models.BooleanField(default=0) <NEW_LINE> updates_on_demand = models.IntegerField(default=1) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.game_user_name + "'s " + self.game.shorthand_name + ' profile'
|
Used to bind the user profile to each game the user participates in.
The related_name field is for the serializers to relate back to the user profile and get the list of all the games a user has.
|
625990414e696a045264e777
|
class WorkingCopy(BaseWorkingCopy): <NEW_LINE> <INDENT> DIRECTORY = '.mockvcs' <NEW_LINE> def lock(self, path): <NEW_LINE> <INDENT> log.info("simulated lock on: {}...".format(path)) <NEW_LINE> <DEDENT> def save(self, message=None): <NEW_LINE> <INDENT> log.info("simulated save") <NEW_LINE> <DEDENT> @property <NEW_LINE> def ignores(self): <NEW_LINE> <INDENT> return ("*/env/*", "*/apidocs/*", "*/build/lib/*")
|
Simulated working copy.
|
62599041b5575c28eb71361f
|
class DeleteTokenView(View): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def post(request): <NEW_LINE> <INDENT> token_id = json.loads(request.body).get("id", None) <NEW_LINE> if not token_id or not GeyeTokenModel.instance.is_exist(token_id): <NEW_LINE> <INDENT> return JsonResponse({"code": 1004, "message": "token id不存在"}) <NEW_LINE> <DEDENT> obj = GeyeTokenModel.instance.fake_delete(token_id) <NEW_LINE> if obj: <NEW_LINE> <INDENT> return JsonResponse({"code": 1001, "message": "删除成功!"}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return JsonResponse({"code": 1002, "message": "删除失败!"})
|
删除某条token
|
6259904196565a6dacd2d8e0
|
class ConsoleFormatter(logging.Formatter): <NEW_LINE> <INDENT> date_format = "%H:%M:%S" <NEW_LINE> default_format = "%(asctime)s [%(levelname)s] %(msg)s" <NEW_LINE> info_format = "%(msg)s" <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super().__init__(fmt=ConsoleFormatter.default_format, datefmt=ConsoleFormatter.date_format, style='%') <NEW_LINE> <DEDENT> def format(self, record): <NEW_LINE> <INDENT> format_orig = self._style._fmt <NEW_LINE> if record.levelno == logging.INFO: <NEW_LINE> <INDENT> self._style._fmt = ConsoleFormatter.info_format <NEW_LINE> <DEDENT> result = logging.Formatter.format(self, record) <NEW_LINE> self._style._fmt = format_orig <NEW_LINE> return result
|
კლასით განვსაზღვრავთ ტერმინალში გამოტანილი მესიჯის ფორმატს.
|
6259904163b5f9789fe86417
|
class PlayfieldView(GridLayout): <NEW_LINE> <INDENT> def __init__(self, game, **kwargs): <NEW_LINE> <INDENT> super().__init__(**kwargs) <NEW_LINE> self.game = game <NEW_LINE> self.cols = game.columns <NEW_LINE> self.spacing = ['1dp'] <NEW_LINE> for row in range(game.rows-1, -1, -1): <NEW_LINE> <INDENT> for column in range(game.columns): <NEW_LINE> <INDENT> self.add_widget(CellView(row, column)) <NEW_LINE> <DEDENT> <DEDENT> Logger.debug("PlayfieldView: Initialized") <NEW_LINE> <DEDENT> def draw(self): <NEW_LINE> <INDENT> for cell in self.walk(restrict=True): <NEW_LINE> <INDENT> if cell != self: <NEW_LINE> <INDENT> cell.draw()
|
This is a view into the game's Playfield.
Rows and columns are counted from 0.
|
625990411d351010ab8f4dca
|
class Future(threading.local): <NEW_LINE> <INDENT> def __init__(self, cell_datetime_objects=False, netcdf_promote=False, strict_grib_load=False, netcdf_no_unlimited=False, clip_latitudes=False): <NEW_LINE> <INDENT> self.__dict__['cell_datetime_objects'] = cell_datetime_objects <NEW_LINE> self.__dict__['netcdf_promote'] = netcdf_promote <NEW_LINE> self.__dict__['strict_grib_load'] = strict_grib_load <NEW_LINE> self.__dict__['netcdf_no_unlimited'] = netcdf_no_unlimited <NEW_LINE> self.__dict__['clip_latitudes'] = clip_latitudes <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> msg = ('Future(cell_datetime_objects={}, netcdf_promote={}, ' 'strict_grib_load={}, netcdf_no_unlimited={}, ' 'clip_latitudes={})') <NEW_LINE> return msg.format(self.cell_datetime_objects, self.netcdf_promote, self.strict_grib_load, self.netcdf_no_unlimited, self.clip_latitudes) <NEW_LINE> <DEDENT> deprecated_options = { 'strict_grib_load': ('This is because "iris.fileformats.grib" is now ' 'deprecated : Please install the "iris_grib" ' 'package instead.')} <NEW_LINE> def __setattr__(self, name, value): <NEW_LINE> <INDENT> if name in self.deprecated_options: <NEW_LINE> <INDENT> reason = self.deprecated_options[name] <NEW_LINE> msg = ("the 'Future' object property {!r} is now deprecated. " "Please remove code which uses this. 
{}") <NEW_LINE> warn_deprecated(msg.format(name, reason)) <NEW_LINE> <DEDENT> if name not in self.__dict__: <NEW_LINE> <INDENT> msg = "'Future' object has no attribute {!r}".format(name) <NEW_LINE> raise AttributeError(msg) <NEW_LINE> <DEDENT> self.__dict__[name] = value <NEW_LINE> <DEDENT> @contextlib.contextmanager <NEW_LINE> def context(self, **kwargs): <NEW_LINE> <INDENT> current_state = self.__dict__.copy() <NEW_LINE> for name, value in six.iteritems(kwargs): <NEW_LINE> <INDENT> setattr(self, name, value) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> yield <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.__dict__.clear() <NEW_LINE> self.__dict__.update(current_state)
|
Run-time configuration controller.
|
62599041379a373c97d9a2d5
|
class ListNetworksResultSet(ResultSet): <NEW_LINE> <INDENT> def getJSONFromString(self, str): <NEW_LINE> <INDENT> return json.loads(str) <NEW_LINE> <DEDENT> def get_Response(self): <NEW_LINE> <INDENT> return self._output.get('Response', None) <NEW_LINE> <DEDENT> def get_NewAccessToken(self): <NEW_LINE> <INDENT> return self._output.get('NewAccessToken', None)
|
A ResultSet with methods tailored to the values returned by the ListNetworks Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
|
6259904176d4e153a661dbca
|
class OverloadedFuncDef(FuncBase, SymbolNode, Statement): <NEW_LINE> <INDENT> __slots__ = ('items', 'unanalyzed_items', 'impl') <NEW_LINE> items: List[OverloadPart] <NEW_LINE> unanalyzed_items: List[OverloadPart] <NEW_LINE> impl: Optional[OverloadPart] <NEW_LINE> def __init__(self, items: List['OverloadPart']) -> None: <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.items = items <NEW_LINE> self.unanalyzed_items = items.copy() <NEW_LINE> self.impl = None <NEW_LINE> if len(items) > 0: <NEW_LINE> <INDENT> self.set_line(items[0].line, items[0].column) <NEW_LINE> <DEDENT> self.is_final = False <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self) -> str: <NEW_LINE> <INDENT> if self.items: <NEW_LINE> <INDENT> return self.items[0].name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert self.impl is not None <NEW_LINE> return self.impl.name <NEW_LINE> <DEDENT> <DEDENT> def accept(self, visitor: StatementVisitor[T]) -> T: <NEW_LINE> <INDENT> return visitor.visit_overloaded_func_def(self) <NEW_LINE> <DEDENT> def serialize(self) -> JsonDict: <NEW_LINE> <INDENT> return {'.class': 'OverloadedFuncDef', 'items': [i.serialize() for i in self.items], 'type': None if self.type is None else self.type.serialize(), 'fullname': self._fullname, 'impl': None if self.impl is None else self.impl.serialize(), 'flags': get_flags(self, FUNCBASE_FLAGS), } <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def deserialize(cls, data: JsonDict) -> 'OverloadedFuncDef': <NEW_LINE> <INDENT> assert data['.class'] == 'OverloadedFuncDef' <NEW_LINE> res = OverloadedFuncDef([ cast(OverloadPart, SymbolNode.deserialize(d)) for d in data['items']]) <NEW_LINE> if data.get('impl') is not None: <NEW_LINE> <INDENT> res.impl = cast(OverloadPart, SymbolNode.deserialize(data['impl'])) <NEW_LINE> if len(res.items) > 0: <NEW_LINE> <INDENT> res.set_line(res.impl.line) <NEW_LINE> <DEDENT> <DEDENT> if data.get('type') is not None: <NEW_LINE> <INDENT> typ = mypy.types.deserialize_type(data['type']) <NEW_LINE> assert 
isinstance(typ, mypy.types.ProperType) <NEW_LINE> res.type = typ <NEW_LINE> <DEDENT> res._fullname = data['fullname'] <NEW_LINE> set_flags(res, data['flags']) <NEW_LINE> return res
|
A logical node representing all the variants of a multi-declaration function.
A multi-declaration function is often an @overload, but can also be a
@property with a setter and a/or a deleter.
This node has no explicit representation in the source program.
Overloaded variants must be consecutive in the source file.
|
62599041b57a9660fecd2d27
|
class UserRegistration(APIView): <NEW_LINE> <INDENT> def post(self, request, format=None): <NEW_LINE> <INDENT> user = request.data <NEW_LINE> user['username'] = 'sf' + str(random.randint(1, 100)) + '_' + user['first_name'] + '_' + user['last_name'] <NEW_LINE> user_serializer = UserSerializer(data=user) <NEW_LINE> if user_serializer.is_valid(): <NEW_LINE> <INDENT> user_serializer.save() <NEW_LINE> return Response(user_serializer.data) <NEW_LINE> <DEDENT> return Response(user_serializer.errors)
|
List the details of a user
|
6259904123e79379d538d7ab
|
class SafeTranslationsTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> if 'LANG' in iter(list(os.environ.keys())): <NEW_LINE> <INDENT> os.environ.__delitem__('LANG') <NEW_LINE> <DEDENT> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> if 'LANG' in iter(list(os.environ.keys())): <NEW_LINE> <INDENT> os.environ.__delitem__('LANG') <NEW_LINE> <DEDENT> <DEDENT> def test_impact_summary_words(self): <NEW_LINE> <INDENT> os.environ['LANG'] = 'id' <NEW_LINE> phrase_list = [] <NEW_LINE> message = 'Specific words checked for translation:\n' <NEW_LINE> for phrase in phrase_list: <NEW_LINE> <INDENT> if phrase == tr(phrase): <NEW_LINE> <INDENT> message += 'FAIL: %s' % phrase <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> message += 'PASS: %s' % phrase <NEW_LINE> <DEDENT> <DEDENT> self.assertNotIn('FAIL', message, message) <NEW_LINE> os.environ['LANG'] = 'en' <NEW_LINE> <DEDENT> def test_qgis_translations(self): <NEW_LINE> <INDENT> file_path = safe_dir('i18n/inasafe_id.qm') <NEW_LINE> translator = QTranslator() <NEW_LINE> translator.load(file_path) <NEW_LINE> QCoreApplication.installTranslator(translator) <NEW_LINE> expected_message = ( 'Tidak ada informasi gaya yang ditemukan pada lapisan %s') <NEW_LINE> real_message = QCoreApplication.translate( '@default', 'No styleInfo was found for layer %s') <NEW_LINE> message = 'expected %s but got %s' % (expected_message, real_message) <NEW_LINE> self.assertEqual(expected_message, real_message, message) <NEW_LINE> <DEDENT> @unittest.skipIf( os.environ.get('ON_TRAVIS', False), 'Travis recognize QgsApplication as a pyqtWrapperType object.') <NEW_LINE> def test_qgis_app_locale(self): <NEW_LINE> <INDENT> from safe.definitions.constants import no_field <NEW_LINE> self.assertEqual(no_field, u'No Field') <NEW_LINE> _ = get_qgis_app('id', INASAFE_TEST) <NEW_LINE> from safe.definitions.constants import no_field <NEW_LINE> self.assertNotEqual(no_field, u'No Field') <NEW_LINE> expected_locale = 'id' <NEW_LINE> 
self.assertEqual(locale(INASAFE_TEST), expected_locale) <NEW_LINE> expected_message = ( 'Tidak ada informasi gaya yang ditemukan pada lapisan %s') <NEW_LINE> real_message = tr( 'No styleInfo was found for layer %s') <NEW_LINE> message = 'expected %s but got %s' % (expected_message, real_message) <NEW_LINE> self.assertEqual(expected_message, real_message, message) <NEW_LINE> _ = get_qgis_app(qsetting=INASAFE_TEST) <NEW_LINE> expected_locale = 'en' <NEW_LINE> self.assertEqual(locale(INASAFE_TEST), expected_locale) <NEW_LINE> expected_message = ( 'No styleInfo was found for layer %s') <NEW_LINE> real_message = tr( 'No styleInfo was found for layer %s') <NEW_LINE> message = 'expected %s but got %s' % (expected_message, real_message) <NEW_LINE> self.assertEqual(expected_message, real_message, message) <NEW_LINE> os.environ['LANG'] = 'en'
|
Test translations work.
|
6259904126068e7796d4dbf3
|
class gitpostreceiveinputparser(gitreceiveinputparser): <NEW_LINE> <INDENT> pass
|
input parser for the 'post-receive' phase
available fields:
- reporoot (str) => root of the repo
- receivedrevs =>
(list of tuples: (<old-value> <new-value> <ref-name>))
- head (str) => sha1 of HEAD
|
6259904115baa7234946323e
|
class HelperLowBatIP(HMDevice): <NEW_LINE> <INDENT> def __init__(self, device_description, proxy, resolveparamsets=False): <NEW_LINE> <INDENT> super().__init__(device_description, proxy, resolveparamsets) <NEW_LINE> self.ATTRIBUTENODE.update({"LOW_BAT": [0]}) <NEW_LINE> <DEDENT> def low_batt(self, channel=None): <NEW_LINE> <INDENT> return self.getAttributeData("LOW_BAT", 0)
|
This Helper adds easy access to read the LOWBAT state
|
62599041596a897236128f05
|
class FuzzyEnum(Enum): <NEW_LINE> <INDENT> case_sensitive = False <NEW_LINE> substring_matching = False <NEW_LINE> def __init__(self, values, default_value=Undefined, case_sensitive=False, substring_matching=False, **kwargs): <NEW_LINE> <INDENT> self.case_sensitive = case_sensitive <NEW_LINE> self.substring_matching = substring_matching <NEW_LINE> values = [cast_unicode_py2(value) for value in values] <NEW_LINE> super(FuzzyEnum, self).__init__(values, default_value=default_value, **kwargs) <NEW_LINE> <DEDENT> def validate(self, obj, value): <NEW_LINE> <INDENT> if isinstance(value, str): <NEW_LINE> <INDENT> value = cast_unicode_py2(value) <NEW_LINE> <DEDENT> if not isinstance(value, six.string_types): <NEW_LINE> <INDENT> self.error(obj, value) <NEW_LINE> <DEDENT> conv_func = (lambda c: c) if self.case_sensitive else lambda c: c.lower() <NEW_LINE> substring_matching = self.substring_matching <NEW_LINE> match_func = ((lambda v, c: v in c) if substring_matching else (lambda v, c: c.startswith(v))) <NEW_LINE> value = conv_func(value) <NEW_LINE> choices = self.values <NEW_LINE> matches = [match_func(value, conv_func(c)) for c in choices] <NEW_LINE> if sum(matches) == 1: <NEW_LINE> <INDENT> for v, m in zip(choices, matches): <NEW_LINE> <INDENT> if m: <NEW_LINE> <INDENT> return v <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.error(obj, value) <NEW_LINE> <DEDENT> def _info(self, as_rst=False): <NEW_LINE> <INDENT> none = (' or %s' % ('`None`' if as_rst else 'None') if self.allow_none else '') <NEW_LINE> case = 'sensitive' if self.case_sensitive else 'insensitive' <NEW_LINE> substr = 'substring' if self.substring_matching else 'prefix' <NEW_LINE> return 'any case-%s %s of %s%s' % (case, substr, self._choices_str(as_rst), none) <NEW_LINE> <DEDENT> def info(self): <NEW_LINE> <INDENT> return self._info(as_rst=False) <NEW_LINE> <DEDENT> def info_rst(self): <NEW_LINE> <INDENT> return self._info(as_rst=True)
|
An case-ignoring enum matching choices by unique prefixes/substrings.
|
6259904116aa5153ce40179a
|
class RandomPairs(PairsGenerator): <NEW_LINE> <INDENT> def __init__(self, spk1, spk2, no_silent=False, no_auto=True): <NEW_LINE> <INDENT> PairsGenerator.__init__(self, spk1, spk2, no_silent) <NEW_LINE> self.no_auto = no_auto <NEW_LINE> <DEDENT> def get_pairs(self, nb_pairs): <NEW_LINE> <INDENT> cells1 = numpy.array(list(self.ids_1), int) <NEW_LINE> cells2 = numpy.array(list(self.ids_2), int) <NEW_LINE> pairs = numpy.zeros((0,2), int) <NEW_LINE> N1 = len(cells1) <NEW_LINE> N2 = len(cells2) <NEW_LINE> T = min(N1,N2) <NEW_LINE> while len(pairs) < nb_pairs: <NEW_LINE> <INDENT> N = min(nb_pairs-len(pairs), T) <NEW_LINE> tmp_pairs = numpy.zeros((N, 2),int) <NEW_LINE> tmp_pairs[:,0] = cells1[numpy.floor(numpy.random.uniform(0, N1, N)).astype(int)] <NEW_LINE> tmp_pairs[:,1] = cells2[numpy.floor(numpy.random.uniform(0, N2, N)).astype(int)] <NEW_LINE> if self.no_auto: <NEW_LINE> <INDENT> idx = numpy.where(tmp_pairs[:,0] == tmp_pairs[:,1])[0] <NEW_LINE> pairs = numpy.concatenate((pairs, numpy.delete(tmp_pairs, idx, axis=0))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pairs = numpy.concatenate((pairs, tmp_pairs)) <NEW_LINE> <DEDENT> <DEDENT> return pairs
|
RandomPairs(SpikeList, SpikeList, no_silent, no_auto). Inherits from PairsGenerator.
Generator that will return random pairs of elements.
Inputs:
spk1 - First SpikeList object to take cells from
spk2 - Second SpikeList object to take cells from
no_silent - Boolean to say if only non silent cells should
be considered. False by default
no_auto - Boolean to say if pairs with the same element (id,id) should
be remove. True by default, i.e those pairs are discarded
Examples:
>> p = RandomPairs(spk1, spk1, True, False)
>> p.get_pairs(4)
[[1,3],[2,5],[1,4],[5,5]]
>> p = RandomPairs(spk1, spk1, True, True)
>> p.get_pairs(3)
[[1,3],[2,5],[1,4]]
See also RandomPairs, CustomPairs, DistantDependentPairs
|
6259904107d97122c4217f4c
|
class Message(models.Model): <NEW_LINE> <INDENT> patient = models.ForeignKey(Patient, on_delete=models.CASCADE, related_name = 'message_patient') <NEW_LINE> clinicien = models.ForeignKey(Clinicien, on_delete=models.CASCADE, related_name = 'message_clinicien') <NEW_LINE> message = models.CharField(max_length=255) <NEW_LINE> isClinicien = models.BooleanField(default=False) <NEW_LINE> created_at = models.DateTimeField(auto_now_add=True) <NEW_LINE> class Meta : <NEW_LINE> <INDENT> ordering =["patient", "-created_at"]
|
Voir pour assembler message et agenda ?
|
62599041d53ae8145f91970a
|
class SubsectionGradeBase(metaclass=ABCMeta): <NEW_LINE> <INDENT> def __init__(self, subsection): <NEW_LINE> <INDENT> self.location = subsection.location <NEW_LINE> self.display_name = escape(block_metadata_utils.display_name_with_default(subsection)) <NEW_LINE> self.url_name = block_metadata_utils.url_name_for_block(subsection) <NEW_LINE> self.format = getattr(subsection, 'format', '') <NEW_LINE> self.due = getattr(subsection, 'due', None) <NEW_LINE> self.graded = getattr(subsection, 'graded', False) <NEW_LINE> self.show_correctness = getattr(subsection, 'show_correctness', '') <NEW_LINE> self.course_version = getattr(subsection, 'course_version', None) <NEW_LINE> self.subtree_edited_timestamp = getattr(subsection, 'subtree_edited_on', None) <NEW_LINE> self.override = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def attempted(self): <NEW_LINE> <INDENT> assert self.all_total is not None, ( "SubsectionGrade not fully populated yet. Call init_from_structure or init_from_model " "before use." ) <NEW_LINE> return self.all_total.attempted <NEW_LINE> <DEDENT> def show_grades(self, has_staff_access): <NEW_LINE> <INDENT> return ShowCorrectness.correctness_available(self.show_correctness, self.due, has_staff_access) <NEW_LINE> <DEDENT> @property <NEW_LINE> def attempted_graded(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @property <NEW_LINE> def percent_graded(self): <NEW_LINE> <INDENT> raise NotImplementedError
|
Abstract base class for Subsection Grades.
|
6259904107f4c71912bb06df
|
class TestInit(TestJobList): <NEW_LINE> <INDENT> def test_init_no_job_list_provided(self): <NEW_LINE> <INDENT> endpoint = JobsList(self.session, self.request) <NEW_LINE> self.assertIsInstance(endpoint.job_list, JobListInterface) <NEW_LINE> <DEDENT> @given(job_lists()) <NEW_LINE> def test_init_job_list_provided(self, job_list: JobListInterface) -> None: <NEW_LINE> <INDENT> endpoint = JobsList(self.session, self.request, job_list) <NEW_LINE> self.assertEqual(endpoint.job_list, job_list)
|
Contains unit tests for the ``__init__`` method of the endpoint
|
62599041d99f1b3c44d0694a
|
class FFmpegPCMAudio(AudioSource): <NEW_LINE> <INDENT> def __init__(self, source, *, executable='ffmpeg', pipe=False, stderr=None, before_options=None, options=None): <NEW_LINE> <INDENT> stdin = None if not pipe else source <NEW_LINE> args = [executable] <NEW_LINE> if isinstance(before_options, str): <NEW_LINE> <INDENT> args.extend(shlex.split(before_options)) <NEW_LINE> <DEDENT> args.append('-i') <NEW_LINE> args.append('-' if pipe else source) <NEW_LINE> args.extend(('-f', 's16le', '-ar', '48000', '-ac', '2', '-loglevel', 'warning')) <NEW_LINE> if isinstance(options, str): <NEW_LINE> <INDENT> args.extend(shlex.split(options)) <NEW_LINE> <DEDENT> args.append('pipe:1') <NEW_LINE> try: <NEW_LINE> <INDENT> self._process = subprocess.Popen(args, stdin=stdin, stdout=subprocess.PIPE, stderr=stderr) <NEW_LINE> self._stdout = self._process.stdout <NEW_LINE> <DEDENT> except FileNotFoundError: <NEW_LINE> <INDENT> raise ClientException(executable + ' was not found.') from None <NEW_LINE> <DEDENT> except subprocess.SubprocessError as e: <NEW_LINE> <INDENT> raise ClientException('Popen failed: {0.__class__.__name__}: {0}'.format(e)) from e <NEW_LINE> <DEDENT> <DEDENT> def read(self): <NEW_LINE> <INDENT> ret = self._stdout.read(OpusEncoder.FRAME_SIZE) <NEW_LINE> if len(ret) != OpusEncoder.FRAME_SIZE: <NEW_LINE> <INDENT> return b'' <NEW_LINE> <DEDENT> return ret <NEW_LINE> <DEDENT> def cleanup(self): <NEW_LINE> <INDENT> proc = self._process <NEW_LINE> if proc is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> log.info('Preparing to terminate ffmpeg process %s.', proc.pid) <NEW_LINE> proc.kill() <NEW_LINE> if proc.poll() is None: <NEW_LINE> <INDENT> log.info('ffmpeg process %s has not terminated. 
Waiting to terminate...', proc.pid) <NEW_LINE> proc.communicate() <NEW_LINE> log.info('ffmpeg process %s should have terminated with a return code of %s.', proc.pid, proc.returncode) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> log.info('ffmpeg process %s successfully terminated with return code of %s.', proc.pid, proc.returncode) <NEW_LINE> <DEDENT> self._process = None
|
An audio source from FFmpeg (or AVConv).
This launches a sub-process to a specific input file given.
.. warning::
You must have the ffmpeg or avconv executable in your path environment
variable in order for this to work.
Parameters
------------
source: Union[str, BinaryIO]
The input that ffmpeg will take and convert to PCM bytes.
If ``pipe`` is True then this is a file-like object that is
passed to the stdin of ffmpeg.
executable: str
The executable name (and path) to use. Defaults to ``ffmpeg``.
pipe: bool
If true, denotes that ``source`` parameter will be passed
to the stdin of ffmpeg. Defaults to ``False``.
stderr: Optional[BinaryIO]
A file-like object to pass to the Popen constructor.
Could also be an instance of ``subprocess.PIPE``.
options: Optional[str]
Extra command line arguments to pass to ffmpeg after the ``-i`` flag.
before_options: Optional[str]
Extra command line arguments to pass to ffmpeg before the ``-i`` flag.
Raises
--------
ClientException
The subprocess failed to be created.
|
625990411d351010ab8f4dcc
|
class is_keyword(parser.keyword): <NEW_LINE> <INDENT> def __init__(self, sString): <NEW_LINE> <INDENT> parser.keyword.__init__(self, sString)
|
unique_id = interface_subprogram_declaration : is_keyword
|
62599041d53ae8145f91970b
|
class StubSource(PlayerBase): <NEW_LINE> <INDENT> def read(self): <NEW_LINE> <INDENT> return ZEROS <NEW_LINE> <DEDENT> def skip(self, amount: int): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def previous(self, amount: int): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def resume(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def pause(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def queue(self, arg: str): <NEW_LINE> <INDENT> pass
|
Source meant to initiate audio players when constructing voice clients.
|
6259904176d4e153a661dbcb
|
class IssueForm(Form): <NEW_LINE> <INDENT> name = StringField('Issue', [validators.Required(message='We need an issue.'), validators.Length( max=70, message='Your \subject is a tad long.' ) ] ) <NEW_LINE> description = TextAreaField('Issue Description', [validators.required( message='Please describe your issue.')]) <NEW_LINE> priority = SelectField('Priority', choices=[ ('high', 'High'), ('medium', 'Medium'), ('low', 'Low')]) <NEW_LINE> department = SelectField('Department', [validators.Required( message='Department required.')], coerce=int) <NEW_LINE> submit = SubmitField('Post Issue') <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(IssueForm, self).__init__(*args, **kwargs) <NEW_LINE> self.department.choices = [ (dept.id, dept.name) for dept in Department.query.all()]
|
This class creates an IssueForm
object.
|
625990410a366e3fb87ddc93
|
class BaxterPermutations_all(DisjointUnionEnumeratedSets, BaxterPermutations): <NEW_LINE> <INDENT> def __init__(self, n=None): <NEW_LINE> <INDENT> self.element_class = Permutations().element_class <NEW_LINE> from sage.categories.examples.infinite_enumerated_sets import NonNegativeIntegers <NEW_LINE> from sage.sets.family import Family <NEW_LINE> DisjointUnionEnumeratedSets.__init__(self, Family(NonNegativeIntegers(), BaxterPermutations_size), facade=False, keepkey=False) <NEW_LINE> <DEDENT> def _repr_(self): <NEW_LINE> <INDENT> return "Baxter permutations" <NEW_LINE> <DEDENT> def __contains__(self, x): <NEW_LINE> <INDENT> if not x in Permutations(): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return x in BaxterPermutations(len(x)) <NEW_LINE> <DEDENT> def to_pair_of_twin_binary_trees(self, p): <NEW_LINE> <INDENT> from sage.combinat.binary_tree import LabelledBinaryTree <NEW_LINE> left = LabelledBinaryTree(None) <NEW_LINE> right = LabelledBinaryTree(None) <NEW_LINE> for a in p: <NEW_LINE> <INDENT> left = left.binary_search_insert(a) <NEW_LINE> <DEDENT> for a in reversed(p): <NEW_LINE> <INDENT> right = right.binary_search_insert(a) <NEW_LINE> <DEDENT> return (left, right)
|
The enumerated set of all Baxter permutations.
See :class:`BaxterPermutations` for the definition of Baxter
permutations.
EXAMPLES::
sage: from sage.combinat.baxter_permutations import BaxterPermutations_all
sage: BaxterPermutations_all()
Baxter permutations
|
6259904123e79379d538d7ad
|
class SaltObject(object): <NEW_LINE> <INDENT> def __init__(self, salt): <NEW_LINE> <INDENT> _mods = {} <NEW_LINE> for full_func in salt: <NEW_LINE> <INDENT> mod, func = full_func.split('.') <NEW_LINE> if mod not in _mods: <NEW_LINE> <INDENT> _mods[mod] = {} <NEW_LINE> <DEDENT> _mods[mod][func] = salt[full_func] <NEW_LINE> <DEDENT> self.mods = {} <NEW_LINE> for mod in _mods.keys(): <NEW_LINE> <INDENT> mod_name = '{0}Module'.format(str(mod).capitalize()) <NEW_LINE> mod_object = namedtuple(mod_name, _mods[mod].keys()) <NEW_LINE> self.mods[mod] = mod_object(**_mods[mod]) <NEW_LINE> <DEDENT> <DEDENT> def __getattr__(self, mod): <NEW_LINE> <INDENT> if mod not in self.mods: <NEW_LINE> <INDENT> raise AttributeError <NEW_LINE> <DEDENT> return self.mods[mod]
|
Object based interface to the functions in __salt__
.. code-block:: python
:linenos:
Salt = SaltObject(__salt__)
Salt.cmd.run(bar)
|
6259904126068e7796d4dbf5
|
class P4Register(P4State): <NEW_LINE> <INDENT> def __init__(self, p4, cli, name, n, m, timer_ms): <NEW_LINE> <INDENT> super(P4Register, self).__init__(p4, name, n, m, timer_ms) <NEW_LINE> self.cli = cli <NEW_LINE> <DEDENT> def read(self): <NEW_LINE> <INDENT> self.logger.debug("register_read %s", self.name) <NEW_LINE> result = self.cli.register_read(self.name) <NEW_LINE> if result is not None: <NEW_LINE> <INDENT> result = result.split(',') <NEW_LINE> for i in range(0, len(result)): <NEW_LINE> <INDENT> self.values[i] = int(result[i].strip()) <NEW_LINE> <DEDENT> self.display() <NEW_LINE> <DEDENT> <DEDENT> def read_index(self, index): <NEW_LINE> <INDENT> self.logger.debug("register_read_index %s %s", self.name, index) <NEW_LINE> result = self.cli.register_read_index(self.name, str(index)) <NEW_LINE> if result is not None: <NEW_LINE> <INDENT> self.values[index] = int(result.strip()) <NEW_LINE> self.display() <NEW_LINE> <DEDENT> <DEDENT> def display(self, index = None): <NEW_LINE> <INDENT> if index is not None: <NEW_LINE> <INDENT> self.logger.info("%20s[%4d] - %s", self.name, index, self.values[index]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.n == 1: <NEW_LINE> <INDENT> self.logger.info("%-24s: [" + ", ".join("%4d" % v for v in self.values) + "]",self.name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.logger.info("%-24s: ", self.name) <NEW_LINE> for row in range(0, self.n): <NEW_LINE> <INDENT> self.logger.info("%-23s[%3d]: [" + ", ".join("%4d" % v for v in self.values[row*self.m:(row*self.m + self.m)]) + "]", " ", row)
|
Abstraction of a P4 register
|
625990418e71fb1e983bcd7c
|
class opaque_type_registrator_t( code_creator.code_creator_t , declaration_based.declaration_based_t ): <NEW_LINE> <INDENT> def __init__( self, pointee ): <NEW_LINE> <INDENT> code_creator.code_creator_t.__init__( self ) <NEW_LINE> declaration_based.declaration_based_t.__init__( self, pointee ) <NEW_LINE> self.works_on_instance = False <NEW_LINE> <DEDENT> def _create_impl(self): <NEW_LINE> <INDENT> return 'BOOST_PYTHON_OPAQUE_SPECIALIZED_TYPE_ID( %s )' % self.decl_identifier <NEW_LINE> <DEDENT> def _get_system_files_impl( self ): <NEW_LINE> <INDENT> return []
|
This class creates code that register static sized array
|
625990418e05c05ec3f6f7b2
|
class VideoGrayscale(object): <NEW_LINE> <INDENT> def __init__(self, num_output_channels=1): <NEW_LINE> <INDENT> assert num_output_channels in (1, 3) <NEW_LINE> self.num_output_channels = num_output_channels <NEW_LINE> <DEDENT> def __call__(self, video): <NEW_LINE> <INDENT> C, L, H, W = video.size() <NEW_LINE> grayscaled_video = torch.FloatTensor(self.num_output_channels, L, H, W) <NEW_LINE> transform = torchvision.transforms.Compose([ torchvision.transforms.ToPILImage(), torchvision.transforms.Grayscale(self.num_output_channels), torchvision.transforms.ToTensor(), ]) <NEW_LINE> for l in range(L): <NEW_LINE> <INDENT> frame = video[:, l, :, :] <NEW_LINE> frame = transform(frame) <NEW_LINE> grayscaled_video[:, l, :, :] = frame <NEW_LINE> <DEDENT> return grayscaled_video
|
Convert video (C x L x H x W) to grayscale (C' x L x H x W, C' = 1 or 3)
Args:
num_output_channels (int): (1 or 3) number of channels desired for output video
|
6259904130dc7b76659a0ae0
|
class Room(models.Model): <NEW_LINE> <INDENT> created = models.DateTimeField(auto_now_add=True) <NEW_LINE> title = models.CharField(max_length=100, blank=False, null=False) <NEW_LINE> description = models.CharField(max_length=600, blank=False, null=False) <NEW_LINE> location = models.CharField(max_length=100, blank=True, default='') <NEW_LINE> price = models.IntegerField() <NEW_LINE> room_type = models.CharField( choices=ROOM_CHOICES, default='Single', max_length=100 ) <NEW_LINE> image = models.ImageField(null=True, upload_to="images/room/images/") <NEW_LINE> owner = models.ForeignKey( 'auth.User', related_name='rooms', on_delete=models.CASCADE) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> ordering = ['created']
|
Represents a room
|
62599041004d5f362081f93c
|
class FenicsVector(VectorInterface): <NEW_LINE> <INDENT> def __init__(self, impl): <NEW_LINE> <INDENT> self.impl = impl <NEW_LINE> <DEDENT> def make_zeros(cls, subtype): <NEW_LINE> <INDENT> impl = df.Vector(*subtype) <NEW_LINE> return cls(impl) <NEW_LINE> <DEDENT> @property <NEW_LINE> def dim(self): <NEW_LINE> <INDENT> return self.impl.size() <NEW_LINE> <DEDENT> @property <NEW_LINE> def subtype(self): <NEW_LINE> <INDENT> impl = self.impl <NEW_LINE> return FenicsVectorSubtype((impl.mpi_comm(), self.impl.size())) <NEW_LINE> <DEDENT> @property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> return self.impl.array() <NEW_LINE> <DEDENT> def copy(self): <NEW_LINE> <INDENT> return FenicsVector(self.impl.copy()) <NEW_LINE> <DEDENT> def scal(self, alpha): <NEW_LINE> <INDENT> self.impl *= alpha <NEW_LINE> <DEDENT> def axpy(self, alpha, x): <NEW_LINE> <INDENT> if x is self: <NEW_LINE> <INDENT> self.scal(1. + alpha) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.impl.axpy(alpha, x.impl) <NEW_LINE> <DEDENT> <DEDENT> def dot(self, other): <NEW_LINE> <INDENT> return self.impl.inner(other.impl) <NEW_LINE> <DEDENT> def l1_norm(self): <NEW_LINE> <INDENT> return self.impl.norm('l1') <NEW_LINE> <DEDENT> def l2_norm(self): <NEW_LINE> <INDENT> return self.impl.norm('l2') <NEW_LINE> <DEDENT> def sup_norm(self): <NEW_LINE> <INDENT> return self.impl.norm('linf') <NEW_LINE> <DEDENT> def components(self, component_indices): <NEW_LINE> <INDENT> component_indices = np.array(component_indices, dtype=np.intc) <NEW_LINE> if len(component_indices) == 0: <NEW_LINE> <INDENT> return np.array([], dtype=np.intc) <NEW_LINE> <DEDENT> assert 0 <= np.min(component_indices) <NEW_LINE> assert np.max(component_indices) < self.dim <NEW_LINE> x = df.Vector() <NEW_LINE> self.impl.gather(x, component_indices) <NEW_LINE> return x.array() <NEW_LINE> <DEDENT> def amax(self): <NEW_LINE> <INDENT> A = np.abs(self.impl.array()) <NEW_LINE> max_ind = np.argmax(A) <NEW_LINE> max_val = A[max_ind] <NEW_LINE> return 
max_ind, max_val <NEW_LINE> <DEDENT> def __add__(self, other): <NEW_LINE> <INDENT> return FenicsVector(self.impl + other.impl) <NEW_LINE> <DEDENT> def __iadd__(self, other): <NEW_LINE> <INDENT> self.impl += other.impl <NEW_LINE> return self <NEW_LINE> <DEDENT> __radd__ = __add__ <NEW_LINE> def __sub__(self, other): <NEW_LINE> <INDENT> return FenicsVector(self.impl - other.impl) <NEW_LINE> <DEDENT> def __isub__(self, other): <NEW_LINE> <INDENT> self.impl -= other.impl <NEW_LINE> return self <NEW_LINE> <DEDENT> def __mul__(self, other): <NEW_LINE> <INDENT> return FenicsVector(self.impl * other) <NEW_LINE> <DEDENT> def __neg__(self): <NEW_LINE> <INDENT> return FenicsVector(-self.impl)
|
Wraps a FEniCS vector to make it usable with ListVectorArray.
|
625990418a43f66fc4bf3440
|
class PatchtestOEError(Exception): <NEW_LINE> <INDENT> def __init__(self, message, exitcode=1): <NEW_LINE> <INDENT> super().__init__(message) <NEW_LINE> self.exitcode = exitcode
|
Exception for handling patchtest-oe errors
|
6259904126238365f5fade08
|
class VerseSearchViewSet(viewsets.ReadOnlyModelViewSet): <NEW_LINE> <INDENT> serializer_class = VerseSerializer <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> results = Verse.objects.all() <NEW_LINE> if 'query' in self.request.query_params: <NEW_LINE> <INDENT> results = results.filter(text__search=self.request.query_params['query']) <NEW_LINE> <DEDENT> return results
|
Lists verses and provides basic searching capability.
To search, provide the `query` query string (e.g. `.../verses/?query=heart`)
|
62599041baa26c4b54d50558
|
class RecoverShard(base.Task): <NEW_LINE> <INDENT> def __init__(self, executor, context, begin, end): <NEW_LINE> <INDENT> super(RecoverShard, self).__init__(executor) <NEW_LINE> self.context = context <NEW_LINE> self.begin_shard = begin <NEW_LINE> self.end_shard = end <NEW_LINE> <DEDENT> def _get_zones(self): <NEW_LINE> <INDENT> criterion = { 'shard': "BETWEEN %s,%s" % (self.begin_shard, self.end_shard), 'status': 'ERROR' } <NEW_LINE> error_zones = self.storage.find_zones(self.context, criterion) <NEW_LINE> current = utils.increment_serial() <NEW_LINE> stale_criterion = { 'shard': "BETWEEN %s,%s" % (self.begin_shard, self.end_shard), 'status': 'PENDING', 'serial': "<%s" % (current - self.max_prop_time) } <NEW_LINE> stale_zones = self.storage.find_zones(self.context, stale_criterion) <NEW_LINE> if stale_zones: <NEW_LINE> <INDENT> LOG.warning('Found %(len)d zones PENDING for more than %(sec)d ' 'seconds', { 'len': len(stale_zones), 'sec': self.max_prop_time }) <NEW_LINE> error_zones.extend(stale_zones) <NEW_LINE> <DEDENT> return error_zones <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> zones = self._get_zones() <NEW_LINE> for zone in zones: <NEW_LINE> <INDENT> if zone.action == 'CREATE': <NEW_LINE> <INDENT> self.worker_api.create_zone(self.context, zone) <NEW_LINE> <DEDENT> elif zone.action == 'UPDATE': <NEW_LINE> <INDENT> self.worker_api.update_zone(self.context, zone) <NEW_LINE> <DEDENT> elif zone.action == 'DELETE': <NEW_LINE> <INDENT> self.worker_api.delete_zone(self.context, zone)
|
Given a beginning and ending shard, create the work to recover any
zones in an undesirable state within those shards.
:return: No return value
|
62599041d99f1b3c44d0694c
|
class GSE2BulletinSyntaxError(Exception): <NEW_LINE> <INDENT> pass
|
Raised when the file is not a valid GSE2 file
|
62599041d10714528d69efe4
|
class ILeadImage(model.Schema): <NEW_LINE> <INDENT> image = NamedBlobImage( title=_(u'Lead image'), required = False, )
|
Marker/Form interface for LeadImage
|
625990411d351010ab8f4dcd
|
class RestOfPacketString(MySQLDataType): <NEW_LINE> <INDENT> def __init__(self, val): <NEW_LINE> <INDENT> super(RestOfPacketString, self).__init__() <NEW_LINE> self.val = bytes(val) <NEW_LINE> self.length = len(self.val) <NEW_LINE> <DEDENT> def read_in(self, fde): <NEW_LINE> <INDENT> self.val = bytes(fde.read()) <NEW_LINE> self.length = len(self.val) <NEW_LINE> return self.length <NEW_LINE> <DEDENT> def write_out(self, fde): <NEW_LINE> <INDENT> fde.write(bytes(self.val)) <NEW_LINE> return len(bytes(self.val))
|
AKA the EOF string
|
625990413c8af77a43b68894
|
class ITask(IDueDate, IAssignmentsAware): <NEW_LINE> <INDENT> title = schema.TextLine( title = _('Title'), description = _('Task title.'), required = True) <NEW_LINE> milestone = schema.Choice( title = _(u'Milestone'), description = _(u'Assign task to milestone.'), vocabulary = 'project.milestone.active', required = False) <NEW_LINE> text = RichText( title = _(u'Information'), description = _(u'Task detailed information.'), required = True)
|
project task
|
62599041d53ae8145f91970d
|
class IgesHandler(NurbsHandler): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(IgesHandler, self).__init__() <NEW_LINE> self.extensions = ['.iges', '.igs'] <NEW_LINE> <DEDENT> def load_shape_from_file(self, filename): <NEW_LINE> <INDENT> self._check_filename_type(filename) <NEW_LINE> self._check_extension(filename) <NEW_LINE> reader = IGESControl_Reader() <NEW_LINE> return_reader = reader.ReadFile(filename) <NEW_LINE> if return_reader == IFSelect_RetDone: <NEW_LINE> <INDENT> return_transfer = reader.TransferRoots() <NEW_LINE> if return_transfer: <NEW_LINE> <INDENT> shape = reader.OneShape() <NEW_LINE> <DEDENT> <DEDENT> return shape <NEW_LINE> <DEDENT> def write_shape_to_file(self, shape, filename): <NEW_LINE> <INDENT> self._check_filename_type(filename) <NEW_LINE> self._check_extension(filename) <NEW_LINE> IGESControl_Controller_Init() <NEW_LINE> writer = IGESControl_Writer() <NEW_LINE> writer.AddShape(shape) <NEW_LINE> writer.Write(filename)
|
Iges file handler class
:cvar string infile: name of the input file to be processed.
:cvar string outfile: name of the output file where to write in.
:cvar list extensions: list of extensions of the input/output files.
It is equal to ['.iges', '.igs'].
:cvar list control_point_position: index of the first NURBS control point (or pole)
of each face of the iges file.
:cvar float tolerance: tolerance for the construction of the faces and wires
in the write function. Default value is 1e-6.
:cvar TopoDS_Shape shape: shape meant for modification.
.. warning::
- For non trivial geometries it could be necessary to increase the tolerance.
Linking edges into a single wire and then trimming the surface with the wire
can be hard for the software, especially when the starting CAD has not been
made for analysis but for design purposes.
|
62599041507cdc57c63a604c
|
class PrivateIngredientsApiTests(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.client = APIClient() <NEW_LINE> self.user = get_user_model().objects.create_user( 'test@test.com', 'testpass' ) <NEW_LINE> self.client.force_authenticate(self.user) <NEW_LINE> <DEDENT> def test_retrieve_ingredient_list(self): <NEW_LINE> <INDENT> Ingredient.objects.create(user=self.user, name='kale') <NEW_LINE> Ingredient.objects.create(user=self.user, name='salt') <NEW_LINE> res = self.client.get(INGREDIENTS_URL) <NEW_LINE> ingredients = Ingredient.objects.all().order_by('-name') <NEW_LINE> serializer = IngredientSerializer(ingredients, many=True) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_200_OK) <NEW_LINE> self.assertEqual(res.data, serializer.data) <NEW_LINE> <DEDENT> def test_ingredients_limited_to_user(self): <NEW_LINE> <INDENT> user2 = get_user_model().objects.create_user( 'other@londonappdev.com', 'testpass' ) <NEW_LINE> Ingredient.objects.create(user=user2, name='Vinegar') <NEW_LINE> ingredient = Ingredient.objects.create(user=self.user, name='tumeric') <NEW_LINE> res = self.client.get(INGREDIENTS_URL) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_200_OK) <NEW_LINE> self.assertEqual(len(res.data), 1) <NEW_LINE> self.assertEqual(res.data[0]['name'], ingredient.name) <NEW_LINE> <DEDENT> def test_create_ingredient_successful(self): <NEW_LINE> <INDENT> payload = {'name': 'Fish'} <NEW_LINE> self.client.post(INGREDIENTS_URL, payload) <NEW_LINE> exists = Ingredient.objects.filter( user=self.user, name=payload['name'] ).exists() <NEW_LINE> self.assertTrue(exists) <NEW_LINE> <DEDENT> def test_create_ingredient_invalid(self): <NEW_LINE> <INDENT> payload = {'name': ''} <NEW_LINE> res = self.client.post(INGREDIENTS_URL, payload) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> def test_retrieve_ingredients_assigned_to_recipes(self): <NEW_LINE> <INDENT> ingredient1 = Ingredient.objects.create( 
user=self.user, name='Apples' ) <NEW_LINE> ingredient2 = Ingredient.objects.create( user=self.user, name='Turkey' ) <NEW_LINE> recipe = Recipe.objects.create( title='Apple crumble', time_minutes=5, price=10.00, user=self.user ) <NEW_LINE> recipe.ingredients.add(ingredient1) <NEW_LINE> res = self.client.get(INGREDIENTS_URL, {'assigned_only': 1}) <NEW_LINE> serializer1 = IngredientSerializer(ingredient1) <NEW_LINE> serializer2 = IngredientSerializer(ingredient2) <NEW_LINE> self.assertIn(serializer1.data, res.data) <NEW_LINE> self.assertNotIn(serializer2.data, res.data) <NEW_LINE> <DEDENT> def test_retrieve_ingredient_assigned_unique(self): <NEW_LINE> <INDENT> ingredient = Ingredient.objects.create(user=self.user, name='Eggs') <NEW_LINE> Ingredient.objects.create(user=self.user, name='Cheese') <NEW_LINE> recipe1 = Recipe.objects.create( title='Eggs benedict', time_minutes=30, price=12.00, user=self.user ) <NEW_LINE> recipe1.ingredients.add(ingredient) <NEW_LINE> recipe2 = Recipe.objects.create( title='Green eggs on toast', time_minutes=20, price=5.00, user=self.user ) <NEW_LINE> recipe2.ingredients.add(ingredient) <NEW_LINE> res = self.client.get(INGREDIENTS_URL, {'assigned_only': 1}) <NEW_LINE> self.assertEqual(len(res.data), 1)
|
Test the authorized user ingredients API
|
62599041b57a9660fecd2d2b
|
@validation_decorators.AuditsExisting(base_models.BaseModel) <NEW_LINE> class ValidateBaseModelId(beam.DoFn): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(ValidateBaseModelId, self).__init__() <NEW_LINE> self._pattern = BASE_MODEL_ID_PATTERN <NEW_LINE> <DEDENT> def process(self, entity): <NEW_LINE> <INDENT> cloned_entity = job_utils.clone_model(entity) <NEW_LINE> if not re.match(self._pattern, cloned_entity.id): <NEW_LINE> <INDENT> yield base_validation_errors.ModelIdRegexError( cloned_entity, self._pattern)
|
DoFn to validate model ids.
IMPORTANT: Models with special ID checks should derive from this class and
override __init__() to assign a different value to self._regex, or replace
the process() method entirely. Be sure to decorate the new class with that
specific model type.
|
6259904173bcbd0ca4bcb53b
|
class svn_repos_file_rev_handler_t: <NEW_LINE> <INDENT> __swig_setmethods__ = {} <NEW_LINE> __setattr__ = lambda self, name, value: _swig_setattr(self, svn_repos_file_rev_handler_t, name, value) <NEW_LINE> __swig_getmethods__ = {} <NEW_LINE> __getattr__ = lambda self, name: _swig_getattr(self, svn_repos_file_rev_handler_t, name) <NEW_LINE> def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined") <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def set_parent_pool(self, parent_pool=None): <NEW_LINE> <INDENT> import libsvn.core, weakref <NEW_LINE> self.__dict__["_parent_pool"] = parent_pool or libsvn.core.application_pool; <NEW_LINE> if self.__dict__["_parent_pool"]: <NEW_LINE> <INDENT> self.__dict__["_is_valid"] = weakref.ref( self.__dict__["_parent_pool"]._is_valid) <NEW_LINE> <DEDENT> <DEDENT> def assert_valid(self): <NEW_LINE> <INDENT> if "_is_valid" in self.__dict__: <NEW_LINE> <INDENT> assert self.__dict__["_is_valid"](), "Variable has already been deleted" <NEW_LINE> <DEDENT> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> self.assert_valid() <NEW_LINE> value = _swig_getattr(self, self.__class__, name) <NEW_LINE> members = self.__dict__.get("_members") <NEW_LINE> if members is not None: <NEW_LINE> <INDENT> _copy_metadata_deep(value, members.get(name)) <NEW_LINE> <DEDENT> _assert_valid_deep(value) <NEW_LINE> return value <NEW_LINE> <DEDENT> def __setattr__(self, name, value): <NEW_LINE> <INDENT> self.assert_valid() <NEW_LINE> self.__dict__.setdefault("_members",{})[name] = value <NEW_LINE> return _swig_setattr(self, self.__class__, name, value) <NEW_LINE> <DEDENT> def __call__(self, *args): <NEW_LINE> <INDENT> return svn_repos_invoke_file_rev_handler(self, *args)
|
Proxy of C svn_repos_file_rev_handler_t struct
|
6259904150485f2cf55dc235
|
class HorizontalTail(): <NEW_LINE> <INDENT> def __init__(self, attachment = None, sweep = None, taper_ratio = None, aspect_ratio = None, t_o_c = None, dihedral = None, volume = None, lever_arm = None, area = None, span = None, mac = None, net_wetted_area = None, mass = None, c_g = None, x_axe = None, z_axe = None, c_axe = None, x_tip = None, y_tip = None, z_tip = None, c_tip = None, x_mac = None, y_mac = None): <NEW_LINE> <INDENT> self.attachment = attachment <NEW_LINE> self.sweep = sweep <NEW_LINE> self.taper_ratio = taper_ratio <NEW_LINE> self.aspect_ratio = aspect_ratio <NEW_LINE> self.t_o_c = t_o_c <NEW_LINE> self.dihedral = dihedral <NEW_LINE> self.volume = volume <NEW_LINE> self.lever_arm = lever_arm <NEW_LINE> self.area = area <NEW_LINE> self.span = span <NEW_LINE> self.mac = mac <NEW_LINE> self.net_wetted_area = net_wetted_area <NEW_LINE> self.mass = mass <NEW_LINE> self.c_g = c_g <NEW_LINE> self.x_axe = x_axe <NEW_LINE> self.z_axe = z_axe <NEW_LINE> self.c_axe = c_axe <NEW_LINE> self.x_tip = x_tip <NEW_LINE> self.y_tip = y_tip <NEW_LINE> self.z_tip = z_tip <NEW_LINE> self.c_tip = c_tip <NEW_LINE> self.x_mac = x_mac <NEW_LINE> self.y_mac = y_mac
|
Horizontal tail plane characteristics
|
6259904115baa72349463242
|
class ManageUserView(generics.RetrieveUpdateDestroyAPIView): <NEW_LINE> <INDENT> authentication_classes = (authentication.TokenAuthentication,) <NEW_LINE> permission_classes = (permissions.IsAuthenticated,) <NEW_LINE> def get_serializer_class(self): <NEW_LINE> <INDENT> if self.request.user.is_artist: <NEW_LINE> <INDENT> serializer_class = ArtistSerializer <NEW_LINE> <DEDENT> elif self.request.user.is_promoter: <NEW_LINE> <INDENT> serializer_class = PromoterSerializer <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> serializer_class = UserSerializer <NEW_LINE> <DEDENT> return serializer_class <NEW_LINE> <DEDENT> def get_object(self): <NEW_LINE> <INDENT> if self.request.user.is_artist: <NEW_LINE> <INDENT> return self.request.user.artist <NEW_LINE> <DEDENT> elif self.request.user.is_promoter: <NEW_LINE> <INDENT> return self.request.user.promoter <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.request.user
|
Manage the authenticated user.
|
62599041e64d504609df9d29
|
class ThinLVMVolumeDriver(LVMISCSIDriver): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(ThinLVMVolumeDriver, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def check_for_setup_error(self): <NEW_LINE> <INDENT> out, err = self._execute('lvs', '--option', 'name', '--noheadings', run_as_root=True) <NEW_LINE> pool_name = "%s-pool" % FLAGS.volume_group <NEW_LINE> if pool_name not in out: <NEW_LINE> <INDENT> if not FLAGS.pool_size: <NEW_LINE> <INDENT> out, err = self._execute('vgs', FLAGS.volume_group, '--noheadings', '--options', 'name,size', run_as_root=True) <NEW_LINE> size = re.sub(r'[\.][\d][\d]', '', out.split()[1]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> size = "%s" % FLAGS.pool_size <NEW_LINE> <DEDENT> pool_path = '%s/%s' % (FLAGS.volume_group, pool_name) <NEW_LINE> out, err = self._execute('lvcreate', '-T', '-L', size, pool_path, run_as_root=True) <NEW_LINE> <DEDENT> <DEDENT> def _do_lvm_snapshot(self, src_lvm_name, dest_vref, is_cinder_snap=True): <NEW_LINE> <INDENT> if is_cinder_snap: <NEW_LINE> <INDENT> new_name = self._escape_snapshot(dest_vref['name']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_name = dest_vref['name'] <NEW_LINE> <DEDENT> self._try_execute('lvcreate', '-s', '-n', new_name, src_lvm_name, run_as_root=True) <NEW_LINE> <DEDENT> def create_volume(self, volume): <NEW_LINE> <INDENT> sizestr = self._sizestr(volume['size']) <NEW_LINE> vg_name = ("%s/%s-pool" % (FLAGS.volume_group, FLAGS.volume_group)) <NEW_LINE> self._try_execute('lvcreate', '-T', '-V', sizestr, '-n', volume['name'], vg_name, run_as_root=True) <NEW_LINE> <DEDENT> def delete_volume(self, volume): <NEW_LINE> <INDENT> if self._volume_not_present(volume['name']): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> self._try_execute('lvremove', '-f', "%s/%s" % (FLAGS.volume_group, self._escape_snapshot(volume['name'])), run_as_root=True) <NEW_LINE> <DEDENT> def create_cloned_volume(self, volume, src_vref): <NEW_LINE> <INDENT> 
LOG.info(_('Creating clone of volume: %s') % src_vref['id']) <NEW_LINE> orig_lv_name = "%s/%s" % (FLAGS.volume_group, src_vref['name']) <NEW_LINE> self._do_lvm_snapshot(orig_lv_name, volume, False) <NEW_LINE> <DEDENT> def create_snapshot(self, snapshot): <NEW_LINE> <INDENT> orig_lv_name = "%s/%s" % (FLAGS.volume_group, snapshot['volume_name']) <NEW_LINE> self._do_lvm_snapshot(orig_lv_name, snapshot)
|
Subclass for thin provisioned LVM's.
|
625990411f5feb6acb163ea4
|
class Build: <NEW_LINE> <INDENT> def __init__(self, environment): <NEW_LINE> <INDENT> self.project_name = 'name of master project' <NEW_LINE> self.project_version = None <NEW_LINE> self.environment = environment <NEW_LINE> self.projects = {} <NEW_LINE> self.targets = {} <NEW_LINE> self.compilers = [] <NEW_LINE> self.cross_compilers = [] <NEW_LINE> self.global_args = {} <NEW_LINE> self.global_link_args = {} <NEW_LINE> self.tests = [] <NEW_LINE> self.benchmarks = [] <NEW_LINE> self.headers = [] <NEW_LINE> self.man = [] <NEW_LINE> self.data = [] <NEW_LINE> self.static_linker = None <NEW_LINE> self.static_cross_linker = None <NEW_LINE> self.subprojects = {} <NEW_LINE> self.install_scripts = [] <NEW_LINE> self.postconf_scripts = [] <NEW_LINE> self.install_dirs = [] <NEW_LINE> self.dep_manifest_name = None <NEW_LINE> self.dep_manifest = {} <NEW_LINE> self.cross_stdlibs = {} <NEW_LINE> <DEDENT> def has_language(self, language): <NEW_LINE> <INDENT> for i in self.compilers: <NEW_LINE> <INDENT> if i.get_language() == language: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def add_compiler(self, compiler): <NEW_LINE> <INDENT> if self.static_linker is None and compiler.needs_static_linker(): <NEW_LINE> <INDENT> self.static_linker = self.environment.detect_static_linker(compiler) <NEW_LINE> <DEDENT> if self.has_language(compiler.get_language()): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.compilers.append(compiler) <NEW_LINE> <DEDENT> def add_cross_compiler(self, compiler): <NEW_LINE> <INDENT> if len(self.cross_compilers) == 0: <NEW_LINE> <INDENT> self.static_cross_linker = self.environment.detect_static_linker(compiler) <NEW_LINE> <DEDENT> for i in self.cross_compilers: <NEW_LINE> <INDENT> if i.get_language() == compiler.get_language(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> self.cross_compilers.append(compiler) <NEW_LINE> <DEDENT> def get_project(self): <NEW_LINE> <INDENT> return self.projects[''] 
<NEW_LINE> <DEDENT> def get_targets(self): <NEW_LINE> <INDENT> return self.targets <NEW_LINE> <DEDENT> def get_tests(self): <NEW_LINE> <INDENT> return self.tests <NEW_LINE> <DEDENT> def get_benchmarks(self): <NEW_LINE> <INDENT> return self.benchmarks <NEW_LINE> <DEDENT> def get_headers(self): <NEW_LINE> <INDENT> return self.headers <NEW_LINE> <DEDENT> def get_man(self): <NEW_LINE> <INDENT> return self.man <NEW_LINE> <DEDENT> def get_data(self): <NEW_LINE> <INDENT> return self.data <NEW_LINE> <DEDENT> def get_install_subdirs(self): <NEW_LINE> <INDENT> return self.install_dirs <NEW_LINE> <DEDENT> def get_global_args(self, compiler): <NEW_LINE> <INDENT> return self.global_args.get(compiler.get_language(), []) <NEW_LINE> <DEDENT> def get_global_link_args(self, compiler): <NEW_LINE> <INDENT> return self.global_link_args.get(compiler.get_language(), [])
|
A class that holds the status of one build including
all dependencies and so on.
|
62599041a79ad1619776b330
|
class queue: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.items = list() <NEW_LINE> <DEDENT> def isEmpty(self): <NEW_LINE> <INDENT> return self.items == [] <NEW_LINE> <DEDENT> def enqueue(self, item): <NEW_LINE> <INDENT> self.items.insert(0,item) <NEW_LINE> <DEDENT> def deque(self): <NEW_LINE> <INDENT> return self.items.pop() <NEW_LINE> <DEDENT> def size(self): <NEW_LINE> <INDENT> return len(self.items)
|
implementing the Queue Abstract data structure
|
625990413eb6a72ae038b917
|
class Solution: <NEW_LINE> <INDENT> def combine(self, n, k): <NEW_LINE> <INDENT> d = {} <NEW_LINE> for i in range(k): <NEW_LINE> <INDENT> d[i+1] = [] <NEW_LINE> <DEDENT> for num in range(1, n+1): <NEW_LINE> <INDENT> for i in range(1, k): <NEW_LINE> <INDENT> items = d[k-i] <NEW_LINE> for item in items: <NEW_LINE> <INDENT> t = item[0:] <NEW_LINE> t.append(num) <NEW_LINE> d[k-i+1].append(t) <NEW_LINE> <DEDENT> <DEDENT> d[1].append([num]) <NEW_LINE> <DEDENT> return d[k]
|
@see https://oj.leetcode.com/problems/combinations/
|
6259904123e79379d538d7b0
|
class JSONConfig(XMLConfig): <NEW_LINE> <INDENT> EXTENSION = '.json' <NEW_LINE> def raw_to_object(self, raw): <NEW_LINE> <INDENT> return json.loads(raw.decode('utf-8'), object_hook=from_json) <NEW_LINE> <DEDENT> def to_raw(self): <NEW_LINE> <INDENT> return json.dumps(self, indent=2, default=to_json) <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return dict.__getitem__(self, key) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return self.defaults[key] <NEW_LINE> <DEDENT> <DEDENT> def get(self, key, default=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return dict.__getitem__(self, key) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return self.defaults.get(key, default) <NEW_LINE> <DEDENT> <DEDENT> def __setitem__(self, key, val): <NEW_LINE> <INDENT> dict.__setitem__(self, key, val) <NEW_LINE> self.commit()
|
JSONConfig
|
62599041596a897236128f07
|
class UserRegister(Resource): <NEW_LINE> <INDENT> parser = reqparse.RequestParser() <NEW_LINE> parser.add_argument("username", required=True, type=str) <NEW_LINE> parser.add_argument("password", required=True, type=str) <NEW_LINE> def post(self): <NEW_LINE> <INDENT> data = UserRegister.parser.parse_args() <NEW_LINE> if UserModel.get_user_by_username(data["username"]): <NEW_LINE> <INDENT> return {"message" : "user already exists"}, 400 <NEW_LINE> <DEDENT> user = UserModel(**data) <NEW_LINE> user.save_to_db() <NEW_LINE> return {"message" : "user added succesfully"}, 201
|
Endpoint for registering user. Data will come in this format {"username": "Lukas", "password" : "p@55w0rd"}
|
6259904130c21e258be99abe
|
class IntervalQuerySet(QuerySet): <NEW_LINE> <INDENT> def between(self, start: DateOrDatetime, end: DateOrDatetime, include_end_date=True) -> QuerySet: <NEW_LINE> <INDENT> if not isinstance(start, datetime.datetime): <NEW_LINE> <INDENT> start = utils.datetime_from_date(start) <NEW_LINE> <DEDENT> if not isinstance(end, datetime.datetime): <NEW_LINE> <INDENT> if include_end_date: <NEW_LINE> <INDENT> end += datetime.timedelta(days=1) <NEW_LINE> <DEDENT> end = utils.datetime_from_date(end) <NEW_LINE> <DEDENT> return self.filter(Q(start__lte=start, end__gte=end) | Q(start__range=(start, end)) | Q(end__range=(start, end))) .exclude(start=end).exclude(end=start) <NEW_LINE> <DEDENT> def at_date(self, dt: DateOrDatetime) -> QuerySet: <NEW_LINE> <INDENT> if not isinstance(dt, datetime.datetime): <NEW_LINE> <INDENT> dt = utils.datetime_from_date(dt) <NEW_LINE> <DEDENT> return self.filter(Q(start__lt=dt, end__gt=dt)) <NEW_LINE> <DEDENT> def similar(self, interval: 'Interval') -> QuerySet: <NEW_LINE> <INDENT> q = Q(resource=interval.resource, kind=interval.kind, organization=interval.organization, manager=interval.manager) <NEW_LINE> qs = self.filter(q) <NEW_LINE> if interval.id: <NEW_LINE> <INDENT> qs = qs.exclude(id=interval.id) <NEW_LINE> <DEDENT> return qs <NEW_LINE> <DEDENT> def is_continuous(self, start: datetime.datetime, end: datetime.datetime) -> bool: <NEW_LINE> <INDENT> existing = [] <NEW_LINE> for interval in self: <NEW_LINE> <INDENT> interval.join_with_existing(existing, timedelta=0) <NEW_LINE> existing.append(interval) <NEW_LINE> <DEDENT> return len(existing) == 1 and existing[0].start <= start and existing[0].end >= end <NEW_LINE> <DEDENT> def managers(self) -> QuerySet: <NEW_LINE> <INDENT> q = Q(manager__isnull=False) & (Q(kind=Interval.Kind_ManagerReserved) | Q(kind=Interval.Kind_OrganizationReserved)) <NEW_LINE> manager_ids = self.filter(q).order_by('manager').distinct('manager').values_list('manager', flat=True) <NEW_LINE> return 
Manager.objects.filter(id__in=manager_ids)
|
Менеджер объектов для модели Interval.
|
62599041be383301e0254ac9
|
class ChecklistResource (ModelResource): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> queryset = Checklist.objects.all() <NEW_LINE> resource_name = 'checklistresource' <NEW_LINE> authorization = Authorization()
|
Checklist webservice
|
62599041d53ae8145f91970f
|
class ExposureCN(AbstractPropertyMap): <NEW_LINE> <INDENT> def __init__(self, model, radius=12.0, offset=0): <NEW_LINE> <INDENT> assert(offset >= 0) <NEW_LINE> ppb = CaPPBuilder() <NEW_LINE> ppl = ppb.build_peptides(model) <NEW_LINE> fs_map = {} <NEW_LINE> fs_list = [] <NEW_LINE> fs_keys = [] <NEW_LINE> for pp1 in ppl: <NEW_LINE> <INDENT> for i in range(0, len(pp1)): <NEW_LINE> <INDENT> fs = 0 <NEW_LINE> r1 = pp1[i] <NEW_LINE> if not is_aa(r1) or not r1.has_id('CA'): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> ca1 = r1['CA'] <NEW_LINE> for pp2 in ppl: <NEW_LINE> <INDENT> for j in range(0, len(pp2)): <NEW_LINE> <INDENT> if pp1 is pp2 and abs(i - j) <= offset: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> r2 = pp2[j] <NEW_LINE> if not is_aa(r2) or not r2.has_id('CA'): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> ca2 = r2['CA'] <NEW_LINE> d = (ca2 - ca1) <NEW_LINE> if d < radius: <NEW_LINE> <INDENT> fs += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> res_id = r1.get_id() <NEW_LINE> chain_id = r1.get_parent().get_id() <NEW_LINE> fs_map[(chain_id, res_id)] = fs <NEW_LINE> fs_list.append((r1, fs)) <NEW_LINE> fs_keys.append((chain_id, res_id)) <NEW_LINE> r1.xtra['EXP_CN'] = fs <NEW_LINE> <DEDENT> <DEDENT> AbstractPropertyMap.__init__(self, fs_map, fs_keys, fs_list)
|
Residue exposure as number of CA atoms around its CA atom.
|
62599041e76e3b2f99fd9cbd
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.