code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
|---|---|---|
class UserLeavesAndHolidays(models.Model):
    """A type of leave as applied for / served by a user."""
    id = models.AutoField(primary_key=True)  # explicit PK (Django would create one implicitly anyway)
    user = models.ForeignKey(User,on_delete=models.CASCADE)  # the applicant
    leaveType = models.ForeignKey(LeavesAndHoliDays,on_delete=models.CASCADE)  # kind of leave/holiday
    startDate = models.DateField(blank=True)
    endDate = models.DateField(blank=True)
    description = models.CharField(max_length=50)
    approved = models.BooleanField(default=False)  # flipped to True once the leave is approved
    def __str__(self):
        return '%s %s' % (self.user, self.leaveType)
|
a type of leave as served by a user or as applied by a user
|
625990278c3a8732951f74ba
|
class CityView(ModelViewSet):
    """CRUD endpoints for City objects.

    Administrators get full CRUD; everyone else gets read-only access
    (enforced by IsAdminOrReadOnly).
    """
    queryset = City.objects.all()
    permission_classes = [IsAdminOrReadOnly]
    serializer_class = CityPlaceSerializer
    filter_backends = [DjangoFilterBackend]
    filter_fields = ['region']  # allows filtering like ?region=<id>
|
View для городов
CRUD для администратора
Read-only для всех
|
6259902715baa72349462efb
|
@command_lib.PublicParser
class EggHuntCommand(command_lib.BasePublicCommand):
    """Public command: each invocation has a small chance of finding a HypeEgg."""

    DEFAULT_PARAMS = params_lib.MergeParams(
        command_lib.BasePublicCommand.DEFAULT_PARAMS,
        {
            # Probability that a single invocation yields an egg.
            'find_chance': 0.05,
        })

    def _Handle(self, channel: channel_pb2.Channel, user: user_pb2.User,
                message: Text) -> hype_types.CommandResponse:
        # Single roll; on success create the egg, stash it in the user's
        # inventory, and announce the find.  On failure this falls through
        # and implicitly returns None (no response).
        if random.random() < self._params.find_chance:
            item = inventory_lib.Create('HypeEgg', self._core, user, {})
            self._core.inventory.AddItem(user, item)
            return '%s found a(n) %s' % (user, item.human_name)
|
Gotta find them all.
|
62599027a8ecb03325872180
|
@exporter
class SeqWordCharLabelDataFeed(ExampleDataFeed):
    """Feed object producing batches of sequential (word + char) prediction data."""

    def __init__(self, examples, batchsz, **kwargs):
        """Construction is delegated entirely to ExampleDataFeed."""
        super(SeqWordCharLabelDataFeed, self).__init__(examples, batchsz, **kwargs)

    def _batch(self, i):
        """Return batch *i*; slicing/allocation policy lives in the examples object."""
        return self.examples.batch(
            i, self.batchsz, self.trim, self.vec_alloc, self.vec_shape
        )
|
Feed object for sequential prediction training data
|
625990271d351010ab8f4a79
|
class EchoCloudSubVideo(EchoCloudVideo):
    """One part of a multi-part echo360-cloud video.

    Behaves like EchoCloudVideo, but the title is prefixed with the
    group (multi-part set) name.
    """

    def __init__(self, video_json, driver, hostname, group_name, alternative_feeds):
        super(EchoCloudSubVideo, self).__init__(
            video_json, driver, hostname, alternative_feeds
        )
        self.group_name = group_name  # name of the multi-part grouping

    @property
    def title(self):
        """Return '<group name> - <title>' as a proper text string.

        BUG FIX: the original called ``.encode("utf-8")`` on a non-str
        title.  Under Python 3 a bytes title has no ``encode`` (and
        encoding would produce bytes anyway, rendering as ``b'...'`` in
        the formatted string).  Decode bytes to text instead, and coerce
        any other non-str value via str().
        """
        if isinstance(self._title, bytes):
            self._title = self._title.decode("utf-8")
        elif not isinstance(self._title, str):
            self._title = str(self._title)
        return "{} - {}".format(self.group_name, self._title)
|
Some video in echo360 cloud is multi-part and this represents it.
|
62599027507cdc57c63a5d0a
|
class TheRobot(Robot):
    """Strategy: drive around in sort of a circle, shoot randomly.

    A three-state tick-driven machine: FORWARD -> RIGHT -> (FORWARD or,
    every 12th turn, UNSTICK) -> FORWARD.  Each motion primitive runs on
    its own countdown timer measured in ticks (60 ticks per second).
    """
    def initialize(self):
        self._state = 'FORWARD'
        self._fwdfor_ticks = None       # remaining ticks of the forward drive (None = idle)
        self._rightfor_ticks = None     # remaining ticks of the right turn (None = idle)
        self._unstick_freq = 12         # right-turns remaining before an unstick maneuver
        self._unstickfor_ticks = None   # remaining ticks of the unstick reverse (None = idle)
    def respond(self):
        self.controller()
        tick = self.sensors['TICK']
        # Log the current state every 20 ticks.
        if not tick % 20:
            self.log(tick, self._state)
    def controller(self):
        """Advance the state machine by one tick (and maybe fire)."""
        self.shoot()
        if self._state == 'FORWARD':
            if self._fwdfor_ticks is not None:
                self.fwdfor()            # keep driving until the timer expires
            else:
                self._state = 'RIGHT'
                self.rightfor(0.8)       # start a 0.8 s right turn
        elif self._state == 'RIGHT':
            if self._rightfor_ticks is not None:
                self.rightfor()
            else:
                self._unstick_freq -= 1
                if self._unstick_freq < 0:
                    # After 12 full turn cycles, assume we may be stuck.
                    self._state = 'UNSTICK'
                    self.unstickfor(1.6)
                else:
                    self._state = 'FORWARD'
                    self.fwdfor(1)
        elif self._state == 'UNSTICK':
            if self._unstickfor_ticks is not None:
                self._unstick_freq = 12  # reset the unstick countdown while backing out
                self.unstickfor()
            else:
                self._state = 'FORWARD'
                self.fwdfor(1)
    def shoot(self):
        # Fire on roughly one tick in three.
        if not random.randrange(3):
            self.fire()
    def fwdfor(self, s=None):
        """Drive forward on a countdown timer.

        With ``s``: start a new s-second (60*s tick) forward drive.
        Without: decrement the running timer; cut the force to 0 and reset
        the timer to None once it expires.  Returns the remaining ticks.
        """
        if s is None:
            t = self._fwdfor_ticks
            t -= 1
        else:
            t = 60 * s
        if t > 0:
            self.force(100)
        else:
            t = None
            self.force(0)
        self._fwdfor_ticks = t
        return t
    def rightfor(self, s=None):
        # Same countdown pattern as fwdfor, but applies turning torque.
        if s is None:
            t = self._rightfor_ticks
            t -= 1
        else:
            t = 60 * s
        if t > 0:
            self.torque(100)
        else:
            t = None
            self.torque(0)
        self._rightfor_ticks = t
        return t
    def unstickfor(self, s=None):
        # Reverse while counter-turning to break free; same countdown pattern.
        if s is None:
            t = self._unstickfor_ticks
            t -= 1
        else:
            self.log('unstick!')
            t = 60 * s
        if t > 0:
            self.force(-60)
            self.torque(-100)
        else:
            t = None
            self.force(0)
            self.torque(0)
        self._unstickfor_ticks = t
        return t
|
Strategy:
Drive around in sort of a circle,
Shoot randomly.
|
625990276fece00bbaccc91c
|
class AutonomousSystem(_Observable):
    """STIX 2.1 Autonomous System (AS) cyber-observable object.

    For detailed property semantics, see the STIX 2.1 specification
    (autonomous-system SCO).
    """
    _type = 'autonomous-system'
    # Declarative property table consumed by the _Observable machinery.
    _properties = OrderedDict([
        ('type', TypeProperty(_type, spec_version='2.1')),
        ('spec_version', StringProperty(fixed='2.1')),
        ('id', IDProperty(_type, spec_version='2.1')),
        ('number', IntegerProperty(required=True)),
        ('name', StringProperty()),
        ('rir', StringProperty()),
        ('object_marking_refs', ListProperty(ReferenceProperty(valid_types='marking-definition', spec_version='2.1'))),
        ('granular_markings', ListProperty(GranularMarking)),
        ('defanged', BooleanProperty(default=lambda: False)),
        ('extensions', ExtensionsProperty(spec_version='2.1')),
    ])
    # Deterministic object IDs are derived from 'number' only.
    _id_contributing_properties = ["number"]
|
For more detailed information on this object's properties, see
`the STIX 2.1 specification <https://docs.oasis-open.org/cti/stix/v2.1/os/stix-v2.1-os.html#_27gux0aol9e3>`__.
|
625990278c3a8732951f74bb
|
class RPNCaculator(object):
    """Reverse Polish Notation calculator.

    Maintains a stack of the current evaluation values; only numbers are
    ever stored on the stack, never operators.
    """

    def __init__(self):
        self.stack = Stack()

    def push(self, item):
        """Push a number; anything that is not an int/float is rejected."""
        if not isinstance(item, (int, float)):
            raise TypeError('Give me a number please')
        self.stack.push(item)

    def pop(self):
        """Pop and return the top value; error on an empty stack."""
        if len(self.stack) == 0:
            raise IndexError('The Stack is already empty')
        return self.stack.pop()

    def _operands(self):
        # Shared operand fetch for the binary operators: the second operand
        # (a) is on top, the first (b) beneath it.  Extracted because the
        # original duplicated this check/pop sequence in all four operators.
        if len(self.stack) < 2:
            raise IndexError('Less than 2 numbers inside the stack')
        a = self.stack.pop()
        b = self.stack.pop()
        return a, b

    def add(self):
        a, b = self._operands()
        self.stack.push(b + a)

    def sub(self):
        a, b = self._operands()
        self.stack.push(b - a)

    def mul(self):
        a, b = self._operands()
        self.stack.push(b * a)

    def div(self):
        # Exact divisions stay integral (floor division); otherwise true division.
        a, b = self._operands()
        if b % a == 0:
            self.stack.push(b // a)
        else:
            self.stack.push(b / a)

    def eval(self, task):
        """Evaluate a whitespace-separated RPN expression string and return the result."""
        OPS = ['+', '-', '*', '/']
        if not isinstance(task, str):
            raise TypeError(' Enter a string man')
        # IDIOM FIX: task.split() replaces str.split(task); split() never
        # yields ' ', so the original's dead "i != ' '" test is dropped.
        for tok in task.split():
            if tok not in OPS:
                self.push(ast.literal_eval(tok))
            else:
                if len(self.stack) < 2:
                    raise ValueError('The string you enter might not be in correct RPN Form')
                if tok == '+':
                    self.add()
                if tok == '-':
                    self.sub()
                if tok == '*':
                    self.mul()
                if tok == '/':
                    self.div()
        if len(self.stack) == 0:
            raise ValueError('The string you enter might not be in correct RPN Form')
        return self.stack.top()

    def __str__(self):
        return str(self.stack)
|
Information assumptions:
Maintains a stack containing current RPN expression evaluation values;
these are always numbers, but never the operators from an RPN expression.
|
625990271f5feb6acb163b54
|
class PackedNode(frozenset, Node):
    """A node holding a set of other nodes (possibly including PackedNodes).

    Inheriting from frozenset deduplicates contained nodes but returns them
    in arbitrary order.  frozenset instances are already weak-referenceable,
    so __weakref__ is not needed in slots.
    """
    __slots__ = ()
|
PackedNodes hold other nodes, including in some cases PackedNodes.
Inheriting from frozenset prevents PackedNodes from duplicating
nodes but costs memory and means that nodes are returned in an
arbitrary order. It may be better to use a list instead.
(Alternately, I should check and see if Hettinger ever actually
added memory-efficient hash tables to set.) Note that frozensets
can be weak-referenced so I don't need to add __weakref__ to
slots.
|
6259902756b00c62f0fb3824
|
class Solution:
    """Insertion sort for a singly linked list."""

    def insertionSortList(self, head):
        """Sort the list starting at *head* ascending by value; return the new head."""
        sentinel = ListNode(0)  # anchor in front of the sorted prefix
        node = head
        while node is not None:
            upcoming = node.next
            # Walk the sorted prefix to find this node's insertion point.
            scan = sentinel
            while scan.next is not None and scan.next.val < node.val:
                scan = scan.next
            # Splice the node in after `scan`.
            node.next = scan.next
            scan.next = node
            node = upcoming
        return sentinel.next
|
@param head: The first node of linked list.
@return: The head of linked list.
|
62599027bf627c535bcb241b
|
class UnifiProtectSensor(UnifiProtectEntity, Entity):
    """A Ubiquiti UniFi Protect sensor entity for Home Assistant."""

    def __init__(self, upv_object, protect_data, server_info, device_id, sensor):
        """Resolve this sensor's display name, units and icons from SENSOR_TYPES."""
        super().__init__(upv_object, protect_data, server_info, device_id, sensor)
        sensor_type = SENSOR_TYPES[sensor]
        self._name = f"{sensor_type[_SENSOR_NAME]} {self._device_data['name']}"
        self._units = sensor_type[_SENSOR_UNITS]
        self._icons = sensor_type[_SENSOR_ICONS]

    @property
    def name(self):
        return self._name

    @property
    def state(self):
        # Lights expose their motion mode; other devices their recording mode.
        if self._device_type == DEVICE_TYPE_LIGHT:
            return self._device_data["motion_mode"]
        return self._device_data["recording_mode"]

    @property
    def icon(self):
        # Choose the "on" or "off" icon based on whether the mode is disabled.
        if self._device_type == DEVICE_TYPE_LIGHT:
            icon_id = _ICON_ON if self.state != TYPE_RECORD_OFF else _ICON_OFF
            return f"mdi:{self._icons[icon_id]}"
        icon_id = _ICON_ON if self.state != TYPE_RECORD_NEVER else _ICON_OFF
        return f"mdi:{self._icons[icon_id]}"

    @property
    def unit_of_measurement(self):
        return self._units

    @property
    def device_class(self):
        # No standard HA device class applies to these modes.
        return None

    @property
    def device_state_attributes(self):
        attr = {
            ATTR_ATTRIBUTION: DEFAULT_ATTRIBUTION,
            ATTR_DEVICE_MODEL: self._model,
        }
        if self._device_type == DEVICE_TYPE_LIGHT:
            attr[ATTR_ENABLED_AT] = self._device_data["motion_mode_enabled_at"]
        return attr
|
A Ubiquiti Unifi Protect Sensor.
|
625990278c3a8732951f74bd
|
class InstructionError(Exception):
    """Base exception type for all instruction-related errors."""
|
Base exception type for all instruction related errors.
|
6259902715baa72349462eff
|
class FamilySchema(ma.ModelSchema):
    """Marshmallow schema for exporting Family objects as JSON."""
    # Related collections are nested and trimmed to the listed fields;
    # 'subfamilies' recurses into this same schema by name.
    samples = fields.Nested(SampleSchema, many=True, only=['id'])
    subfamilies = fields.Nested('FamilySchema', many=True, only=['id', 'name', 'subfamilies', 'status'])
    parents = fields.Nested('FamilySchema', many=True, only=['id', 'name'])
    users = fields.Nested('UserSchema', many=True, only=["id", "nickname"])
    class Meta(object):
        # Whitelist of fields emitted for a Family.
        fields = ('id', 'name', 'parent_id', 'subfamilies', 'samples', 'abstract', 'status', 'TLP_sensibility', 'users')
|
Schema for exporting by marshalling in JSON.
|
625990278a349b6b4368719e
|
class SecurityGroupNetworkInterface(Model):
    """Network interface and all its associated security rules.

    :param id: ID of the network interface.
    :type id: str
    :param security_rule_associations: Associated security rules.
    :type security_rule_associations: SecurityRuleAssociations
    """

    # Maps Python attribute names to wire keys/types for msrest serialization.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'security_rule_associations': {'key': 'securityRuleAssociations', 'type': 'SecurityRuleAssociations'},
    }

    def __init__(self, *, id: str=None, security_rule_associations=None, **kwargs) -> None:
        super(SecurityGroupNetworkInterface, self).__init__(**kwargs)
        # NOTE: parameter name 'id' shadows the builtin; kept for API compatibility.
        self.id = id
        self.security_rule_associations = security_rule_associations
|
Network interface and all its associated security rules.
:param id: ID of the network interface.
:type id: str
:param security_rule_associations:
:type security_rule_associations:
~azure.mgmt.network.v2018_08_01.models.SecurityRuleAssociations
|
62599027d164cc6175821edd
|
@ModuleDocstringParser
class DocStringExample:
    # NOTE(review): no class docstring is added here on purpose — the
    # @ModuleDocstringParser decorator parses the class docstring as the
    # module's configuration template, so adding one would change behavior.
    def __init__(self):
        # Root logging config: timestamped messages to stderr (filename=None).
        logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', filename=None, filemode='w')
        self.logger = logging.getLogger(self.__class__.__name__)
        # Parsed configuration is stored here (populated by the parser/callers).
        self.configuration_data = {}
|
- module: DocStringExample
string: # <default: "TestString"; type: string; is: optional>
int: # <default: 1; type: integer; is: optional>
dict: # <default: {'filed1': 'value1'}; type: dict; is: optional>
list: # <default: ['field2', 'field3']; type: list; is: optional>
none: # <default: None; type: None||string; is: optional>
bool: # <default: True; type: boolean; is: optional>
...
receivers:
- ModuleName
- ModuleAlias
|
625990279b70327d1c57fce8
|
class Visits(Metrics):
    """Metric: number of visits (monthly aggregation only)."""
    id = "visits"
    name = "Visits"
    desc = "Number of visits"
    data_source = DownloadsDS

    def _get_sql(self, evolutionary):
        """Build the SUM(visits) aggregation query.

        Raises ValueError for any period other than 'month'.
        """
        if self.filters.period != 'month':
            msg = 'Period %s not valid. Currently, only "month" is supported' % self.filters.period
            # BUG FIX: the original *returned* the ValueError instance
            # instead of raising it, handing callers an exception object
            # where they expect a SQL string.
            raise ValueError(msg)
        fields = "SUM(visits) visits"
        tables = "visits_month v"
        filters = ""
        query = self.db.BuildQuery(self.filters.period, self.filters.startdate,
                                   self.filters.enddate, " date ", fields,
                                   tables, filters, evolutionary)
        return query
|
Number of visits
|
6259902756b00c62f0fb3826
|
class NoOutputAsyncMutator(BaseAsyncMutator):
    """Base class for mutating subcommands that don't display resources."""

    def Format(self, args):
        # Suppress output formatting regardless of args.
        return 'none'
|
Base class for mutating subcommands that don't display resources.
|
62599027507cdc57c63a5d0e
|
class MuProcedure(UserDefinedProcedure):
    """A procedure defined by a mu expression, which has dynamic scope.

    Unlike a lambda, the call frame is a child of the *calling*
    environment rather than of a captured definition environment.
    """

    def __init__(self, formals, body):
        self.formals = formals
        self.body = body

    def make_call_frame(self, args, env):
        # Dynamic scoping: bind the formals in a child of the caller's env.
        return env.make_child_frame(self.formals, args)

    # BUG FIX: removed leftover course scaffolding — the bare string
    # "*** REPLACE THIS LINE ***" and the no-op attribute expression
    # `LambdaProcedure.make_call_frame`, which does nothing useful and
    # raises NameError at class-creation time if LambdaProcedure is not
    # defined before this class.

    def __str__(self):
        return str(Pair('mu', Pair(self.formals, self.body)))

    def __repr__(self):
        return 'MuProcedure({0}, {1})'.format(
            repr(self.formals), repr(self.body))
|
A procedure defined by a mu expression, which has dynamic scope.
_________________
< Scheme is cool! >
-----------------
\ ^__^
\ (oo)\_______
(__)\ )\/ ||----w |
|| ||
|
625990278c3a8732951f74bf
|
class InstanceViewStatus(msrest.serialization.Model):
    """Instance view status.

    :ivar code: The status code.
    :vartype code: str
    :ivar level: The level code. Possible values include: "Info", "Warning", "Error".
    :vartype level: str or StatusLevelTypes
    :ivar display_status: The short localizable label for the status.
    :vartype display_status: str
    :ivar message: The detailed status message, including for alerts and error messages.
    :vartype message: str
    :ivar time: The time of the status.
    :vartype time: ~datetime.datetime
    """

    # Attribute -> wire key/type map used by msrest (de)serialization.
    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'level': {'key': 'level', 'type': 'str'},
        'display_status': {'key': 'displayStatus', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        'time': {'key': 'time', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        *,
        code: Optional[str] = None,
        level: Optional[Union[str, "StatusLevelTypes"]] = None,
        display_status: Optional[str] = None,
        message: Optional[str] = None,
        time: Optional[datetime.datetime] = None,
        **kwargs
    ):
        super(InstanceViewStatus, self).__init__(**kwargs)
        self.code = code
        self.level = level
        self.display_status = display_status
        self.message = message
        self.time = time
|
Instance view status.
:ivar code: The status code.
:vartype code: str
:ivar level: The level code. Possible values include: "Info", "Warning", "Error".
:vartype level: str or ~azure.mgmt.compute.v2018_10_01.models.StatusLevelTypes
:ivar display_status: The short localizable label for the status.
:vartype display_status: str
:ivar message: The detailed status message, including for alerts and error messages.
:vartype message: str
:ivar time: The time of the status.
:vartype time: ~datetime.datetime
|
6259902726238365f5fadab9
|
class ExponentialPdf(EquivUnary):
    """Exponential probability distribution function.

    >>> func = ExponentialPdf().gsl
    >>> assert allclose(func([1.2,0.1,0.5],mu=1.0),[0.30119421,0.90483742,0.60653066])
    """
    name = 'exponential_pdf'
    ranking = ('gsl',)
    tests = (
        Test([1.2, 0.1, 0.5], mu=1.0) ** [0.30119421, 0.90483742, 0.60653066],
    )

    @staticmethod
    def gsl(arg, mu=0.0, out=None):
        arg = as_num_array(arg)
        # BUG FIX: the original used `if not out:`, which raises ValueError
        # for a multi-element array (ambiguous truth value) and wrongly
        # reallocates when the caller passes an empty output buffer.
        # Test against None explicitly instead.
        if out is None:
            out = arg.new()
        out[:] = exponential_pdf(arg, mu)
        return out
|
Exponential probability distribution function
>>> func = ExponentialPdf().gsl
>>> assert allclose(func([1.2,0.1,0.5],mu=1.0),[0.30119421,0.90483742,0.60653066])
|
62599027d99f1b3c44d0660a
|
class ErrorMessages:
    """Index of the exception/error messages used for SuperHub interactions."""

    not_found = "Could not find SuperHub, please ensure the correct IP address is set"
    login_failed = "Could not login to SuperHub, password may be incorrect"
    firmware_warn = "Couldn't check firmware version, something must have went wrong with the login."
|
Index of various exception error messages
|
62599027287bf620b6272b57
|
class Gtklock:
    """A context manager for the Gdk.threads_* lock.

    Used on the class itself (``with Gtklock: ...``) — both hooks are
    staticmethods, so no instance is ever created.
    """

    @staticmethod
    def __enter__():
        Gdk.threads_enter()

    @staticmethod
    def __exit__(*args):
        # args: (exc_type, exc_value, traceback); returning None means
        # exceptions are not suppressed.
        Gdk.threads_leave()
|
A context manger for the Gdk.threads_*
can be used like this
>> with Gtklock:
>> pass
|
6259902773bcbd0ca4bcb1f8
|
@dataclass(frozen=True)
class TrajectoryParameters:
    """Data needed to build a trajectory's path: an optional circular arc
    joined to straight segments at the start and end.

    Angles are in radians in [-pi, pi], with 0 along the positive x axis.
    """
    turning_radius: float          # radius of the circle generating the arc
    x_offset: float                # x coordinate of that circle's center
    y_offset: float                # y coordinate of that circle's center
    end_point: np.ndarray          # end coordinate of the path (annotation fixed: was np.array, the constructor)
    start_angle: float
    end_angle: float
    left_turn: bool                # whether the arc turns to the left
    # NOTE(review): annotated `float` in the original, but used as point
    # coordinates below (np.linalg.norm / vector subtraction) — presumably
    # np.ndarray; confirm against callers.
    arc_start_point: float
    arc_end_point: float

    @property
    def arc_length(self):
        # Arc length = radius * swept angle.
        return self.turning_radius * angle_difference(
            self.start_angle, self.end_angle, self.left_turn
        )

    @property
    def start_straight_length(self):
        # Distance from the (implicit origin) start to where the arc begins.
        return np.linalg.norm(self.arc_start_point)

    @property
    def end_straight_length(self):
        # Distance from where the arc ends to the path's end point.
        return np.linalg.norm(self.end_point - self.arc_end_point)

    @property
    def total_length(self):
        return self.arc_length + self.start_straight_length + self.end_straight_length

    @staticmethod
    def no_arc(end_point, start_angle, end_angle):
        """Build parameters for a straight-line path (degenerate zero-radius arc)."""
        return TrajectoryParameters(
            turning_radius=0.0,
            x_offset=0.0,
            y_offset=0.0,
            end_point=end_point,
            start_angle=start_angle,
            end_angle=end_angle,
            left_turn=True,
            arc_start_point=end_point,
            arc_end_point=end_point,
        )
|
A dataclass that holds the data needed to create the path for a trajectory.
turning_radius: The radius of the circle used to generate
the arc of the path
x_offset: The x coordinate of the circle used to generate
the arc of the path
y_offset: They y coordinate of the circle used to generate
the arc of the path
end_point: The end coordinate of the path
start_angle: The starting angle of the path
- given in radians from -pi to pi where 0 radians is along
the positive x axis
end_angle: The end angle of the path
- given in radians from -pi to pi where 0 radians is along
the positive x axis
left_turn: Whether the arc in the path turns to the left
arc_start_point: Coordinates of the starting position of the arc
arc_end_point: Coordinates of the ending position of the arc
|
62599027d164cc6175821edf
|
class GetCommEventLogRequest(ModbusRequest):
    """Modbus Get Comm Event Log request (function code 0x0C).

    Retrieves a status word, event count, bus-message count, and a field
    of event bytes from the remote device.
    """
    function_code = 0x0c
    _rtu_frame_size = 4  # fixed-size RTU frame for this request

    def __init__(self, **kwargs):
        ModbusRequest.__init__(self, **kwargs)

    def encode(self):
        # The request carries no data payload.
        return b''

    def decode(self, data):
        # Nothing to decode for this request.
        pass

    def execute(self):
        """Build the response from the device control block's counters/events."""
        results = {
            'status': True,
            'message_count': _MCB.Counter.BusMessage,
            'event_count': _MCB.Counter.Event,
            'events': _MCB.get_event(),
        }
        return GetCommEventLogResponse(**results)

    def __str__(self):
        return 'GetCommEventLogRequest({0})'.format(self.function_code)
|
This function code is used to get a status word, event count, message
count, and a field of event bytes from the remote device.
The status word and event counts are identical to that returned by
the Get Communications Event Counter function (11, 0B hex).
The message counter contains the quantity of messages processed by the
remote device since its last restart, clear counters operation, or
power-up. This count is identical to that returned by the Diagnostic
function (code 08), sub-function Return Bus Message Count (code 11,
0B hex).
The event bytes field contains 0-64 bytes, with each byte corresponding
to the status of one MODBUS send or receive operation for the remote
device. The remote device enters the events into the field in
chronological order. Byte 0 is the most recent event. Each new byte
flushes the oldest byte from the field.
|
625990278c3a8732951f74c0
|
class LSTMModel(nn.Module):
    """Container module: token embedding encoder -> multi-layer LSTM -> linear decoder."""

    def __init__(self, ntoken, ninp, nhid, nlayers, dropout=0.5):
        super(LSTMModel, self).__init__()
        self.drop = nn.Dropout(dropout)
        self.encoder = nn.Embedding(ntoken, ninp)
        self.rnn = nn.LSTM(ninp, nhid, nlayers, dropout=dropout)
        self.decoder = nn.Linear(nhid, ntoken)
        self.init_weights()
        self.nhid = nhid
        self.nlayers = nlayers

    def init_weights(self):
        """Uniform init for embeddings/decoder weights; zero decoder bias."""
        bound = 0.1
        self.encoder.weight.data.uniform_(-bound, bound)
        self.decoder.bias.data.zero_()
        self.decoder.weight.data.uniform_(-bound, bound)

    def forward(self, input, hidden):
        """Run tokens through embed -> LSTM -> decode; return (logits, new hidden)."""
        embedded = self.drop(self.encoder(input))
        rnn_out, hidden = self.rnn(embedded, hidden)
        logits = self.decoder(self.drop(rnn_out))
        return logits, hidden

    def init_hidden(self, bsz):
        """Return zeroed (h0, c0) matching the model's dtype/device."""
        ref = next(self.parameters())
        shape = (self.nlayers, bsz, self.nhid)
        return (ref.new_zeros(*shape), ref.new_zeros(*shape))
|
Container module with an encoder, a recurrent module, and a decoder.
|
625990275e10d32532ce40b8
|
class RandomSampler(Sampler):
    """Samples dataset elements randomly, without replacement.

    Arguments:
        data_source (Dataset): dataset to sample from
    """

    def __init__(self, data_source):
        self.num_samples = len(data_source)

    def __iter__(self):
        # A fresh uniform permutation of all indices on every pass.
        permutation = torch.randperm(self.num_samples)
        return iter(permutation.long())

    def __len__(self):
        return self.num_samples
|
Samples elements randomly, without replacement.
Arguments:
data_source (Dataset): dataset to sample from
|
625990271d351010ab8f4a7f
|
class TestResult(UuidMixin, TimeStampedModel):
    """A user's attempt at a Test ("test answers"): lifecycle, timing and score."""
    # Lifecycle states: NEW -> IN_PROGRESS -> (REQUIRES_REVIEW | DONE).
    NEW = 'new'
    IN_PROGRESS = 'in_progress'
    REQUIRES_REVIEW = 'requires_review'
    DONE = 'done'
    STATUS_CHOICES = (
        (NEW, _('New')),
        (IN_PROGRESS, _('In progress')),
        (REQUIRES_REVIEW, _('Required review')),
        (DONE, _('Done')),
    )
    test = models.ForeignKey(
        Test, verbose_name=Test._meta.verbose_name,
        related_name='solved_tests', on_delete=models.CASCADE
    )
    user = models.ForeignKey(
        User, verbose_name=User._meta.verbose_name,
        related_name='tests', on_delete=models.CASCADE
    )
    # Percentage score, 0-100 with 3 decimal places; null until calculated.
    result = models.DecimalField(
        max_digits=6, decimal_places=3, verbose_name=_('result'),
        null=True, blank=True
    )
    start_time = models.DateTimeField(_('start time'), null=True, blank=True)
    end_time = models.DateTimeField(_('end time'), null=True, blank=True)
    status = models.CharField(
        _('status'), max_length=20, choices=STATUS_CHOICES, default=NEW
    )
    uuid = models.CharField(
        max_length=36, unique=True, db_index=True, default=generate_uuid,
        verbose_name=_('Unique Identifier'),
        help_text=_("The unique identifier for this object"))
    class Meta:
        verbose_name = _('test answers')
        verbose_name_plural = _('test answers')
        ordering = ('pk',)
        # One attempt per (test, user) pair.
        unique_together = ('test', 'user')
    @property
    def name(self):
        return self.test.name
    def start(self):
        """Transition NEW -> IN_PROGRESS, stamping the start time.

        NOTE(review): raises Http404 (not a domain error) on an invalid
        transition — presumably because this is only called from views.
        """
        if self.status != self.NEW:
            raise Http404
        self.start_time = timezone.now()
        self.status = self.IN_PROGRESS
        self.save(update_fields=['start_time', 'status'])
        return
    def finish(self):
        """Transition IN_PROGRESS -> REQUIRES_REVIEW/DONE and compute the score.

        Goes to REQUIRES_REVIEW if any question needs manual checking,
        otherwise straight to DONE.
        """
        if self.status != self.IN_PROGRESS:
            raise Http404
        self.end_time = timezone.now()
        if self.test.questions.filter(manual_check=True).exists():
            self.status = self.REQUIRES_REVIEW
        else:
            self.status = self.DONE
        self.calculate_results()
        self.save(update_fields=['end_time', 'status'])
        return self
    def calculate_results(self):
        """Set `result` to the percentage of correct answers (0 on zero division)."""
        try:
            correct_ans = self.answers.filter(correct=True).count()
            all_ans = self.test.questions.count()
            # Equivalent to 100 * correct / all, rounded to 2 places;
            # ZeroDivisionError (no questions, or no correct answers) -> 0.
            self.result = round(100. / (all_ans / correct_ans), 2)
        except ZeroDivisionError:
            self.result = 0
        self.save(update_fields=['result'])
    @property
    def str_status(self):
        # The [:] slice forces the lazy translation proxy into a plain string.
        return dict((t[0], t[1][:]) for t in self.STATUS_CHOICES)[self.status]
    @property
    def is_new(self):
        return self.status == self.NEW
    @property
    def is_in_progress(self):
        return self.status == self.IN_PROGRESS
    @property
    def is_done(self):
        return self.status == self.DONE
    def __str__(self):
        return f'{self.test.name} - {self.user.get_full_name()}'
    @property
    def correct_answers(self):
        return self.answers.filter(correct=True)
|
Test Answers model
|
6259902766673b3332c31357
|
class url_FormTestCase(TestCase):
    """Basic tests for UploadFileForm validity and the pdf_converter view."""

    def setUp(self):
        self.factory = RequestFactory()

    def test_valid_form(self):
        # A well-formed URL must validate.
        data = {'url':'http://www.google.com'}
        form = UploadFileForm(data=data)
        self.assertTrue(form.is_valid())

    def test_invalid_form(self):
        # A non-URL string must be rejected.
        data = { 'url': 'Some fake url', }
        form = UploadFileForm(data=data)
        self.assertFalse(form.is_valid())

    def test_getmethod(self):
        # GET should render the page successfully.
        request = self.factory.get('/')
        response = pdf_converter(request)
        self.assertEqual(response.status_code, 200)

    def test_postmethod(self):
        # POST with a URL should also succeed.
        request = self.factory.post('/', {"url": "http://cricinfo.com"})
        response = pdf_converter(request)
        self.assertEqual(response.status_code, 200)
|
Basic tests to check the form validity
|
625990273eb6a72ae038b5cd
|
class DuckTypeEnforcer(tuple):
    """An immutable iterable of required attributes for a duck-typing test.

    A thin tuple wrapper: positional constructor arguments become the
    tuple's elements; keyword arguments are accepted but ignored.
    """

    def __new__(cls, *args, **kwargs):
        # Forward the collected positionals straight to tuple construction.
        return tuple.__new__(cls, args)
|
An immutable iterable of required attributes to pass a duck typing test.
Literally, this is a wrapper class that passes init arguments to tuple.
|
6259902721a7993f00c66ee6
|
class InlineResponse2009(object):
    """Response model wrapping a list of FoneNumber objects.

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    # swagger_types: attribute -> declared type; attribute_map: attribute -> JSON key.
    swagger_types = {
        'fonenumbers': 'list[FoneNumber]'
    }
    attribute_map = {
        'fonenumbers': 'fonenumbers'
    }

    def __init__(self, fonenumbers=None):
        self._fonenumbers = None
        if fonenumbers is not None:
            self.fonenumbers = fonenumbers

    @property
    def fonenumbers(self):
        return self._fonenumbers

    @fonenumbers.setter
    def fonenumbers(self, fonenumbers):
        self._fonenumbers = fonenumbers

    def to_dict(self):
        """Recursively convert the model (and nested models/lists/dicts) to a plain dict."""
        result = {}
        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result

    def to_str(self):
        # Pretty-printed dict form.
        return pformat(self.to_dict())

    def __repr__(self):
        return self.to_str()

    def __eq__(self, other):
        if not isinstance(other, InlineResponse2009):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
|
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
|
625990270a366e3fb87dd952
|
class ReplyConsumer(Consumer): <NEW_LINE> <INDENT> def start(self, listener, watchdog=None): <NEW_LINE> <INDENT> self.listener = listener <NEW_LINE> self.watchdog = watchdog or LazyDog() <NEW_LINE> self.blacklist = set() <NEW_LINE> Consumer.start(self) <NEW_LINE> <DEDENT> def dispatch(self, envelope): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> reply = Reply(envelope) <NEW_LINE> if envelope.sn in self.blacklist: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if reply.started(): <NEW_LINE> <INDENT> self.watchdog.started(envelope.sn) <NEW_LINE> reply = Started(envelope) <NEW_LINE> reply.notify(self.listener) <NEW_LINE> return <NEW_LINE> <DEDENT> if reply.progress(): <NEW_LINE> <INDENT> self.watchdog.progress(envelope.sn) <NEW_LINE> reply = Progress(envelope) <NEW_LINE> reply.notify(self.listener) <NEW_LINE> return <NEW_LINE> <DEDENT> if reply.succeeded(): <NEW_LINE> <INDENT> self.blacklist.add(envelope.sn) <NEW_LINE> self.watchdog.completed(envelope.sn) <NEW_LINE> reply = Succeeded(envelope) <NEW_LINE> reply.notify(self.listener) <NEW_LINE> return <NEW_LINE> <DEDENT> if reply.failed(): <NEW_LINE> <INDENT> self.blacklist.add(envelope.sn) <NEW_LINE> self.watchdog.completed(envelope.sn) <NEW_LINE> reply = Failed(envelope) <NEW_LINE> reply.notify(self.listener) <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> except Exception: <NEW_LINE> <INDENT> log.exception(envelope)
|
A request, reply consumer.
@ivar listener: An reply listener.
@type listener: any
@ivar watchdog: An (optional) watchdog.
@type watchdog: L{WatchDog}
@ivar blacklist: A set of serial numbers to ignore.
@type blacklist: set
|
6259902773bcbd0ca4bcb1fa
|
class DeleteException(S3POException): <NEW_LINE> <INDENT> pass
|
An error while deleting
|
62599027ac7a0e7691f73453
|
class RandomForest(Classifier): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(RandomForest, self).__init__() <NEW_LINE> self.clf = RandomForestClassifier(**kwargs) <NEW_LINE> <DEDENT> def train(self, X, Y): <NEW_LINE> <INDENT> self.clf.fit(X, Y) <NEW_LINE> <DEDENT> def predict(self, X): <NEW_LINE> <INDENT> y_hats = self.clf.predict(X) <NEW_LINE> return y_hats
|
The random forest classifier
|
625990278c3a8732951f74c2
|
class NinjaAnt(Ant): <NEW_LINE> <INDENT> name = 'Ninja' <NEW_LINE> damage = 1 <NEW_LINE> food_cost = 6 <NEW_LINE> implemented = True <NEW_LINE> blocks_path = False <NEW_LINE> def action(self, colony): <NEW_LINE> <INDENT> for bee in self.place.bees[:]: <NEW_LINE> <INDENT> bee.reduce_armor(self.damage)
|
NinjaAnt does not block the path and damages all bees in its place.
|
625990279b70327d1c57fcec
|
class UserBehavior(TaskSet): <NEW_LINE> <INDENT> def on_start(self): <NEW_LINE> <INDENT> self.login() <NEW_LINE> <DEDENT> def login(self): <NEW_LINE> <INDENT> self.client.post('/login', { 'usernmae': 'ellen_key', 'password': 'education' }) <NEW_LINE> <DEDENT> @task(2) <NEW_LINE> def index(self): <NEW_LINE> <INDENT> self.client.get('/') <NEW_LINE> <DEDENT> @task(1) <NEW_LINE> def profile(self): <NEW_LINE> <INDENT> self.client.get('/profile') <NEW_LINE> <DEDENT> @task(1) <NEW_LINE> def my_task(self): <NEW_LINE> <INDENT> print("Locust instance (%r) executing my_task" % (self.locust))
|
"task set
|
62599027a8ecb03325872188
|
class ParameterizedJobProfilerParameter(dbmodels.Model): <NEW_LINE> <INDENT> parameterized_job_profiler = dbmodels.ForeignKey(ParameterizedJobProfiler) <NEW_LINE> parameter_name = dbmodels.CharField(max_length=255) <NEW_LINE> parameter_value = dbmodels.TextField() <NEW_LINE> parameter_type = dbmodels.CharField( max_length=8, choices=model_attributes.ParameterTypes.choices()) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> db_table = 'afe_parameterized_job_profiler_parameters' <NEW_LINE> unique_together = ('parameterized_job_profiler', 'parameter_name') <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return u'%s - %s' % (self.parameterized_job_profiler.profiler.name, self.parameter_name)
|
A parameter for a profiler in a parameterized job
|
62599027796e427e5384f6e8
|
@six.add_metaclass(abc.ABCMeta) <NEW_LINE> class BaseHTTPMixin(stethoscope.configurator.Configurator): <NEW_LINE> <INDENT> config_keys = ( 'URL', ) <NEW_LINE> def _process_arguments(self, payload, **kwargs): <NEW_LINE> <INDENT> url = self.config['URL'] <NEW_LINE> content = json.dumps(payload, default=stethoscope.utils.json_serialize_datetime) <NEW_LINE> headers = kwargs.get('headers', {}) <NEW_LINE> headers.setdefault('Content-Type', 'application/json') <NEW_LINE> headers.setdefault('User-Agent', 'Stethoscope') <NEW_LINE> kwargs['headers'] = headers <NEW_LINE> default_timeout = self.config.get('TIMEOUT', self.config.get('DEFAULT_TIMEOUT')) <NEW_LINE> kwargs.setdefault('timeout', default_timeout) <NEW_LINE> logger.debug("posting\n to: {!s}\n kwargs: {!s}\n content: {!r}", url, kwargs, content) <NEW_LINE> return url, content, kwargs
|
Abstract plugin base class for implementing methods to POST to arbitrary HTTP endpoints.
|
625990271d351010ab8f4a81
|
class InsertDimensionRequest(TypedDict): <NEW_LINE> <INDENT> inheritFromBefore: bool <NEW_LINE> range: DimensionRange
|
Inserts rows or columns in a sheet at a particular index.
|
62599027ac7a0e7691f73455
|
class csvUnicodeHandler(object): <NEW_LINE> <INDENT> def __init__(self, f, encoding=ENCODING, **kwds): <NEW_LINE> <INDENT> self.queue = cStringIO.StringIO() <NEW_LINE> self.writer = csv.writer(self.queue, delimiter=SEP, lineterminator=EOL, **kwds) <NEW_LINE> self.stream = f <NEW_LINE> self.encoder = codecs.getincrementalencoder(encoding)() <NEW_LINE> <DEDENT> def writerow(self, row): <NEW_LINE> <INDENT> self.writer.writerow([s.encode("utf-8") for s in row]) <NEW_LINE> data = self.queue.getvalue() <NEW_LINE> data = data.decode("utf-8") <NEW_LINE> data = self.encoder.encode(data) <NEW_LINE> self.stream.write(data) <NEW_LINE> self.queue.truncate(0) <NEW_LINE> <DEDENT> def writerows(self, rows): <NEW_LINE> <INDENT> for row in rows: <NEW_LINE> <INDENT> self.writerow(row)
|
A CSV writer which will write rows to CSV file "f",
which is encoded in the given encoding.
|
62599027a4f1c619b294f560
|
class Chapter(SQLAlchemyObjectType): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = ChapterModel <NEW_LINE> interfaces = (graphene.relay.Node,)
|
Chapter node.
|
62599027bf627c535bcb2423
|
class AttachmentCountNode(BaseAttachmentNode): <NEW_LINE> <INDENT> def get_context_value_from_queryset(self, context, qs): <NEW_LINE> <INDENT> return qs.count()
|
Insert a count of attachments into the context
|
62599027d164cc6175821ee4
|
class Discriminator(nn.Module): <NEW_LINE> <INDENT> def __init__(self, z_dim=10): <NEW_LINE> <INDENT> super(Discriminator, self).__init__() <NEW_LINE> self.z_dim = z_dim <NEW_LINE> self.net = nn.Sequential( nn.Linear(z_dim, 512), nn.ReLU(True), nn.Linear(512, 512), nn.ReLU(True), nn.Linear(512, 1), nn.Sigmoid() ) <NEW_LINE> self.weight_init() <NEW_LINE> <DEDENT> def weight_init(self): <NEW_LINE> <INDENT> for block in self._modules: <NEW_LINE> <INDENT> for m in self._modules[block]: <NEW_LINE> <INDENT> kaiming_init(m) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def forward(self, z): <NEW_LINE> <INDENT> return self.net(z)
|
Adversary architecture(Discriminator) for WAE-GAN.
|
625990276e29344779b015bd
|
class _BroadcastSignal(Signal): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> def emit(self, **args): <NEW_LINE> <INDENT> raise TypeError('emitting broadcast signals is unsupported')
|
Special broadcast signal. Connect to it to be notified about
all signals. This signal is automatically send with each other
signal.
|
625990276fece00bbaccc926
|
class TradingModel: <NEW_LINE> <INDENT> def __init__(self, trade_simulator): <NEW_LINE> <INDENT> self.trade_simulator = trade_simulator <NEW_LINE> <DEDENT> def simulate_trades(self, df, feature_columns): <NEW_LINE> <INDENT> return self.trade_simulator(df, feature_columns)
|
A TradingModel converts market data into trades and trade outcomes.
Args:
trade_simulator (DataFrame => DataFrame): A function that takes the input
data and returns that data frame with metrics attached to it.
The function should apply the 'outcome' and 'trading_action' columns
to the data frame.
|
625990271f5feb6acb163b5e
|
class Shipping(CRUDObjectItem): <NEW_LINE> <INDENT> pass
|
Shipping object
|
62599027d99f1b3c44d06610
|
class DataClientListener(RedisListener): <NEW_LINE> <INDENT> subscribe_channel = 'client-data:*' <NEW_LINE> def run(self): <NEW_LINE> <INDENT> pubsub = self.client.pubsub() <NEW_LINE> pubsub.psubscribe(self.channel) <NEW_LINE> for item in pubsub.listen(): <NEW_LINE> <INDENT> if item['type'] == 'pmessage': <NEW_LINE> <INDENT> data = item['data'].decode() <NEW_LINE> channel = item['channel'].decode() <NEW_LINE> print(f"Data from {channel}:\n\t{data}")
|
For receiving data from client
|
6259902773bcbd0ca4bcb1fe
|
class Source(source.Source): <NEW_LINE> <INDENT> def get_historical_price(self, ticker, date): <NEW_LINE> <INDENT> commodity, currency = ticker.split(':') <NEW_LINE> trade_date = datetime.combine(date, datetime.max.time()) <NEW_LINE> trade_date = trade_date.replace(tzinfo=pytz.UTC) <NEW_LINE> ts = int(time.mktime(trade_date.timetuple())) <NEW_LINE> url = 'https://min-api.cryptocompare.com/data/pricehistorical?fsym={}&tsyms={}&ts={}'.format(commodity, currency, ts) <NEW_LINE> logging.info("Fetching %s", url) <NEW_LINE> try: <NEW_LINE> <INDENT> response = net_utils.retrying_urlopen(url) <NEW_LINE> if response is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> response = response.read().decode('utf-8').strip() <NEW_LINE> response = json.loads(response) <NEW_LINE> <DEDENT> except error.HTTPError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> price = D(response[commodity][currency]).quantize(D('1.000000000000000000')) <NEW_LINE> return source.SourcePrice(D('0') if price == 0 else price, trade_date, currency) <NEW_LINE> <DEDENT> def get_latest_price(self, ticker): <NEW_LINE> <INDENT> commodity, currency = ticker.split(':') <NEW_LINE> url = 'https://min-api.cryptocompare.com/data/price?fsym={}&tsyms={}'.format(commodity, currency) <NEW_LINE> logging.info("Fetching %s", url) <NEW_LINE> try: <NEW_LINE> <INDENT> response = net_utils.retrying_urlopen(url) <NEW_LINE> if response is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> response = response.read().decode('utf-8').strip() <NEW_LINE> response = json.loads(response) <NEW_LINE> <DEDENT> except error.HTTPError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> price = D(response[currency]).quantize(D('1.000000000000000000')) <NEW_LINE> trade_date = datetime.now() <NEW_LINE> trade_date = trade_date.replace(tzinfo=pytz.UTC) <NEW_LINE> return source.SourcePrice(D('0') if price == 0 else price, trade_date, currency)
|
CryptoCompare API price extractor.
|
6259902721a7993f00c66eec
|
class INRecordBase(TOP): <NEW_LINE> <INDENT> def __init__(self, API_KEY=None, APP_SECRET=None, ENVIRONMENT=None): <NEW_LINE> <INDENT> super(INRecordBase, self).__init__( API_KEY, APP_SECRET, ENVIRONMENT ) <NEW_LINE> self.models = {'date':TOPDate} <NEW_LINE> self.fields = ['pc','click','avg_price','competition','date', 'ctr']
|
数据信息对象
|
625990276e29344779b015bf
|
class Oscilloscope(Task): <NEW_LINE> <INDENT> def __init__(self, pipeline=None): <NEW_LINE> <INDENT> super(Oscilloscope, self).__init__() <NEW_LINE> self.pipeline = pipeline <NEW_LINE> <DEDENT> def prepare_graphics(self, container): <NEW_LINE> <INDENT> self.scope = SignalWidget() <NEW_LINE> container.set_widget(self.scope) <NEW_LINE> <DEDENT> def prepare_daq(self, daqstream): <NEW_LINE> <INDENT> self.daqstream = daqstream <NEW_LINE> self.connect(daqstream.updated, self.update) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> self.daqstream.start() <NEW_LINE> <DEDENT> def update(self, data): <NEW_LINE> <INDENT> if self.pipeline is not None: <NEW_LINE> <INDENT> data = self.pipeline.process(data) <NEW_LINE> <DEDENT> self.scope.plot(data) <NEW_LINE> <DEDENT> def key_press(self, key): <NEW_LINE> <INDENT> if key == util.key_return: <NEW_LINE> <INDENT> self.daqstream.stop() <NEW_LINE> self.finish()
|
A visualizer for data acquisition devices.
This task connects to the experiment input DAQ and displays each of its
channels on a separate plot. You can optionally pass a :class:`Pipeline`
object to preprocess the input data before displaying it.
Parameters
----------
pipeline : Pipeline, optional
Pipeline to run the input data through before displaying it. Often this
is some preprocessing like filtering. It is often useful to use a
:class:`Windower` in the pipeline to display a larger chunk of data
than is given on each input update of the DAQ. This gives a "scrolling"
view of the input data, which can be helpful for experiment setup (e.g.
placing electrodes, making sure the device is recording properly,
etc.).
|
62599027287bf620b6272b5f
|
class FastFeatureImportance: <NEW_LINE> <INDENT> def __init__(self, estimator): <NEW_LINE> <INDENT> self.estimator = estimator <NEW_LINE> self.importance = [] <NEW_LINE> <DEDENT> def fit(self, X, y): <NEW_LINE> <INDENT> features = list(range(X.shape[1])) <NEW_LINE> length = len(X) <NEW_LINE> self.estimator.fit(X, y) <NEW_LINE> base_score = self.estimator.score(X, y) <NEW_LINE> if isinstance(X, pd.DataFrame): <NEW_LINE> <INDENT> X = X.values <NEW_LINE> <DEDENT> for feature in features: <NEW_LINE> <INDENT> temp = X <NEW_LINE> temp[:, feature] = np.random.normal(0, 0.1, length) <NEW_LINE> self.estimator.fit(temp, y) <NEW_LINE> importance = round(abs(base_score - self.estimator.score(temp, y)), 5) <NEW_LINE> self.importance.append(importance) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def importance_(self): <NEW_LINE> <INDENT> return self.importance
|
Finding feature importance in O(n) by adding random data to features one at a time
|
625990270a366e3fb87dd958
|
class OutputText(object): <NEW_LINE> <INDENT> def __init__(self, config): <NEW_LINE> <INDENT> self.config = config <NEW_LINE> self.locked = 0 <NEW_LINE> self.pending = [] <NEW_LINE> <DEDENT> def lock(self): <NEW_LINE> <INDENT> self.locked += 1 <NEW_LINE> <DEDENT> def unlock(self): <NEW_LINE> <INDENT> self.locked -= 1 <NEW_LINE> if self.locked <= 0: <NEW_LINE> <INDENT> for msg in self.pending: <NEW_LINE> <INDENT> self.add_message(msg, self.config.b_allow_auto_scroll) <NEW_LINE> <DEDENT> self.pending = [] <NEW_LINE> self.locked = 0 <NEW_LINE> <DEDENT> <DEDENT> def clear(self, source="", target="", target_display="", source_img="", target_img=""): <NEW_LINE> <INDENT> self.pending = [] <NEW_LINE> <DEDENT> def send_message(self, msg): <NEW_LINE> <INDENT> self.append(msg) <NEW_LINE> <DEDENT> def receive_message(self, msg): <NEW_LINE> <INDENT> self.append(msg) <NEW_LINE> <DEDENT> def information(self, msg): <NEW_LINE> <INDENT> self.append(msg) <NEW_LINE> <DEDENT> def append(self, msg): <NEW_LINE> <INDENT> if self.locked and msg.type != e3.Message.TYPE_OLDMSG: <NEW_LINE> <INDENT> self.pending.append(msg) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.add_message(msg, self.config.b_allow_auto_scroll) <NEW_LINE> <DEDENT> <DEDENT> def add_message(self, msg, scroll): <NEW_LINE> <INDENT> raise NotImplementedError("Method not implemented") <NEW_LINE> <DEDENT> def update_p2p(self, account, _type, *what): <NEW_LINE> <INDENT> raise NotImplementedError("Method not implemented")
|
Base class to display conversation messages
|
625990278c3a8732951f74c8
|
class CountryMiddleware(object): <NEW_LINE> <INDENT> def process_request(self, request): <NEW_LINE> <INDENT> new_ip_address = get_real_ip(request) <NEW_LINE> old_ip_address = request.session.get('ip_address', None) <NEW_LINE> if not new_ip_address and old_ip_address: <NEW_LINE> <INDENT> del request.session['ip_address'] <NEW_LINE> del request.session['country_code'] <NEW_LINE> <DEDENT> elif new_ip_address != old_ip_address: <NEW_LINE> <INDENT> if new_ip_address.find(':') >= 0: <NEW_LINE> <INDENT> country_code = pygeoip.GeoIP(settings.GEOIPV6_PATH).country_code_by_addr(new_ip_address) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> country_code = pygeoip.GeoIP(settings.GEOIP_PATH).country_code_by_addr(new_ip_address) <NEW_LINE> <DEDENT> request.session['country_code'] = country_code <NEW_LINE> request.session['ip_address'] = new_ip_address <NEW_LINE> log.debug('Country code for IP: %s is set to %s', new_ip_address, country_code)
|
Identify the country by IP address.
|
62599027c432627299fa3f63
|
class RegionInfo(object): <NEW_LINE> <INDENT> def __init__(self, connection=None, name=None, endpoint=None): <NEW_LINE> <INDENT> self.connection = connection <NEW_LINE> self.name = name <NEW_LINE> self.endpoint = endpoint <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'RegionInfo:%s' % self.name <NEW_LINE> <DEDENT> def startElement(self, name, attrs, connection): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def endElement(self, name, value, connection): <NEW_LINE> <INDENT> if name == 'regionName': <NEW_LINE> <INDENT> self.name = value <NEW_LINE> <DEDENT> elif name == 'regionEndpoint': <NEW_LINE> <INDENT> self.endpoint = value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> setattr(self, name, value) <NEW_LINE> <DEDENT> <DEDENT> def connect(self, **kw_params): <NEW_LINE> <INDENT> from boto.ec2.connection import EC2Connection <NEW_LINE> return EC2Connection(region=self, **kw_params)
|
Represents an EC2 Region
|
625990276e29344779b015c1
|
class NBExtensionHandler(IPythonHandler): <NEW_LINE> <INDENT> @web.authenticated <NEW_LINE> def get(self): <NEW_LINE> <INDENT> yaml_list = [] <NEW_LINE> for root, dirs, files in chain.from_iterable( os.walk(nb_ext_dir, followlinks=True) for nb_ext_dir in nbextension_dirs): <NEW_LINE> <INDENT> dirs[:] = [d for d in dirs if d not in exclude] <NEW_LINE> for filename in files: <NEW_LINE> <INDENT> if filename.endswith('.yaml'): <NEW_LINE> <INDENT> yaml_list.append((root, filename)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> extension_list = [] <NEW_LINE> required_keys = ( 'Type', 'Compatibility', 'Name', 'Main', 'Description') <NEW_LINE> for ext_dir, yaml_filename in sorted(yaml_list): <NEW_LINE> <INDENT> with open(os.path.join(ext_dir, yaml_filename), 'r') as stream: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> extension = yaml.load(stream) <NEW_LINE> <DEDENT> except ScannerError: <NEW_LINE> <INDENT> self.log.warning( 'failed to load yaml file %r', yaml_filename) <NEW_LINE> continue <NEW_LINE> <DEDENT> <DEDENT> if any(key not in extension for key in required_keys): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if not extension['Type'].strip().startswith( 'IPython Notebook Extension'): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> compat = extension['Compatibility'].strip() <NEW_LINE> if not compat.startswith( notebook.__version__[:2]): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> idx = ext_dir.find('nbextensions') <NEW_LINE> url = ext_dir[idx::].replace('\\', '/') <NEW_LINE> extension['url'] = url <NEW_LINE> extension_list.append(extension) <NEW_LINE> self.log.info( "Found {} extension {}".format(compat, extension['Name'])) <NEW_LINE> <DEDENT> extension_list_json = json.dumps(extension_list).replace("'", "'") <NEW_LINE> self.write(self.render_template( 'nbextensions.html', base_url=self.base_url, extension_list=extension_list_json, page_title="Notebook Extension Configuration" ))
|
Render the notebook extension configuration interface.
|
625990278c3a8732951f74c9
|
class RegressionModel(PredictionModel, _PredictorParams): <NEW_LINE> <INDENT> __metaclass__ = ABCMeta
|
Model produced by a ``Regressor``.
.. versionadded:: 3.0.0
|
62599027a4f1c619b294f566
|
class Solution: <NEW_LINE> <INDENT> def findRightInterval(self, intervals: List[List[int]]) -> List[int]: <NEW_LINE> <INDENT> invs = sorted((x[0], i) for i, x in enumerate(intervals)) <NEW_LINE> ans = [] <NEW_LINE> for x in intervals: <NEW_LINE> <INDENT> idx = bisect.bisect_right(invs, (x[1],)) <NEW_LINE> ans.append(invs[idx][1] if idx < len(invs) else -1) <NEW_LINE> <DEDENT> return ans
|
按照区间起点排序,然后二分查找
|
6259902766673b3332c31361
|
class Sentence(Sequence): <NEW_LINE> <INDENT> def __init__(self, words): <NEW_LINE> <INDENT> super(Sentence, self).__init__(1 + len(words)) <NEW_LINE> self._words[0] = ROOT <NEW_LINE> for i, word in enumerate(words): <NEW_LINE> <INDENT> self._words[i+1] = tuple(word.get(f, '_') for f in FEATURES) <NEW_LINE> self._heads[i+1] = int(word.get('head', 0)) <NEW_LINE> self._labels[i+1] = word.get('deprel', '_')
|
A Sentence is just a raw Sequence of words.
|
625990275166f23b2e244349
|
class SquareBrackets(TokenList): <NEW_LINE> <INDENT> M_OPEN = T.Punctuation, '[' <NEW_LINE> M_CLOSE = T.Punctuation, ']' <NEW_LINE> @property <NEW_LINE> def _groupable_tokens(self): <NEW_LINE> <INDENT> return self.tokens[1:-1]
|
Tokens between square brackets
|
62599027d164cc6175821eeb
|
@reversion.register() <NEW_LINE> class InstanceIncludeFile(models.Model): <NEW_LINE> <INDENT> instance = models.ForeignKey(CourseInstance) <NEW_LINE> exercises = models.ManyToManyField(ContentPage, blank=True, through='InstanceIncludeFileToExerciseLink', through_fields=('include_file', 'exercise')) <NEW_LINE> default_name = models.CharField(verbose_name='Default name', max_length=255) <NEW_LINE> description = models.TextField(blank=True, null=True) <NEW_LINE> fileinfo = models.FileField(max_length=255, upload_to=get_instancefile_path) <NEW_LINE> def get_file_contents(self): <NEW_LINE> <INDENT> file_contents = None <NEW_LINE> with open(self.fileinfo.path, 'rb') as f: <NEW_LINE> <INDENT> file_contents = f.read() <NEW_LINE> <DEDENT> return file_contents
|
A file that's linked to an instance and can be included in any exercise
that needs it. (File upload, code input, code replace, ...)
|
62599027ac7a0e7691f7345d
|
class SingleVote(models.Model): <NEW_LINE> <INDENT> vote_tally = models.ForeignKey(VoteTally, on_delete=models.CASCADE, related_name='votes') <NEW_LINE> legislator = models.ForeignKey(Legislator, on_delete=models.CASCADE, related_name='votes') <NEW_LINE> value = models.CharField(max_length=1, choices=constants.VOTE_CHOICES) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return f'{self.legislator}, {self.vote_tally.bill}, vote={self.value}'
|
Single vote from from an individual legislator on an individual tally.
|
625990278e05c05ec3f6f616
|
class EmployeeViewSet(BaseModelViewSet): <NEW_LINE> <INDENT> queryset = Employee.objects.all() <NEW_LINE> serializer_class = EmployeeSerializer
|
API end point for Employee model
|
62599027a8ecb03325872193
|
class _memoized(object): <NEW_LINE> <INDENT> def __init__(self, func): <NEW_LINE> <INDENT> self.func = func <NEW_LINE> self.cache = {} <NEW_LINE> self.nev = 0 <NEW_LINE> <DEDENT> def __call__(self, *args, **kwargs): <NEW_LINE> <INDENT> memoize = kwargs.pop('memoize', True) <NEW_LINE> nothashable = kwargs.pop('nothashable', 'ignore') <NEW_LINE> if memoize: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> index = () <NEW_LINE> for arg in args: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> index += tuple(arg) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> index += (arg, ) <NEW_LINE> <DEDENT> <DEDENT> for item in kwargs.values(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> index += (float(item), ) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> if index in self.cache: <NEW_LINE> <INDENT> return self.cache[index] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> value = self.func(*args, **kwargs) <NEW_LINE> self.nev += 1 <NEW_LINE> self.cache[index] = value <NEW_LINE> return value <NEW_LINE> <DEDENT> <DEDENT> except TypeError: <NEW_LINE> <INDENT> if nothashable == 'raise': <NEW_LINE> <INDENT> raise TypeError('Not hashable: %s' % str(args)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print('not hashable', args) <NEW_LINE> self.nev += 1 <NEW_LINE> return self.func(*args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.nev += 1 <NEW_LINE> return self.func(*args, **kwargs)
|
Decorator. Caches a function's return value each time it is called.
If called later with the same arguments, the cached value is returned
(not reevaluated).
Can be turned of by passing `memoize=False` when calling the function.
If the function arguments are not hashable, then no caching is attempted
and the function is evaluated at every call. An error can instead be raised
by passing `nothashable=raise` when calling the function.
|
62599027796e427e5384f6f2
|
class Meta(): <NEW_LINE> <INDENT> model = Account <NEW_LINE> fields = ['first_name', 'last_name', 'pic']
|
.
|
62599027be8e80087fbbffee
|
class PyDeeptools(PythonPackage): <NEW_LINE> <INDENT> homepage = "https://pypi.io/packages/source/d/deepTools" <NEW_LINE> url = "https://pypi.io/packages/source/d/deepTools/deepTools-2.5.2.tar.gz" <NEW_LINE> version('3.2.1', sha256='ccbabb46d6c17c927e96fadc43d8d4770efeaf40b9bcba3b94915a211007378e') <NEW_LINE> version('2.5.2', 'ba8a44c128c6bb1ed4ebdb20bf9ae9c2') <NEW_LINE> depends_on('python@2.7:', type=('build', 'run')) <NEW_LINE> depends_on('py-setuptools', type='build') <NEW_LINE> depends_on('py-numpy@1.9.0:', type=('build', 'run')) <NEW_LINE> depends_on('py-scipy@0.17.0:', type=('build', 'run')) <NEW_LINE> depends_on('py-py2bit@0.2.0:', type=('build', 'run')) <NEW_LINE> depends_on('py-pybigwig@0.2.1:', type=('build', 'run')) <NEW_LINE> depends_on('py-pysam@0.8.2:', type=('build', 'run')) <NEW_LINE> depends_on('py-matplotlib@1.4.0:', type=('build', 'run')) <NEW_LINE> depends_on('py-numpydoc@0.5:', type=('build', 'run'))
|
deepTools addresses the challenge of handling the large amounts of data
that are now routinely generated from DNA sequencing centers.
|
62599027c432627299fa3f67
|
@register() <NEW_LINE> class aci_rename(crud.Update): <NEW_LINE> <INDENT> NO_CLI = True <NEW_LINE> has_output_params = ( Str('aci', label=_('ACI'), ), ) <NEW_LINE> takes_options = ( _prefix_option, Str('newname', doc=_('New ACI name'), ), ) <NEW_LINE> msg_summary = _('Renamed ACI to "%(value)s"') <NEW_LINE> def execute(self, aciname, **kw): <NEW_LINE> <INDENT> ldap = self.api.Backend.ldap2 <NEW_LINE> entry = ldap.get_entry(self.api.env.basedn, ['aci']) <NEW_LINE> acis = _convert_strings_to_acis(entry.get('aci', [])) <NEW_LINE> aci = _find_aci_by_name(acis, kw['aciprefix'], aciname) <NEW_LINE> for a in acis: <NEW_LINE> <INDENT> prefix, name = _parse_aci_name(a.name) <NEW_LINE> if _make_aci_name(prefix, kw['newname']) == a.name: <NEW_LINE> <INDENT> raise errors.DuplicateEntry() <NEW_LINE> <DEDENT> <DEDENT> newkw = _aci_to_kw(ldap, aci) <NEW_LINE> if 'selfaci' in newkw and newkw['selfaci'] == True: <NEW_LINE> <INDENT> kw['selfaci'] = True <NEW_LINE> <DEDENT> if 'aciname' in newkw: <NEW_LINE> <INDENT> del newkw['aciname'] <NEW_LINE> <DEDENT> newaci = _make_aci(ldap, None, kw['newname'], newkw) <NEW_LINE> self.api.Command['aci_del'](aciname, aciprefix=kw['aciprefix']) <NEW_LINE> result = self.api.Command['aci_add'](kw['newname'], **newkw)['result'] <NEW_LINE> if kw.get('raw', False): <NEW_LINE> <INDENT> result = dict(aci=unicode(newaci)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = _aci_to_kw(ldap, newaci) <NEW_LINE> <DEDENT> return dict( result=result, value=pkey_to_value(kw['newname'], kw), )
|
Rename an ACI.
|
625990271d351010ab8f4a8b
|
class DNSBLRejectedError(BaseError): <NEW_LINE> <INDENT> def message(self, request): <NEW_LINE> <INDENT> return ( "The IP address is being listed in one of DNSBL databases " + "and therefore rejected." ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return "dnsbl_rejected" <NEW_LINE> <DEDENT> @property <NEW_LINE> def http_status(self): <NEW_LINE> <INDENT> return "422 Unprocessable Entity"
|
An :class:`Exception` class that will be raised if user request was
blocked due user IP address failed an DNSBL check.
|
62599027d18da76e235b7908
|
class JobInactivate(CreateView): <NEW_LINE> <INDENT> model = JobInactivated <NEW_LINE> template_name = 'jobs/job_inactivate_form.html' <NEW_LINE> form_class = JobInactivateForm <NEW_LINE> def form_valid(self, form): <NEW_LINE> <INDENT> job = Job.objects.get(pk=self.kwargs['pk']) <NEW_LINE> form.instance.job = job <NEW_LINE> job.is_active = False <NEW_LINE> job.save() <NEW_LINE> if form.cleaned_data['send_email']: <NEW_LINE> <INDENT> context = { 'job_title': job.title, 'reason': form.cleaned_data['reason'], 'comment': form.cleaned_data['comment'] } <NEW_LINE> body = render_to_string('jobs/inactivate_job_email.txt', context) <NEW_LINE> email = EmailMessage( subject="[PyAr] Aviso de trabajo dado de baja", to=(job.company.owner.email, ), from_email=getattr(settings, "DEFAULT_FROM_EMAIL"), body=body ) <NEW_LINE> email.send() <NEW_LINE> <DEDENT> return super(JobInactivate, self).form_valid(form)
|
Inactivate Job by moderator
|
6259902791af0d3eaad3ad9d
|
class InfInut1(models.AbstractModel): <NEW_LINE> <INDENT> _description = textwrap.dedent(" %s" % (__doc__,)) <NEW_LINE> _name = 'nfe.40.infinut1' <NEW_LINE> _inherit = 'spec.mixin.nfe' <NEW_LINE> _generateds_type = 'infInutType1' <NEW_LINE> _concrete_rec_name = 'nfe40_Id' <NEW_LINE> nfe40_Id = fields.Char( string="Id") <NEW_LINE> nfe40_tpAmb = fields.Selection( TAMB, string="Identificação do Ambiente", xsd_required=True, help="Identificação do Ambiente:" "\n1 - Produção" "\n2 - Homologação") <NEW_LINE> nfe40_verAplic = fields.Char( string="Versão do Aplicativo que processou a NF", xsd_required=True, help="Versão do Aplicativo que processou a NF-e") <NEW_LINE> nfe40_cStat = fields.Char( string="Código do status da mensagem enviada", xsd_required=True) <NEW_LINE> nfe40_xMotivo = fields.Char( string="Descrição literal do status do serviço solicitado", xsd_required=True) <NEW_LINE> nfe40_cUF = fields.Selection( TCODUFIBGE, string="Código da UF que atendeu a solicitação", xsd_required=True) <NEW_LINE> nfe40_ano = fields.Char( string="Ano de inutilização da numeração") <NEW_LINE> nfe40_CNPJ = fields.Char( string="CNPJ do emitente") <NEW_LINE> nfe40_mod = fields.Selection( TMOD, string="Modelo da NF-e (55, etc.)") <NEW_LINE> nfe40_serie = fields.Char( string="Série da NF-e") <NEW_LINE> nfe40_nNFIni = fields.Char( string="Número da NF-e inicial") <NEW_LINE> nfe40_nNFFin = fields.Char( string="Número da NF-e final") <NEW_LINE> nfe40_dhRecbto = fields.Datetime( string="Data e hora de recebimento", xsd_required=True, help="Data e hora de recebimento, no formato AAAA-MM-DDTHH:MM:SS." "\nDeve ser preenchida com data e hora da gravação no" "\nBanco em caso de Confirmação. Em caso de Rejeição," "\ncom data e hora do recebimento do Pedido de" "\nInutilização.") <NEW_LINE> nfe40_nProt = fields.Char( string="Número do Protocolo de Status da NF", help="Número do Protocolo de Status da NF-e. 
1 posição (1 –" "\nSecretaria de Fazenda Estadual 2 – Receita Federal);" "\n2 - código da UF - 2 posições ano; 10 seqüencial no" "\nano.")
|
Dados do Retorno do Pedido de Inutilização de Numeração da Nota Fiscal
Eletrônica
|
625990276fece00bbaccc92f
|
class StartUDDBInstanceRequestSchema(schema.RequestSchema): <NEW_LINE> <INDENT> fields = { "ProjectId": fields.Str(required=True, dump_to="ProjectId"), "Region": fields.Str(required=True, dump_to="Region"), "UDDBId": fields.Str(required=True, dump_to="UDDBId"), "Zone": fields.Str(required=True, dump_to="Zone"), }
|
StartUDDBInstance - 启动UDDB实例,开始提供服务。
如下状态的UDDB实例可以进行这个操作:
Shutoff: 已关闭
当请求返回成功之后,UDDB实例的状态变成"Starting"(启动中); 如果返回失败, UDDB实例状态保持不变 UDDB实例在启动过程中, 当UDDB实例启动成功之后, UDDB实例的状态变成"Running"(正常运行中); 如果启动过程中出现异常, 状态变成"Abnormal"(异常运行中), 或者"Shutoff"(已关闭)
|
62599027d10714528d69ee47
|
class CustomDomainPaged(Paged): <NEW_LINE> <INDENT> _attribute_map = { 'next_link': {'key': 'nextLink', 'type': 'str'}, 'current_page': {'key': 'value', 'type': '[CustomDomain]'} } <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(CustomDomainPaged, self).__init__(*args, **kwargs)
|
A paging container for iterating over a list of :class:`CustomDomain <azure.mgmt.cdn.models.CustomDomain>` object
|
625990278a349b6b436871ae
|
class TestingNightlyJobRunner(NightlyJobRunner): <NEW_LINE> <INDENT> def get_job_providers(self): <NEW_LINE> <INDENT> return {name: provider for name, provider in getAdapters([api.portal.get(), getRequest(), self.log], ITestingNightlyJobProvider)}
|
Nightly job runner used for testing.
This runner *only* collects nightly job providers providing
ITestingNightlyJobProvider, and therefore ignores real job providers
that would otherwise interfere with testing conditions.
|
62599027ac7a0e7691f7345f
|
class Weapon(Record): <NEW_LINE> <INDENT> def __init__(self, name="", cost=0): <NEW_LINE> <INDENT> Record.__init__(self) <NEW_LINE> self.name = name <NEW_LINE> self.cost = cost <NEW_LINE> self.stats = collections.OrderedDict() <NEW_LINE> self.stats["Name"] = self.name <NEW_LINE> self.stats["Cost"] = self.cost <NEW_LINE> self.modes = [] <NEW_LINE> self.abilities = [] <NEW_LINE> <DEDENT> def parse(self, row, table): <NEW_LINE> <INDENT> self.name = row["Name"] <NEW_LINE> self.cost = int(row["Cost"]) <NEW_LINE> stats = ["Name", "Cost", "Range", "Type", "S", "AP", "D"] <NEW_LINE> for stat in stats: <NEW_LINE> <INDENT> self.stats[stat] = row[stat] <NEW_LINE> <DEDENT> self.abilities = [x.strip() for x in row["Abilities"].split("|")] <NEW_LINE> if "" in self.abilities: self.abilities.remove("") <NEW_LINE> abilities_str = ", ".join(self.abilities) <NEW_LINE> if len(abilities_str) == 0: abilities_str = "-" <NEW_LINE> self.stats["Abilities"] = abilities_str <NEW_LINE> pattern = "(.*)\\[.*\\]" <NEW_LINE> match = re.match(pattern, self.name) <NEW_LINE> if match: <NEW_LINE> <INDENT> base_name = match.group(1).strip() <NEW_LINE> if not base_name in table: <NEW_LINE> <INDENT> table[base_name] = Weapon(base_name, self.cost) <NEW_LINE> <DEDENT> table[base_name].modes.append(self) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> table[self.name] = self <NEW_LINE> <DEDENT> <DEDENT> def get_modes(self): <NEW_LINE> <INDENT> if len(self.modes) > 0: <NEW_LINE> <INDENT> return self.modes <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return [self]
|
Weapon record.
Weapons are a bit like models in that there can be a number of alternate
profiles associated with a weapon name, but they are subtly different and a
little more complicated.
Assuming we encounter the weapon 'Missile Launcher [Krak]' in a row, and we
have not yet encountered a missile launcher variant, then the following
will end up in the table
{
...,
"Missile Launcher" : Weapon("Missile Launcher", <cost>)
}
and that weapon, in its 'modes' field, will have
[
Weapon("Missile Launcher [Krak]", <cost>)
]
and that (child) weapon will have the actual weapon profile. Subsequently
if we encounter "Missile Launcher [Frag]" in a row, nothing is added to the
table but we lookup the "Missile Launcher" record and add a new profile to
it:
[
Weapon("Missile Launcher [Krak]", <cost>),
Weapon("Missile Launcher [Frag]", <cost>)
]
On the other hand, a Weapon without multiple modes will appear in the table
as a normal record.
{
"Bolt Pistol": Weapon(...), # this is the real record
}
When looking up a weapon, one should always call get_modes() on it to
ensure you're looking at the real record(s) and not a dummy base weapon.
|
62599027925a0f43d25e8fbd
|
class CmdDumpDpProvider(gdb.Command): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(CmdDumpDpProvider, self).__init__("ovs_dump_dp_provider", gdb.COMMAND_DATA) <NEW_LINE> <DEDENT> def invoke(self, arg, from_tty): <NEW_LINE> <INDENT> dp_providers = get_global_variable('dpif_classes') <NEW_LINE> if dp_providers is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for dp_class in ForEachSHASH(dp_providers, typeobj="struct registered_dpif_class"): <NEW_LINE> <INDENT> print("(struct registered_dpif_class *) {}: " "(struct dpif_class *) 0x{:x} = {{type = {}, ...}}, " "refcount = {}". format(dp_class, int(dp_class['dpif_class']), dp_class['dpif_class']['type'].string(), dp_class['refcount']))
|
Dump all registered registered_dpif_class structures.
Usage: ovs_dump_dp_provider
|
625990278c3a8732951f74ce
|
class UpdateNetworkFirewalledServiceModel(object): <NEW_LINE> <INDENT> _names = { "access":'access', "allowed_ips":'allowedIps' } <NEW_LINE> def __init__(self, access=None, allowed_ips=None): <NEW_LINE> <INDENT> self.access = access <NEW_LINE> self.allowed_ips = allowed_ips <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dictionary(cls, dictionary): <NEW_LINE> <INDENT> if dictionary is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> access = dictionary.get('access') <NEW_LINE> allowed_ips = dictionary.get('allowedIps') <NEW_LINE> return cls(access, allowed_ips)
|
Implementation of the 'updateNetworkFirewalledService' model.
TODO: type model description here.
Attributes:
access (AccessEnum): A string indicating the rule for which IPs are
allowed to use the specified service. Acceptable values are
"blocked" (no remote IPs can access the service), "restricted"
(only whitelisted IPs can access the service), and "unrestriced"
(any remote IP can access the service). This field is required
allowed_ips (list of string): An array of whitelisted IPs that can
access the service. This field is required if "access" is set to
"restricted". Otherwise this field is ignored
|
6259902756b00c62f0fb3836
|
class SimpleSerializePrettyTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.factory = kmldom.KmlFactory_GetFactory() <NEW_LINE> self.kml = self.factory.CreateKml() <NEW_LINE> self.folder = self.factory.CreateFolder() <NEW_LINE> self.folder.set_name('folder') <NEW_LINE> self.placemark = self.factory.CreatePlacemark() <NEW_LINE> self.placemark.set_name('placemark') <NEW_LINE> self.folder.add_feature(self.placemark) <NEW_LINE> self.kml.set_feature(self.folder) <NEW_LINE> self.xml = kmldom.SerializePretty(self.kml) <NEW_LINE> <DEDENT> def testBasic(self): <NEW_LINE> <INDENT> assert self.xml <NEW_LINE> assert '<kml>\n' ' <Folder>\n' ' <name>folder</name>\n' ' <Placemark>\n' ' <name>placemark</name>\n' ' </Placemark>\n' ' </Folder>\n' '</kml>\n' == self.xml
|
This tests the SerializePretty() function.
|
62599027507cdc57c63a5d1d
|
class GGTeams(dict): <NEW_LINE> <INDENT> def __new__(cls): <NEW_LINE> <INDENT> if not '_the_instance' in cls.__dict__: <NEW_LINE> <INDENT> cls._the_instance = dict.__new__(cls) <NEW_LINE> for team in (2, 3): <NEW_LINE> <INDENT> cls._the_instance[team] = TeamManagement(team) <NEW_LINE> <DEDENT> <DEDENT> return cls._the_instance <NEW_LINE> <DEDENT> def __getitem__(self, item): <NEW_LINE> <INDENT> item = int(item) <NEW_LINE> if not item in (2, 3): <NEW_LINE> <INDENT> raise KeyError('Team must be either 2 or 3 not "%s"' % item) <NEW_LINE> <DEDENT> return super(GGTeams, self).__getitem__(item) <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> for team in self: <NEW_LINE> <INDENT> self[team].reset_levels()
|
Class to store the 2 teams
|
62599027be8e80087fbbfff0
|
class ContentSpacing: <NEW_LINE> <INDENT> xLeftLimit = 0 <NEW_LINE> yTopLimit = 0 <NEW_LINE> xRightLimit = 0 <NEW_LINE> yBottomLimit = 0 <NEW_LINE> xBorder = 7 <NEW_LINE> yBorder = 7 <NEW_LINE> xIncrement = 0 <NEW_LINE> yIncrement = 0 <NEW_LINE> xSpacing = 3 <NEW_LINE> ySpacing = 3 <NEW_LINE> photoTextSpacing = 6 <NEW_LINE> def __init__(self, mode, order): <NEW_LINE> <INDENT> if mode == PrintMode.TEXT_ONLY: <NEW_LINE> <INDENT> self.yIncrement = TextSize.h <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.yIncrement = PhotoSize.h <NEW_LINE> <DEDENT> self.yIncrement += self.ySpacing <NEW_LINE> if mode == PrintMode.PHOTO_ONLY: <NEW_LINE> <INDENT> self.xIncrement = PhotoSize.w <NEW_LINE> <DEDENT> elif mode == PrintMode.TEXT_ONLY: <NEW_LINE> <INDENT> self.xIncrement = TextSize.w <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.xIncrement = PhotoSize.w + self.photoTextSpacing + TextSize.w <NEW_LINE> <DEDENT> self.xIncrement += self.xSpacing <NEW_LINE> if order == PrintDirection.REVERSED: <NEW_LINE> <INDENT> self.xIncrement *= -1 <NEW_LINE> self.yIncrement *= -1 <NEW_LINE> <DEDENT> self.xLeftLimit = self.xBorder <NEW_LINE> self.yTopLimit = self.yBorder <NEW_LINE> self.xRightLimit = A4Size.w - self.xBorder - abs(self.xIncrement) <NEW_LINE> self.yBottomLimit = A4Size.h - self.yBorder - abs(self.yIncrement)
|
The most efficient spacing based on printing mode selected
|
625990273eb6a72ae038b5db
|
class BzrLibraryState(object): <NEW_LINE> <INDENT> def __init__(self, ui, trace): <NEW_LINE> <INDENT> self._ui = ui <NEW_LINE> self._trace = trace <NEW_LINE> self.cmdline_overrides = config.CommandLineStore() <NEW_LINE> self.config_stores = {} <NEW_LINE> self.started = False <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> if not self.started: <NEW_LINE> <INDENT> self._start() <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def _start(self): <NEW_LINE> <INDENT> self.cleanups = cleanup.ObjectWithCleanups() <NEW_LINE> if bzrlib.version_info[3] == 'final': <NEW_LINE> <INDENT> self.cleanups.add_cleanup( symbol_versioning.suppress_deprecation_warnings(override=True)) <NEW_LINE> <DEDENT> self._trace.__enter__() <NEW_LINE> self._orig_ui = bzrlib.ui.ui_factory <NEW_LINE> bzrlib.ui.ui_factory = self._ui <NEW_LINE> self._ui.__enter__() <NEW_LINE> self.saved_state = bzrlib.global_state <NEW_LINE> bzrlib.global_state = self <NEW_LINE> self.started = True <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> if exc_type is None: <NEW_LINE> <INDENT> for k, store in self.config_stores.iteritems(): <NEW_LINE> <INDENT> store.save_changes() <NEW_LINE> <DEDENT> <DEDENT> self.cleanups.cleanup_now() <NEW_LINE> trace._flush_stdout_stderr() <NEW_LINE> trace._flush_trace() <NEW_LINE> osutils.report_extension_load_failures() <NEW_LINE> self._ui.__exit__(None, None, None) <NEW_LINE> self._trace.__exit__(None, None, None) <NEW_LINE> ui.ui_factory = self._orig_ui <NEW_LINE> bzrlib.global_state = self.saved_state <NEW_LINE> return False
|
The state about how bzrlib has been configured.
This is the core state needed to make use of bzr. The current instance is
currently always exposed as bzrlib.global_state, but we desired to move
to a point where no global state is needed at all.
:ivar saved_state: The bzrlib.global_state at the time __enter__ was
called.
:ivar cleanups: An ObjectWithCleanups which can be used for cleanups that
should occur when the use of bzrlib is completed. This is initialised
in __enter__ and executed in __exit__.
|
625990276e29344779b015c7
|
class Operation(Expression, metaclass=ABCMeta): <NEW_LINE> <INDENT> def __init__(self, *operands, **kwargs): <NEW_LINE> <INDENT> self._operands = operands <NEW_LINE> super().__init__(*operands, **kwargs) <NEW_LINE> <DEDENT> @property <NEW_LINE> def operands(self): <NEW_LINE> <INDENT> return self._operands <NEW_LINE> <DEDENT> @property <NEW_LINE> def args(self): <NEW_LINE> <INDENT> return self._operands
|
Base class for "operations"
Operations are Expressions that act algebraically on other expressions
(their "operands").
Operations differ from more general Expressions by the convention that the
arguments of the Operator are exactly the operands (which must be members
of the algebra!) Any other parameters (non-operands) that may be required
must be given as keyword-arguments.
|
62599027d164cc6175821eef
|
class Crawler(object): <NEW_LINE> <INDENT> def __init__(self, site, timeout, parallel=False): <NEW_LINE> <INDENT> self.site = site <NEW_LINE> self.timeout = timeout <NEW_LINE> self.parallel = parallel <NEW_LINE> self.queued = set() <NEW_LINE> if parallel: <NEW_LINE> <INDENT> self.queued_lock = Lock() <NEW_LINE> self.queue = Queue() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.queue = [] <NEW_LINE> <DEDENT> self.url_count = 0 <NEW_LINE> self.queue_url(site, site, None) <NEW_LINE> <DEDENT> def put_task(self, task): <NEW_LINE> <INDENT> if self.parallel: <NEW_LINE> <INDENT> self.queue.put(task) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.queue.append(task) <NEW_LINE> <DEDENT> <DEDENT> def get_task(self): <NEW_LINE> <INDENT> if self.parallel: <NEW_LINE> <INDENT> return self.queue.get() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.queue.pop() <NEW_LINE> <DEDENT> <DEDENT> def task_done(self): <NEW_LINE> <INDENT> if self.parallel: <NEW_LINE> <INDENT> self.queue.task_done() <NEW_LINE> <DEDENT> <DEDENT> def all_done(self): <NEW_LINE> <INDENT> if self.parallel: <NEW_LINE> <INDENT> return self.queue.unfinished_tasks == 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return len(self.queue) == 0 <NEW_LINE> <DEDENT> <DEDENT> def unsynchronized_already_seen(self, url): <NEW_LINE> <INDENT> if not url or url in self.queued: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> self.queued.add(url) <NEW_LINE> self.url_count += 1 <NEW_LINE> return False <NEW_LINE> <DEDENT> def already_seen(self, url): <NEW_LINE> <INDENT> if self.parallel: <NEW_LINE> <INDENT> with self.queued_lock: <NEW_LINE> <INDENT> return self.unsynchronized_already_seen(url) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return self.unsynchronized_already_seen(url) <NEW_LINE> <DEDENT> <DEDENT> def queue_url(self, url, base, parent): <NEW_LINE> <INDENT> url = make_url(url, base) <NEW_LINE> if self.already_seen(url): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> read = 
url.startswith(self.site) <NEW_LINE> print(url, self.site, read) <NEW_LINE> index = url.rindex('/') <NEW_LINE> page = url[index+1:] <NEW_LINE> index = page.rfind('.') <NEW_LINE> if index >= 0: <NEW_LINE> <INDENT> ext = page[index+1:] <NEW_LINE> if ext and ext != 'html' and ext != 'htm': <NEW_LINE> <INDENT> read = False <NEW_LINE> <DEDENT> <DEDENT> self.put_task((url, parent, read)) <NEW_LINE> <DEDENT> def handle_url(self, url_info, parser): <NEW_LINE> <INDENT> url, parent, read = url_info <NEW_LINE> print('handling:', url) <NEW_LINE> try: <NEW_LINE> <INDENT> response = requests.get(url, timeout=self.timeout) <NEW_LINE> <DEDENT> except (HTTPError, URLError, ConnectionError) as e: <NEW_LINE> <INDENT> print('bad link in {0}: {1}'.format(parent, url)) <NEW_LINE> print('error:', e) <NEW_LINE> return <NEW_LINE> <DEDENT> if not read: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> parser.reset_with_page(response.url) <NEW_LINE> try: <NEW_LINE> <INDENT> data = str(response.content) <NEW_LINE> parser.feed(data) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print('error while reading {0}: {1}'.format(url, e)) <NEW_LINE> <DEDENT> <DEDENT> def crawl(self, parser): <NEW_LINE> <INDENT> while not self.all_done(): <NEW_LINE> <INDENT> self.handle_url(self.get_task(), parser) <NEW_LINE> self.task_done()
|
A web crawler that processes links in a given website, recursively
following all links on that site. This crawler supports parallel execution.
Crawling is done by calling crawl with a page parser that queues new tasks
in this Crawler.
|
6259902756b00c62f0fb3838
|
class FermiDirac_fit_func(fit_func_base): <NEW_LINE> <INDENT> dim = 1 <NEW_LINE> param_names = ('A', 'F', 'T') <NEW_LINE> F_guess = 1.9 <NEW_LINE> T_guess = 0.05 <NEW_LINE> def __call__(self, C, x): <NEW_LINE> <INDENT> from numpy import exp <NEW_LINE> A, F, T = self.get_params(C, *(self.param_names)) <NEW_LINE> y = A * (exp((x[0] - F) / T) + 1)**(-1) <NEW_LINE> self.func_call_hook(C, x, y) <NEW_LINE> return y <NEW_LINE> <DEDENT> def Guess_xy(self, x, y): <NEW_LINE> <INDENT> imin = numpy.argmin(y) <NEW_LINE> self.guess_params = (y[imin], self.F_guess, self.T_guess) <NEW_LINE> return self.guess_params
|
Fermi-Dirac function object.
For use with fit_func function.
Functional form:
C[0] * (exp((x - C[1]) / C[2]) + 1)^-1
Coefficients:
* C[0] = amplitude
* C[1] = transition "temperature"
* C[2] = "smearing temperature"
|
62599027a8ecb03325872197
|
class Transcript(models.Model): <NEW_LINE> <INDENT> date = models.DateField() <NEW_LINE> full_text = models.TextField() <NEW_LINE> headline = models.CharField(max_length=255) <NEW_LINE> url = models.CharField(max_length=255, blank=True, null=True) <NEW_LINE> transcript_type = models.CharField(max_length=255) <NEW_LINE> location_text = models.TextField(blank=True, null=True) <NEW_LINE> parsed = models.BooleanField(default=False) <NEW_LINE> parsed_date = models.DateTimeField(blank=True, null=True) <NEW_LINE> created_date = models.DateTimeField(auto_now_add=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> abstract = True
|
Examples of the kind of model code you'd want to tie these together.
speakers = models.ManyToManyField(Speaker, blank=True, null=True)
|
62599027be8e80087fbbfff2
|
class Egress(Pipeline): <NEW_LINE> <INDENT> pass
|
The egress message pipeline.
For messages flowing 'away from' the C++ layer.
|
625990278c3a8732951f74d1
|
class MarketEvent(Event): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.type = 'MARKET'
|
Handles the event of receiving a new market update with corresponding bars.
MarketEvents are triggered each event-loop cycle. It occurs when the
DataHandler object recieves a new update of market data for any symbols
which are currently being trackd. It is used to trigger the Strategy
object to generate a new batch of trading signals. It simply contains an
identification that is a market event, with no other structure
|
62599027287bf620b6272b69
|
class BatteryClass(morse.core.sensor.MorseSensorClass): <NEW_LINE> <INDENT> def __init__(self, obj, parent=None): <NEW_LINE> <INDENT> logger.info("%s initialization" % obj.name) <NEW_LINE> super(self.__class__,self).__init__(obj, parent) <NEW_LINE> self.local_data['charge'] = 100.0 <NEW_LINE> self._time = time.clock() <NEW_LINE> logger.info('Component initialized') <NEW_LINE> <DEDENT> def default_action(self): <NEW_LINE> <INDENT> newtime = time.clock() <NEW_LINE> charge = self.local_data['charge'] <NEW_LINE> dt = newtime - self._time <NEW_LINE> if self.isInChargingZone() and charge < 100: <NEW_LINE> <INDENT> charge = charge + (dt * self.blender_obj['DischargingRate']) <NEW_LINE> <DEDENT> elif charge > 0: <NEW_LINE> <INDENT> charge = charge - (dt * self.blender_obj['DischargingRate']) <NEW_LINE> <DEDENT> self.local_data['charge'] = float(charge) <NEW_LINE> self._time = newtime <NEW_LINE> <DEDENT> def isInChargingZone(self): <NEW_LINE> <INDENT> pose = self.position_3d <NEW_LINE> return False
|
Class definition for the battery sensor.
Sub class of Morse_Object.
DischargingRate: float in percent-per-seconds (game-property)
|
6259902773bcbd0ca4bcb20a
|
class ClipConfig(etam.BaseModuleConfig): <NEW_LINE> <INDENT> def __init__(self, d): <NEW_LINE> <INDENT> super(ClipConfig, self).__init__(d) <NEW_LINE> self.data = self.parse_object_array(d, "data", DataConfig) <NEW_LINE> self.parameters = self.parse_object(d, "parameters", ParametersConfig) <NEW_LINE> self._validate() <NEW_LINE> <DEDENT> def _validate(self): <NEW_LINE> <INDENT> for data in self.data: <NEW_LINE> <INDENT> Config.parse_mutually_exclusive_fields({ "event_detection_path": data.event_detection_path, "event_series_path": data.event_series_path, "frames": self.parameters.frames, })
|
Clip configuration settings.
Attributes:
data (DataConfig)
parameters (ParametersConfig)
|
62599027a8ecb03325872199
|
class VMEntities(BaseEntitiesView): <NEW_LINE> <INDENT> @property <NEW_LINE> def entity_class(self): <NEW_LINE> <INDENT> return InstanceEntity <NEW_LINE> <DEDENT> paginator = PaginationPane() <NEW_LINE> adv_search_clear = Text('//div[@id="main-content"]//h1//span[@id="clear_search"]/a')
|
Entities view for vms/instances collection destinations
|
6259902726238365f5fadacd
|
@dataclass(slots=True) <NEW_LINE> class Name: <NEW_LINE> <INDENT> name: str <NEW_LINE> type: NameType <NEW_LINE> identified_object: Optional[IdentifiedObject] = None
|
The Name class provides the means to define any number of human readable names for an object. A name is **not** to be used for defining inter-object
relationships. For inter-object relationships instead use the object identification 'mRID'.
|
625990278a349b6b436871b4
|
class MockResponse: <NEW_LINE> <INDENT> def __init__(self, mock_url): <NEW_LINE> <INDENT> if mock_url == wine_iri: <NEW_LINE> <INDENT> self.path = test_owl_wine <NEW_LINE> <DEDENT> elif mock_url == pizza_iri: <NEW_LINE> <INDENT> self.path = test_owl_pizza <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Invalid extension') <NEW_LINE> <DEDENT> if not os.path.exists(self.path): <NEW_LINE> <INDENT> raise ValueError("file doesn't exist: {}".format(self.path)) <NEW_LINE> <DEDENT> <DEDENT> def iter_lines(self): <NEW_LINE> <INDENT> with open(self.path, 'rb') as file: <NEW_LINE> <INDENT> lines = list(file) <NEW_LINE> <DEDENT> for line in lines: <NEW_LINE> <INDENT> yield line <NEW_LINE> <DEDENT> <DEDENT> def raise_for_status(self): <NEW_LINE> <INDENT> pass
|
See http://stackoverflow.com/questions/15753390/python-mock-requests-and-the-response
|
625990279b70327d1c57fcfe
|
class WorkflowConfigs(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._temp_dir = tempfile.mkdtemp() <NEW_LINE> self._workflows = [] <NEW_LINE> <DEDENT> def add_workflow(self, ert_script, name=None): <NEW_LINE> <INDENT> workflow = WorkflowConfig(ert_script, self._temp_dir, name) <NEW_LINE> self._workflows.append(workflow) <NEW_LINE> return workflow <NEW_LINE> <DEDENT> def get_workflows(self): <NEW_LINE> <INDENT> configs = {} <NEW_LINE> for workflow in self._workflows: <NEW_LINE> <INDENT> if workflow.name in configs: <NEW_LINE> <INDENT> logging.info( "Duplicate workflow name: {}, skipping {}".format( workflow.name, workflow.function_dir ) ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> configs[workflow.name] = workflow.config_path <NEW_LINE> <DEDENT> <DEDENT> return configs
|
Top level workflow config object, holds all workflow configs.
|
62599028d18da76e235b790c
|
class Processor(PostProcessor): <NEW_LINE> <INDENT> def __init__(self) -> None: <NEW_LINE> <INDENT> logging.info('Template post-processor') <NEW_LINE> <DEDENT> def run(self, context, **attributes: Mapping[str, str]) -> Dict[str, Any]: <NEW_LINE> <INDENT> logging.info('Attributes received: %s', attributes) <NEW_LINE> logging.info('Post-process code here.')
|
Post-Processor template for use.
This is a sample - valid, but useless - post processor.
Args:
PostProcessor ([type]): [description]
|
62599028507cdc57c63a5d24
|
class DataverseProvider(object): <NEW_LINE> <INDENT> name = 'Dataverse' <NEW_LINE> short_name = 'dataverse' <NEW_LINE> serializer = DataverseSerializer <NEW_LINE> def __init__(self, account=None): <NEW_LINE> <INDENT> super(DataverseProvider, self).__init__() <NEW_LINE> self.account = account <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<{name}: {status}>'.format( name=self.__class__.__name__, status=self.account.provider_id if self.account else 'anonymous' )
|
An alternative to `ExternalProvider` not tied to OAuth
|
625990286e29344779b015cd
|
class Dgp2Dcp(Canonicalization): <NEW_LINE> <INDENT> def __init__(self, problem=None) -> None: <NEW_LINE> <INDENT> super(Dgp2Dcp, self).__init__(canon_methods=None, problem=problem) <NEW_LINE> <DEDENT> def accepts(self, problem): <NEW_LINE> <INDENT> return problem.is_dgp() and all( p.value is not None for p in problem.parameters()) <NEW_LINE> <DEDENT> def apply(self, problem): <NEW_LINE> <INDENT> if not self.accepts(problem): <NEW_LINE> <INDENT> raise ValueError("The supplied problem is not DGP.") <NEW_LINE> <DEDENT> self.canon_methods = DgpCanonMethods() <NEW_LINE> equiv_problem, inverse_data = super(Dgp2Dcp, self).apply(problem) <NEW_LINE> inverse_data._problem = problem <NEW_LINE> return equiv_problem, inverse_data <NEW_LINE> <DEDENT> def canonicalize_expr(self, expr, args): <NEW_LINE> <INDENT> if type(expr) in self.canon_methods: <NEW_LINE> <INDENT> return self.canon_methods[type(expr)](expr, args) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return expr.copy(args), [] <NEW_LINE> <DEDENT> <DEDENT> def invert(self, solution, inverse_data): <NEW_LINE> <INDENT> solution = super(Dgp2Dcp, self).invert(solution, inverse_data) <NEW_LINE> if solution.status == settings.SOLVER_ERROR: <NEW_LINE> <INDENT> return solution <NEW_LINE> <DEDENT> for vid, value in solution.primal_vars.items(): <NEW_LINE> <INDENT> solution.primal_vars[vid] = np.exp(value) <NEW_LINE> <DEDENT> solution.opt_val = np.exp(solution.opt_val) <NEW_LINE> return solution
|
Reduce DGP problems to DCP problems.
This reduction takes as input a DGP problem and returns an equivalent DCP
problem. Because every (generalized) geometric program is a DGP problem,
this reduction can be used to convert geometric programs into convex form.
Example
-------
>>> import cvxpy as cp
>>>
>>> x1 = cp.Variable(pos=True)
>>> x2 = cp.Variable(pos=True)
>>> x3 = cp.Variable(pos=True)
>>>
>>> monomial = 3.0 * x_1**0.4 * x_2 ** 0.2 * x_3 ** -1.4
>>> posynomial = monomial + 2.0 * x_1 * x_2
>>> dgp_problem = cp.Problem(cp.Minimize(posynomial), [monomial == 4.0])
>>>
>>> dcp2cone = cvxpy.reductions.Dcp2Cone()
>>> assert not dcp2cone.accepts(dgp_problem)
>>>
>>> gp2dcp = cvxpy.reductions.Dgp2Dcp(dgp_problem)
>>> dcp_problem = gp2dcp.reduce()
>>>
>>> assert dcp2cone.accepts(dcp_problem)
>>> dcp_probem.solve()
>>>
>>> dgp_problem.unpack(gp2dcp.retrieve(dcp_problem.solution))
>>> print(dgp_problem.value)
>>> print(dgp_problem.variables())
|
6259902826238365f5fadacf
|
class Subsystem1: <NEW_LINE> <INDENT> def operation_ready(self) -> str: <NEW_LINE> <INDENT> return "Subsystem1: Ready!" <NEW_LINE> <DEDENT> def operation_go(self) -> str: <NEW_LINE> <INDENT> return "Subsystem1: Go!"
|
Подсистема может принимать запросы либо от фасада, либо от клиента
напрямую. В любом случае, для Подсистемы Фасад – это ещё один клиент, и
он не является частью Подсистемы.
|
62599028d10714528d69ee4b
|
class Lg: <NEW_LINE> <INDENT> __metaclass__ = Singleton <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.info={} <NEW_LINE> self.info=dbLanguage.getAll() <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> self.info=dbLanguage.getAll() <NEW_LINE> <DEDENT> def g(self,id): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> info= self.info.get(id) <NEW_LINE> if not info: <NEW_LINE> <INDENT> log.err(str(id)) <NEW_LINE> return str(id) <NEW_LINE> <DEDENT> return info <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return str(id) <NEW_LINE> log.err("%s不存在"%id)
|
殖民管理器
|
6259902873bcbd0ca4bcb20e
|
class SiteActivity(TableMixin, Base): <NEW_LINE> <INDENT> event = Column(String(31)) <NEW_LINE> description = Column(String(255)) <NEW_LINE> timestamp = Column(DateTime, default=datetime.now) <NEW_LINE> site_id = Column(Integer, ForeignKey( 'site.id', ondelete='CASCADE', onupdate='CASCADE'))
|
Table `site_activity` to log site activities.
This table is a placeholder for future use.
|
625990280a366e3fb87dd966
|
class CrateModel(six.with_metaclass(CrateModelBase, Model)): <NEW_LINE> <INDENT> pass
|
the model to use when storing stuff in crate
it allows us to specify options in Meta we otherwise couldn't use
|
62599028be8e80087fbbfff8
|
@register(Resource.ALPHA_IDENTIFIERS) <NEW_LINE> class AlphaIdentifiers(ListElement): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def read(cls, fp, **kwargs): <NEW_LINE> <INDENT> items = [] <NEW_LINE> while is_readable(fp, 4): <NEW_LINE> <INDENT> items.append(read_fmt('I', fp)[0]) <NEW_LINE> <DEDENT> return cls(items) <NEW_LINE> <DEDENT> def write(self, fp, **kwargs): <NEW_LINE> <INDENT> return sum(write_fmt(fp, 'I', item) for item in self)
|
List of alpha identifiers.
|
625990288e05c05ec3f6f61b
|
class ManagedObject(object): <NEW_LINE> <INDENT> def __init__(self, name="ManagedObject", obj_ref=None, value=None): <NEW_LINE> <INDENT> super(ManagedObject, self).__setattr__('objName', name) <NEW_LINE> if obj_ref is None: <NEW_LINE> <INDENT> obj_ref = Obj(name, value) <NEW_LINE> <DEDENT> object.__setattr__(self, 'obj', obj_ref) <NEW_LINE> object.__setattr__(self, 'propSet', []) <NEW_LINE> <DEDENT> def set(self, attr, val): <NEW_LINE> <INDENT> self.__setattr__(attr, val) <NEW_LINE> <DEDENT> def get(self, attr): <NEW_LINE> <INDENT> return self.__getattr__(attr) <NEW_LINE> <DEDENT> def __setattr__(self, attr, val): <NEW_LINE> <INDENT> for prop in self.propSet: <NEW_LINE> <INDENT> if prop.name == attr: <NEW_LINE> <INDENT> prop.val = val <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> elem = Prop() <NEW_LINE> elem.name = attr <NEW_LINE> elem.val = val <NEW_LINE> self.propSet.append(elem) <NEW_LINE> <DEDENT> def __getattr__(self, attr): <NEW_LINE> <INDENT> for elem in self.propSet: <NEW_LINE> <INDENT> if elem.name == attr: <NEW_LINE> <INDENT> return elem.val <NEW_LINE> <DEDENT> <DEDENT> msg = _("Property %(attr)s not set for the managed object %(name)s") <NEW_LINE> raise exception.NovaException(msg % {'attr': attr, 'name': self.objName})
|
Managed Data Object base class.
|
62599028c432627299fa3f71
|
class XMLRPCProject(RemoteProject): <NEW_LINE> <INDENT> @property <NEW_LINE> def packages(self): <NEW_LINE> <INDENT> if self._packages == None: <NEW_LINE> <INDENT> distributions = [] <NEW_LINE> if self.verbose: <NEW_LINE> <INDENT> info("Finding distributions for project %s" % self.name) <NEW_LINE> <DEDENT> for version in self.repository.server.package_releases(self.name): <NEW_LINE> <INDENT> releases = self.repository.server.release_urls(self.name, version) <NEW_LINE> distributions += [distros_from_url(release['url']) for release in releases] <NEW_LINE> if not releases: <NEW_LINE> <INDENT> info(" XMLRPC server returns no urls for %s version %s." % (self.name, version)) <NEW_LINE> metadata = self.repository.server.release_data(self.name, version) <NEW_LINE> info("Searching project download page '%s' for packages..." % metadata['downloads']) <NEW_LINE> distributions += find_eggs_in_url(metadata['downloads']) <NEW_LINE> <DEDENT> <DEDENT> for dist in distributions: <NEW_LINE> <INDENT> self.repository.environment.add(dist) <NEW_LINE> <DEDENT> self._packages = [] <NEW_LINE> for package in self.repository.environment[self.name]: <NEW_LINE> <INDENT> self._packages.append( self.XMLRPCPackage(self, package.version, package)) <NEW_LINE> <DEDENT> <DEDENT> return self._packages
|
An XMLRPCProject is a remote project located in a PyPI-style
XMLRPC-based repository.
|
62599028a8ecb0332587219d
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.