code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
|---|---|---|
class _FullPaths(argparse.Action): <NEW_LINE> <INDENT> def __call__(self, parser, namespace, values, option_string=None): <NEW_LINE> <INDENT> if isinstance(values, (list, tuple)): <NEW_LINE> <INDENT> vals = [os.path.abspath(os.path.expanduser(val)) for val in values] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> vals = os.path.abspath(os.path.expanduser(values)) <NEW_LINE> <DEDENT> setattr(namespace, self.dest, vals)
|
Parent class for various file type and file path handling classes.
Expands out given paths to their full absolute paths. This class should not be
called directly. It is the base class for the various different file handling
methods.
|
62599066cb5e8a47e493cd31
|
class BlockComponent(components.TokenComponent): <NEW_LINE> <INDENT> RE = re.compile('(?P<inline>.*)') <NEW_LINE> def __call__(self, info, parent): <NEW_LINE> <INDENT> return tokens.Token(parent)
|
Class for testing MarkdownReader
|
625990663d592f4c4edbc637
|
class UserInfo(models.Model): <NEW_LINE> <INDENT> user = models.OneToOneField(User) <NEW_LINE> class Meta(object): <NEW_LINE> <INDENT> verbose_name = u"User Information" <NEW_LINE> verbose_name_plural = u"Users Informations" <NEW_LINE> <DEDENT> hashes = models.TextField( blank=True, verbose_name=u"Hashes") <NEW_LINE> user_ip = models.CharField( max_length=256, blank=True, verbose_name=u"IP address") <NEW_LINE> user_browser = models.CharField( max_length=256, blank=True, verbose_name=u"Browser") <NEW_LINE> user_oc = models.CharField( max_length=256, blank=True, verbose_name=u"OC") <NEW_LINE> user_device = models.CharField( max_length=256, blank=True, verbose_name=u"Device") <NEW_LINE> user_timezone = models.CharField( max_length=256, blank=True, null=True, verbose_name=u"Timezone", default=None) <NEW_LINE> user_country = models.CharField( max_length=256, blank=True, null=True, verbose_name=u"Country", default=None) <NEW_LINE> user_city = models.CharField( max_length=256, blank=True, null=True, verbose_name=u"City", default=None) <NEW_LINE> user_cookies = models.TextField( blank=True, null=True, verbose_name=u"Cookies", default=None) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return u"%s" % self.user.username
|
To keep extra user data
|
62599066627d3e7fe0e085e4
|
class StaticWebsite(Model): <NEW_LINE> <INDENT> _validation = { 'enabled': {'required': True}, } <NEW_LINE> _attribute_map = { 'enabled': {'key': 'Enabled', 'type': 'bool', 'xml': {'name': 'Enabled'}}, 'index_document': {'key': 'IndexDocument', 'type': 'str', 'xml': {'name': 'IndexDocument'}}, 'error_document404_path': {'key': 'ErrorDocument404Path', 'type': 'str', 'xml': {'name': 'ErrorDocument404Path'}}, } <NEW_LINE> _xml_map = { } <NEW_LINE> def __init__(self, *, enabled: bool, index_document: str=None, error_document404_path: str=None, **kwargs) -> None: <NEW_LINE> <INDENT> super(StaticWebsite, self).__init__(**kwargs) <NEW_LINE> self.enabled = enabled <NEW_LINE> self.index_document = index_document <NEW_LINE> self.error_document404_path = error_document404_path
|
The properties that enable an account to host a static website.
All required parameters must be populated in order to send to Azure.
:param enabled: Required. Indicates whether this account is hosting a
static website
:type enabled: bool
:param index_document: The default name of the index page under each
directory
:type index_document: str
:param error_document404_path: The absolute path of the custom 404 page
:type error_document404_path: str
|
62599066f548e778e596cce4
|
class ApiCreateHuntHandler(api_call_handler_base.ApiCallHandler): <NEW_LINE> <INDENT> args_type = ApiCreateHuntArgs <NEW_LINE> result_type = ApiHunt <NEW_LINE> def Handle(self, args, token=None): <NEW_LINE> <INDENT> generic_hunt_args = standard.GenericHuntArgs() <NEW_LINE> generic_hunt_args.flow_runner_args.flow_name = args.flow_name <NEW_LINE> generic_hunt_args.flow_args = args.flow_args <NEW_LINE> args.hunt_runner_args.ClearFieldsWithLabel( rdf_structs.SemanticDescriptor.Labels.HIDDEN, exceptions="output_plugins") <NEW_LINE> args.hunt_runner_args.hunt_name = standard.GenericHunt.__name__ <NEW_LINE> if args.original_hunt and args.original_flow: <NEW_LINE> <INDENT> raise ValueError( "A hunt can't be a copy of a flow and a hunt at the same time.") <NEW_LINE> <DEDENT> if args.original_hunt: <NEW_LINE> <INDENT> ref = rdf_hunts.FlowLikeObjectReference.FromHuntId( utils.SmartStr(args.original_hunt.hunt_id)) <NEW_LINE> args.hunt_runner_args.original_object = ref <NEW_LINE> <DEDENT> elif args.original_flow: <NEW_LINE> <INDENT> ref = rdf_hunts.FlowLikeObjectReference.FromFlowIdAndClientId( utils.SmartStr(args.original_flow.flow_id), utils.SmartStr(args.original_flow.client_id)) <NEW_LINE> args.hunt_runner_args.original_object = ref <NEW_LINE> <DEDENT> with implementation.GRRHunt.StartHunt( runner_args=args.hunt_runner_args, args=generic_hunt_args, token=token) as hunt: <NEW_LINE> <INDENT> logging.info("User %s created a new %s hunt (%s)", token.username, hunt.args.flow_runner_args.flow_name, hunt.urn) <NEW_LINE> return ApiHunt().InitFromAff4Object(hunt, with_full_summary=True)
|
Handles hunt creation request.
|
625990664428ac0f6e659c8c
|
class InstanceServletRenderServletDelegate(RenderServlet.Delegate): <NEW_LINE> <INDENT> def __init__(self, delegate): <NEW_LINE> <INDENT> self._delegate = delegate <NEW_LINE> <DEDENT> @memoize <NEW_LINE> def CreateServerInstance(self): <NEW_LINE> <INDENT> object_store_creator = ObjectStoreCreator(start_empty=False) <NEW_LINE> branch_utility = self._delegate.CreateBranchUtility(object_store_creator) <NEW_LINE> host_file_system_provider = self._delegate.CreateHostFileSystemProvider( object_store_creator, offline=not (IsDevServer() or IsReleaseServer())) <NEW_LINE> github_file_system_provider = self._delegate.CreateGithubFileSystemProvider( object_store_creator) <NEW_LINE> return ServerInstance(object_store_creator, CompiledFileSystem.Factory(object_store_creator), branch_utility, host_file_system_provider, github_file_system_provider, CloudStorageFileSystemProvider(object_store_creator))
|
AppEngine instances should never need to call out to SVN. That should only
ever be done by the cronjobs, which then write the result into DataStore,
which is as far as instances look. To enable this, crons can pass a custom
(presumably online) ServerInstance into Get().
Why? SVN is slow and a bit flaky. Cronjobs failing is annoying but temporary.
Instances failing affects users, and is really bad.
Anyway - to enforce this, we actually don't give instances access to SVN. If
anything is missing from datastore, it'll be a 404. If the cronjobs don't
manage to catch everything - uhoh. On the other hand, we'll figure it out
pretty soon, and it also means that legitimate 404s are caught before a round
trip to SVN.
|
625990667cff6e4e811b71a2
|
class Test(BT.BiskitTest): <NEW_LINE> <INDENT> def test_lognormal(self): <NEW_LINE> <INDENT> import random <NEW_LINE> import Biskit.gnuplot as gnuplot <NEW_LINE> import Biskit.hist as H <NEW_LINE> cr = [] <NEW_LINE> for i in range( 10000 ): <NEW_LINE> <INDENT> alpha = 1.5 <NEW_LINE> beta = .7 <NEW_LINE> x = 10. <NEW_LINE> R = [ random.lognormvariate( alpha, beta ) for j in range( 10 ) ] <NEW_LINE> cr += [ logConfidence( x, R )[0] ] <NEW_LINE> <DEDENT> ca = logArea( x, alpha, beta ) <NEW_LINE> if self.local: <NEW_LINE> <INDENT> gnuplot.plot( H.density( N.array(cr) - ca, 100 ) ) <NEW_LINE> globals().update( locals() ) <NEW_LINE> <DEDENT> self.assertAlmostEqual( ca, 0.86877651432955771, 7)
|
Test class
|
62599066cc0a2c111447c67d
|
class AdminPage(AuthenticatedRequest): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> member = self.get_admin_member() <NEW_LINE> if not member: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.render_template( 'AdminPage', { 'members': model.Member.all(), 'admin': member } )
|
For /admin
Displays the application's users (minus the admin)
Allows the admin to add/drop users from the application
|
625990668e7ae83300eea7e9
|
class PersonalRequests(RequestListing): <NEW_LINE> <INDENT> template = 'requests_personal.html' <NEW_LINE> decorators = [login_required] <NEW_LINE> def requests(self, filters): <NEW_LINE> <INDENT> requests = super(PersonalRequests, self).requests(filters) <NEW_LINE> requests = requests .join(User) .filter(User.id==current_user.id) <NEW_LINE> return requests <NEW_LINE> <DEDENT> @property <NEW_LINE> def title(self): <NEW_LINE> <INDENT> current_locale = get_locale() <NEW_LINE> if current_locale is None: <NEW_LINE> <INDENT> current_locale = babel.Locale('en') <NEW_LINE> <DEDENT> if current_locale.language.lower() == 'en' and current_user.name[:-1] == u's': <NEW_LINE> <INDENT> return u"{}' Requests".format(current_user.name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return gettext(u"%(name)s's Requests", name=current_user.name)
|
Shows a list of all personally submitted requests and divisions the user
has permissions in.
It will show all requests the current user has submitted.
|
6259906632920d7e50bc77a1
|
class ImageReference(Model): <NEW_LINE> <INDENT> _attribute_map = { 'publisher': {'key': 'publisher', 'type': 'str'}, 'offer': {'key': 'offer', 'type': 'str'}, 'sku': {'key': 'sku', 'type': 'str'}, 'version': {'key': 'version', 'type': 'str'}, } <NEW_LINE> def __init__(self, *, publisher: str=None, offer: str=None, sku: str=None, version: str=None, **kwargs) -> None: <NEW_LINE> <INDENT> super(ImageReference, self).__init__(**kwargs) <NEW_LINE> self.publisher = publisher <NEW_LINE> self.offer = offer <NEW_LINE> self.sku = sku <NEW_LINE> self.version = version
|
Specifies information about the image to use. You can specify information
about platform images, marketplace images, or virtual machine images. This
element is required when you want to use a platform image, marketplace
image, or virtual machine image, but is not used in other creation
operations.
:param publisher: The image publisher.
:type publisher: str
:param offer: Specifies the offer of the platform image or marketplace
image used to create the virtual machine.
:type offer: str
:param sku: The image SKU.
:type sku: str
:param version: Specifies the version of the platform image or marketplace
image used to create the virtual machine. The allowed formats are
Major.Minor.Build or 'latest'. Major, Minor, and Build are decimal
numbers. Specify 'latest' to use the latest version of an image available
at deploy time. Even if you use 'latest', the VM image will not
automatically update after deploy time even if a new version becomes
available.
:type version: str
|
6259906699fddb7c1ca6397c
|
class Stats: <NEW_LINE> <INDENT> exposed = True <NEW_LINE> _cp_config = dict(LowDataAdapter._cp_config, **{"tools.salt_auth.on": True}) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> if cherrypy.config["apiopts"].get("stats_disable_auth"): <NEW_LINE> <INDENT> self._cp_config["tools.salt_auth.on"] = False <NEW_LINE> <DEDENT> <DEDENT> def GET(self): <NEW_LINE> <INDENT> if hasattr(logging, "statistics"): <NEW_LINE> <INDENT> return cpstats.extrapolate_statistics(logging.statistics) <NEW_LINE> <DEDENT> return {}
|
Expose statistics on the running CherryPy server
|
62599066097d151d1a2c27c7
|
class SweepPriorityQueue: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.qlist = [] <NEW_LINE> self.queue_entryD = {} <NEW_LINE> self.nth_addition = 0 <NEW_LINE> <DEDENT> def add_sa_pair(self, sa_pair, neg_priority=0): <NEW_LINE> <INDENT> if sa_pair in self.queue_entryD: <NEW_LINE> <INDENT> old_neg_priority = self.queue_entryD[ sa_pair ][0] <NEW_LINE> if old_neg_priority < neg_priority: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.mark_sa_pair_for_deletion(sa_pair) <NEW_LINE> <DEDENT> entry = [neg_priority, self.nth_addition, sa_pair] <NEW_LINE> self.nth_addition += 1 <NEW_LINE> self.queue_entryD[sa_pair] = entry <NEW_LINE> heapq.heappush(self.qlist, entry) <NEW_LINE> <DEDENT> def mark_sa_pair_for_deletion(self, sa_pair): <NEW_LINE> <INDENT> entry = self.queue_entryD.pop(sa_pair) <NEW_LINE> entry[-1] = None <NEW_LINE> <DEDENT> def pop_sa_pair(self): <NEW_LINE> <INDENT> while self.qlist: <NEW_LINE> <INDENT> neg_priority, _, sa_pair = heapq.heappop(self.qlist) <NEW_LINE> if sa_pair is not None: <NEW_LINE> <INDENT> del self.queue_entryD[sa_pair] <NEW_LINE> return sa_pair, neg_priority <NEW_LINE> <DEDENT> <DEDENT> return (None,None), None <NEW_LINE> <DEDENT> def empty(self): <NEW_LINE> <INDENT> return not self.queue_entryD <NEW_LINE> <DEDENT> def summ_print(self): <NEW_LINE> <INDENT> print('========= Current Priority Queue Contents ===========') <NEW_LINE> for v in self.qlist: <NEW_LINE> <INDENT> print( v )
|
A priority queue used for prioritized sweeping algorithm
from page 169 of Sutton & Barto
State-Action pairs are added along with the NEGATIVE of abs( delta )
where delta is the expected change in Q(s,a)
|
625990661f037a2d8b9e5418
|
class RouteDao(BaseDao): <NEW_LINE> <INDENT> def __init__(self, route, alerts, show_geo=False): <NEW_LINE> <INDENT> super(RouteDao, self).__init__() <NEW_LINE> self.copy(route, show_geo) <NEW_LINE> self.set_alerts(alerts) <NEW_LINE> <DEDENT> def copy(self, r, show_geo): <NEW_LINE> <INDENT> self.name = r.route_name <NEW_LINE> self.route_id = r.route_id <NEW_LINE> self.short_name = r.route_short_name <NEW_LINE> self.sort_order = r.route_sort_order <NEW_LINE> self.url = getattr(r, 'route_url', None) <NEW_LINE> self.add_route_dirs(r) <NEW_LINE> if show_geo: <NEW_LINE> <INDENT> self.geom = self.orm_to_geojson(r) <NEW_LINE> <DEDENT> <DEDENT> def add_route_dirs(self, route): <NEW_LINE> <INDENT> dir0 = None <NEW_LINE> dir1 = None <NEW_LINE> try: <NEW_LINE> <INDENT> for d in route.directions: <NEW_LINE> <INDENT> if d.direction_id == 0: <NEW_LINE> <INDENT> dir0 = d.direction_name <NEW_LINE> <DEDENT> elif d.direction_id == 1: <NEW_LINE> <INDENT> dir1 = d.direction_name <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.copy_dirs(dir0, dir1) <NEW_LINE> <DEDENT> def copy_dirs(self, dir0=None, dir1=None): <NEW_LINE> <INDENT> self.direction_0 = dir0 <NEW_LINE> self.direction_1 = dir1 <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_route_orm(cls, route, agency="TODO", detailed=False, show_alerts=False, show_geo=False): <NEW_LINE> <INDENT> alerts = [] <NEW_LINE> try: <NEW_LINE> <INDENT> if show_alerts: <NEW_LINE> <INDENT> alerts = AlertsDao.get_route_alerts(object_session(route), route.route_id) <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> log.warn(e) <NEW_LINE> <DEDENT> ret_val = RouteDao(route, alerts, show_geo) <NEW_LINE> return ret_val <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_route_id(cls, session, route_id, agency="TODO", detailed=False, show_alerts=False, show_geo=False): <NEW_LINE> <INDENT> log.info("query Route table") <NEW_LINE> route = session.query(Route).filter(Route.route_id == 
route_id).one() <NEW_LINE> return cls.from_route_orm(route, agency=agency, detailed=detailed, show_alerts=show_alerts, show_geo=show_geo)
|
RouteDao data object ready for marshaling into JSON
|
625990664428ac0f6e659c8d
|
class TranscriptQuerySet(models.QuerySet): <NEW_LINE> <INDENT> pass
|
QuerySet for Transcript models. Assumed to have a one-to-many
relationship with an AbstractTag model.
|
625990664f6381625f19a052
|
class window_manager(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def open_duplicate_view(self, view_name): <NEW_LINE> <INDENT> return _get_rpcclient().PlotWindowManager("openDuplicateView", None, view_name) <NEW_LINE> <DEDENT> def open_view(self, view_name=None): <NEW_LINE> <INDENT> return _get_rpcclient().PlotWindowManager("openView", None, view_name) <NEW_LINE> <DEDENT> def get_open_views(self): <NEW_LINE> <INDENT> return _get_rpcclient().PlotWindowManager("getOpenViews")
|
Wrapper for IPlotWindowManager in SDA. Allows opening, duplicating and
obtaining list of existing views
|
62599066baa26c4b54d50a00
|
class DutyPaidProofOCRRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.ImageBase64 = None <NEW_LINE> self.ImageUrl = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.ImageBase64 = params.get("ImageBase64") <NEW_LINE> self.ImageUrl = params.get("ImageUrl") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set))
|
DutyPaidProofOCR请求参数结构体
|
62599066498bea3a75a591ae
|
class Uptime(IntervalModule): <NEW_LINE> <INDENT> settings = ( ("format", "Format string"), ("color", "String color"), ("alert", "If you want the string to change color"), ("seconds_alert", "How many seconds necessary to start the alert"), ("color_alert", "Alert color"), ) <NEW_LINE> file = "/proc/uptime" <NEW_LINE> format = "up {hours}:{mins}" <NEW_LINE> color = "#ffffff" <NEW_LINE> alert = False <NEW_LINE> seconds_alert = 60 * 60 * 24 * 30 <NEW_LINE> color_alert = "#ff0000" <NEW_LINE> def run(self): <NEW_LINE> <INDENT> with open(self.file, "r") as f: <NEW_LINE> <INDENT> seconds = int(float(f.read().split()[0])) <NEW_LINE> <DEDENT> days = seconds // (60 * 60 * 24) <NEW_LINE> hours = seconds // (60 * 60) <NEW_LINE> minutes = seconds // 60 <NEW_LINE> if "{days}" in self.format: <NEW_LINE> <INDENT> hours = (seconds % (60 * 60 * 24)) // (60 * 60) <NEW_LINE> minutes = (seconds % (60 * 60 * 24)) // 60 <NEW_LINE> seconds = (seconds % (60 * 60 * 24)) <NEW_LINE> <DEDENT> if "{hours}" in self.format: <NEW_LINE> <INDENT> minutes = (seconds % (60 * 60)) // 60 <NEW_LINE> seconds = (seconds % (60 * 60)) <NEW_LINE> <DEDENT> if "{mins}" in self.format: <NEW_LINE> <INDENT> seconds = seconds % 60 <NEW_LINE> <DEDENT> fdict = { "days": days, "hours": hours, "mins": minutes, "secs": seconds, "uptime": "{}:{}".format(hours, minutes), } <NEW_LINE> self.data = fdict <NEW_LINE> if self.alert: <NEW_LINE> <INDENT> if seconds > self.seconds_alert: <NEW_LINE> <INDENT> self.color = self.color_alert <NEW_LINE> <DEDENT> <DEDENT> self.output = { "full_text": formatp(self.format, **fdict), "color": self.color }
|
Outputs Uptime
.. rubric:: Available formatters
* `{days}` - uptime in days
* `{hours}` - rest of uptime in hours
* `{mins}` - rest of uptime in minutes
* `{secs}` - rest of uptime in seconds
* `{uptime}` - deprecated: equals '`{hours}:{mins}`'
|
62599066be8e80087fbc07e4
|
class UserForManagerSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = UserModel <NEW_LINE> fields = ( "id", "username", "email", "is_superuser", "users_permission_level", "library_permission_level", "playlist_permission_level", "validated_by_manager", "validated_by_email", ) <NEW_LINE> read_only_fields = ( "id", "username", "email", "is_superuser", "validated_by_email", )
|
Users edition for managers.
|
62599066cb5e8a47e493cd32
|
class ROSWidget(flx.Widget): <NEW_LINE> <INDENT> def init(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @flx.action <NEW_LINE> def announce_action_client(self, topic, topic_type): <NEW_LINE> <INDENT> self.root.announce_action_client(topic, topic_type) <NEW_LINE> <DEDENT> @flx.action <NEW_LINE> def call_service(self, server_name, server_type, req, cb): <NEW_LINE> <INDENT> self.reaction(cb, "!root."+server_name.replace("/", "_")+"_response") <NEW_LINE> self.root.call_service(server_name, server_type, req) <NEW_LINE> <DEDENT> @flx.action <NEW_LINE> def send_action_goal(self, topic, goal, feedback_cb, done_cb): <NEW_LINE> <INDENT> self.root.send_action_goal(topic, goal) <NEW_LINE> self.reaction(feedback_cb, "!root."+topic.replace("/", "_")+"_feedback") <NEW_LINE> self.reaction(done_cb, "!root."+topic.replace("/", "_")+"_done") <NEW_LINE> <DEDENT> @flx.action <NEW_LINE> def announce_publish(self, topic, topic_type): <NEW_LINE> <INDENT> self.root.announce_publish(topic, topic_type) <NEW_LINE> <DEDENT> @flx.action <NEW_LINE> def publish(self, topic, data): <NEW_LINE> <INDENT> self.root.publish(topic, data) <NEW_LINE> <DEDENT> @flx.action <NEW_LINE> def subscribe(self, topic, topic_type, cb, hz=-1): <NEW_LINE> <INDENT> self.root.subscribe(topic, topic_type, hz) <NEW_LINE> self.reaction(cb, "!root."+topic.replace("/", "_"))
|
flx.Widget subclass that a widget can inherit from for easy access to ros functionality
|
625990663617ad0b5ee078ac
|
class Node: <NEW_LINE> <INDENT> def __init__(self, x, y, parent, g=0, h=0): <NEW_LINE> <INDENT> self.x = x <NEW_LINE> self.y = y <NEW_LINE> self.h = h <NEW_LINE> self.g = g <NEW_LINE> self.f = g + h <NEW_LINE> self.parent = parent <NEW_LINE> <DEDENT> def get_G(self, start): <NEW_LINE> <INDENT> x_dis = self.x - start[0] <NEW_LINE> y_dis = self.y - start[1] <NEW_LINE> self.g = x_dis + y_dis + (math.sqrt(2) - 2) * min(x_dis, y_dis) <NEW_LINE> return self.g <NEW_LINE> <DEDENT> def get_H(self, end): <NEW_LINE> <INDENT> x_dis = end[0] - self.x <NEW_LINE> y_dis = end[1] - self.y <NEW_LINE> self.h = x_dis + y_dis + (math.sqrt(2) - 2) * min(x_dis, y_dis) <NEW_LINE> return self.h <NEW_LINE> <DEDENT> def get_F(self, start, end): <NEW_LINE> <INDENT> if self.f == 0: <NEW_LINE> <INDENT> self.f = self.get_G(start) + self.get_H(end) <NEW_LINE> <DEDENT> return self.f <NEW_LINE> <DEDENT> def manhattan(self, from_x, from_y, end_x, end_y): <NEW_LINE> <INDENT> return abs(end_x - from_x) + abs(end_y - from_y)
|
定义相邻横竖的单元格距离是10,斜的的14
|
625990667047854f46340b0f
|
class DescribeEdgeUnitApplicationPodContainersResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.ContainerSet = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> if params.get("ContainerSet") is not None: <NEW_LINE> <INDENT> self.ContainerSet = [] <NEW_LINE> for item in params.get("ContainerSet"): <NEW_LINE> <INDENT> obj = ContainerStatus() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.ContainerSet.append(obj) <NEW_LINE> <DEDENT> <DEDENT> self.RequestId = params.get("RequestId")
|
DescribeEdgeUnitApplicationPodContainers返回参数结构体
|
62599066a17c0f6771d5d754
|
class DescriptorBase(object): <NEW_LINE> <INDENT> __metaclass__ = DescriptorMetaclass <NEW_LINE> if _USE_C_DESCRIPTORS: <NEW_LINE> <INDENT> _C_DESCRIPTOR_CLASS = () <NEW_LINE> <DEDENT> def __init__(self, options, options_class_name): <NEW_LINE> <INDENT> self._options = options <NEW_LINE> self._options_class_name = options_class_name <NEW_LINE> self.has_options = options is not None <NEW_LINE> <DEDENT> def _SetOptions(self, options, options_class_name): <NEW_LINE> <INDENT> self._options = options <NEW_LINE> self._options_class_name = options_class_name <NEW_LINE> self.has_options = options is not None <NEW_LINE> <DEDENT> def GetOptions(self): <NEW_LINE> <INDENT> if self._options: <NEW_LINE> <INDENT> return self._options <NEW_LINE> <DEDENT> from google.protobuf import descriptor_pb2 <NEW_LINE> try: <NEW_LINE> <INDENT> options_class = getattr(descriptor_pb2, self._options_class_name) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> raise RuntimeError('Unknown options class name %s!' % (self._options_class_name)) <NEW_LINE> <DEDENT> self._options = options_class() <NEW_LINE> return self._options
|
Descriptors base class.
This class is the base of all descriptor classes. It provides common options
related functionality.
Attributes:
has_options: True if the descriptor has non-default options. Usually it
is not necessary to read this -- just call GetOptions() which will
happily return the default instance. However, it's sometimes useful
for efficiency, and also useful inside the protobuf implementation to
avoid some bootstrapping issues.
|
6259906692d797404e38970b
|
class Sprint(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=100, blank=True, default='') <NEW_LINE> description = models.TextField(blank=True, default='') <NEW_LINE> end = models.DateField(unique=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name or _('Sprint ending %s') % self.end
|
Developers iteration period
|
625990667d43ff2487427fbe
|
class Datetime(PropertyDescriptor): <NEW_LINE> <INDENT> def __init__(self, default=datetime.date.today(), help=None): <NEW_LINE> <INDENT> super(Datetime, self).__init__(default=default, help=help) <NEW_LINE> <DEDENT> def validate(self, value): <NEW_LINE> <INDENT> super(Datetime, self).validate(value) <NEW_LINE> datetime_types = (datetime.datetime, datetime.date) <NEW_LINE> try: <NEW_LINE> <INDENT> import numpy as np <NEW_LINE> datetime_types += (np.datetime64,) <NEW_LINE> <DEDENT> except (ImportError, AttributeError) as e: <NEW_LINE> <INDENT> if e.args == ("'module' object has no attribute 'datetime64'",): <NEW_LINE> <INDENT> import sys <NEW_LINE> if 'PyPy' in sys.version: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise e <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> if (isinstance(value, datetime_types)): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if pd and isinstance(value, (pd.Timestamp)): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> raise ValueError("Expected a datetime instance, got %r" % value) <NEW_LINE> <DEDENT> def transform(self, value): <NEW_LINE> <INDENT> value = super(Datetime, self).transform(value) <NEW_LINE> return value
|
Datetime type property.
|
625990667d847024c075db33
|
class gdgl(find_html_ele): <NEW_LINE> <INDENT> def test_0002(self): <NEW_LINE> <INDENT> super().enter() <NEW_LINE> super().jcgngl() <NEW_LINE> time.sleep(2) <NEW_LINE> super().wdgl() <NEW_LINE> super().change_iframe('网点管理') <NEW_LINE> for i in bank_list[::-1]: <NEW_LINE> <INDENT> super().data_add() <NEW_LINE> t = 1 <NEW_LINE> super().change_iframe_times(t) <NEW_LINE> org_number = str(int(i) + 10000 ) <NEW_LINE> super().data_add_branchCode(org_number) <NEW_LINE> super().data_add_branchName('自动化测试机构') <NEW_LINE> super().data_add_finacialCode('9988776655') <NEW_LINE> super().data_add_belonBankCode(i) <NEW_LINE> super().data_add_pbcCode('') <NEW_LINE> super().data_add_enableDate('2019-08-01') <NEW_LINE> super().change_iframe_father() <NEW_LINE> super().data_add_sure(t) <NEW_LINE> t = t + 2 <NEW_LINE> time.sleep(1)
|
网点管理
|
625990664428ac0f6e659c8e
|
class ParenString(six.text_type): <NEW_LINE> <INDENT> pass
|
Class representing a parenthesis string.
|
6259906632920d7e50bc77a2
|
class Picker(MultiPicker): <NEW_LINE> <INDENT> async def run(self): <NEW_LINE> <INDENT> fetcher = SlowFetcher() <NEW_LINE> response = await fetcher.fetch(self.url()) <NEW_LINE> offset = scrape.pagination(response) <NEW_LINE> first = [jj for jj in scrape.collection(response)] <NEW_LINE> if not offset is None: <NEW_LINE> <INDENT> pages = [self.url(self.PAGINATE*ii) for ii in range(1,int(offset)+1)] <NEW_LINE> others_ = await asyncio.gather(*[fetcher.fetch(url_) for url_ in pages]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> others_ = [] <NEW_LINE> <DEDENT> return chain(first, *map(scrape.collection, tqdm(others_)))
|
docstring for Picker.
|
62599066fff4ab517ebcef78
|
class HTMLResponse(requests.Response): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs) -> None: <NEW_LINE> <INDENT> super(HTMLResponse, self).__init__(*args, **kwargs) <NEW_LINE> self._html = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def html(self) -> HTML: <NEW_LINE> <INDENT> if self._html: <NEW_LINE> <INDENT> return self._html <NEW_LINE> <DEDENT> self._html = HTML(url=self.url, html=self.content, default_encoding=self.encoding) <NEW_LINE> return self._html <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _from_response(cls, response): <NEW_LINE> <INDENT> html_r = cls() <NEW_LINE> html_r.__dict__.update(response.__dict__) <NEW_LINE> return html_r
|
An HTML-enabled :class:`Response <Response>` object.
Same as Requests class:`Response <Response>` object, but with an
intelligent ``.html`` property added.
|
6259906663d6d428bbee3e37
|
class currentStatus_result(object): <NEW_LINE> <INDENT> __slots__ = [ 'success', ] <NEW_LINE> thrift_spec = ( (0, TType.STRUCT, 'success', (Status, Status.thrift_spec), None, ), ) <NEW_LINE> def __init__(self, success=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.success = Status() <NEW_LINE> self.success.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('currentStatus_result') <NEW_LINE> if self.success is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.STRUCT, 0) <NEW_LINE> self.success.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, 
getattr(self, key)) for key in self.__slots__] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, self.__class__): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> for attr in self.__slots__: <NEW_LINE> <INDENT> my_val = getattr(self, attr) <NEW_LINE> other_val = getattr(other, attr) <NEW_LINE> if my_val != other_val: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
|
Attributes:
- success
|
6259906616aa5153ce401c37
|
class DFA: <NEW_LINE> <INDENT> current_state = None <NEW_LINE> def __init__(self, states, alphabet, start_state, accept_states): <NEW_LINE> <INDENT> self.states = states <NEW_LINE> self.alphabet = alphabet <NEW_LINE> self.start_state = start_state <NEW_LINE> self.accept_states = accept_states <NEW_LINE> self.current_state = start_state <NEW_LINE> self.transition_function = dict() <NEW_LINE> self.add_transition_function() <NEW_LINE> return <NEW_LINE> <DEDENT> def add_transition_function(self): <NEW_LINE> <INDENT> self.transition_function[(0, 'a')] = 1 <NEW_LINE> self.transition_function[(0, 'b')] = 2 <NEW_LINE> self.transition_function[(0, 'c')] = 3 <NEW_LINE> self.transition_function[(0, 'd')] = 0 <NEW_LINE> self.transition_function[(1, 'a')] = 1 <NEW_LINE> self.transition_function[(1, 'b')] = 2 <NEW_LINE> self.transition_function[(1, 'c')] = 3 <NEW_LINE> self.transition_function[(1, 'd')] = 0 <NEW_LINE> self.transition_function[(2, 'a')] = 1 <NEW_LINE> self.transition_function[(2, 'b')] = 2 <NEW_LINE> self.transition_function[(2, 'c')] = 3 <NEW_LINE> self.transition_function[(2, 'd')] = 0 <NEW_LINE> self.transition_function[(3, 'a')] = 1 <NEW_LINE> self.transition_function[(3, 'b')] = 2 <NEW_LINE> self.transition_function[(3, 'c')] = 3 <NEW_LINE> self.transition_function[(3, 'd')] = 0 <NEW_LINE> <DEDENT> def transition_to_state_with_input(self, input_value): <NEW_LINE> <INDENT> if (self.current_state, input_value) not in self.transition_function.keys(): <NEW_LINE> <INDENT> self.current_state = None <NEW_LINE> return <NEW_LINE> <DEDENT> self.current_state = self.transition_function[(self.current_state, input_value)] <NEW_LINE> return <NEW_LINE> <DEDENT> def in_accept_state(self): <NEW_LINE> <INDENT> return self.current_state in self.accept_states <NEW_LINE> <DEDENT> def go_to_initial_state(self): <NEW_LINE> <INDENT> self.current_state = self.start_state <NEW_LINE> return <NEW_LINE> <DEDENT> def run_with_input_list(self, input_list): <NEW_LINE> <INDENT> 
self.go_to_initial_state() <NEW_LINE> for inp in input_list: <NEW_LINE> <INDENT> self.transition_to_state_with_input(inp) <NEW_LINE> continue <NEW_LINE> <DEDENT> return self.in_accept_state() <NEW_LINE> <DEDENT> pass
|
This class is a small implementation of a Deterministic Finite Automata.
|
625990664e4d562566373b64
|
class NasNetANormalCell(NasNetABaseCell): <NEW_LINE> <INDENT> def __init__(self, num_conv_filters, drop_path_keep_prob, total_num_cells, total_training_steps): <NEW_LINE> <INDENT> operations = ['separable_5x5_2', 'separable_3x3_2', 'separable_5x5_2', 'separable_3x3_2', 'avg_pool_3x3', 'none', 'avg_pool_3x3', 'avg_pool_3x3', 'separable_3x3_2', 'none'] <NEW_LINE> used_hiddenstates = [1, 0, 0, 0, 0, 0, 0] <NEW_LINE> hiddenstate_indices = [0, 1, 1, 1, 0, 1, 1, 1, 0, 0] <NEW_LINE> super(NasNetANormalCell, self).__init__(num_conv_filters, operations, used_hiddenstates, hiddenstate_indices, drop_path_keep_prob, total_num_cells, total_training_steps)
|
NASNetA Normal Cell.
|
6259906697e22403b383c66a
|
class Sequence(Iterable): <NEW_LINE> <INDENT> def __init__(self, cells, name=None): <NEW_LINE> <INDENT> Iterable.__init__(self, cells, name) <NEW_LINE> <DEDENT> def __call__(self, x): <NEW_LINE> <INDENT> super().__call__(x) <NEW_LINE> for cell in self: <NEW_LINE> <INDENT> x = cell(x) <NEW_LINE> <DEDENT> return x <NEW_LINE> <DEDENT> def to_deepnet_cell(self, provider): <NEW_LINE> <INDENT> if not isinstance(provider, n2d2.provider.DataProvider): <NEW_LINE> <INDENT> raise n2d2.error_handler.WrongInputType("provider", type(provider), ["n2d2.provider.DataProvider"]) <NEW_LINE> <DEDENT> dummy_input = n2d2.Tensor(provider.shape()) <NEW_LINE> provider._deepnet = n2d2.deepnet.DeepNet() <NEW_LINE> provider._deepnet.set_provider(provider) <NEW_LINE> provider._deepnet.N2D2().initialize() <NEW_LINE> dummy_input = dummy_input._set_cell(provider) <NEW_LINE> dummy_output = self(dummy_input) <NEW_LINE> N2D2_deepnet = dummy_output.get_deepnet().N2D2() <NEW_LINE> N2D2_target = N2D2.TargetScore("Target", dummy_output.cell.N2D2(), provider.N2D2()) <NEW_LINE> N2D2_deepnet.addTarget(N2D2_target) <NEW_LINE> N2D2_deepnet.setDatabase(provider.N2D2().getDatabase()) <NEW_LINE> return DeepNetCell(N2D2_deepnet)
|
This implementation of the Iterable class describes a sequential (vertical) ordering of cells.
|
625990664428ac0f6e659c8f
|
class KeyPressInteractorStyle(vtk.vtkInteractorStyleTrackballCamera): <NEW_LINE> <INDENT> def __init__(self, renWin, parent=None): <NEW_LINE> <INDENT> self.parent = parent <NEW_LINE> self.AddObserver("KeyPressEvent", self.keyPressEvent) <NEW_LINE> self.OUTPUT_FILE_NAME = "map_output.png" <NEW_LINE> self.renWin = renWin <NEW_LINE> <DEDENT> def keyPressEvent(self, obj, event): <NEW_LINE> <INDENT> key = self.parent.GetKeySym() <NEW_LINE> if (key == "Return" or key == "s"): <NEW_LINE> <INDENT> w2if = vtk.vtkWindowToImageFilter() <NEW_LINE> w2if.SetInput(self.renWin) <NEW_LINE> w2if.Update() <NEW_LINE> writer = vtk.vtkPNGWriter() <NEW_LINE> writer.SetFileName(self.OUTPUT_FILE_NAME) <NEW_LINE> writer.SetInputConnection(w2if.GetOutputPort()) <NEW_LINE> writer.Write()
|
An interactor style class extending vtkInteractorStyleTrackballCamera
that saves a screenshot of the window when the 's' or Return key are
pressed.
|
62599066498bea3a75a591af
|
class TestStatus4(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testStatus4(self): <NEW_LINE> <INDENT> pass
|
Status4 unit test stubs
|
62599066796e427e5384fed4
|
class ScrapingRuleFormatError(scraper_utils.NewsScrapperError): <NEW_LINE> <INDENT> pass
|
Indicates that a rule has invalid format.
|
6259906666673b3332c31b5a
|
class TestCollectionApi(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.api = ibmwex.apis.collection_api.CollectionApi() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_create(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_delete(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_is_indexing_enabled(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_list(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_rebuild(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_set_indexing_enabled(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_status(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_status_all(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_update(self): <NEW_LINE> <INDENT> pass
|
CollectionApi unit test stubs
|
62599066ac7a0e7691f73c42
|
class Location(collections.namedtuple('Location', ('row', 'column'))): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def zero(cls): <NEW_LINE> <INDENT> return cls(0, 0) <NEW_LINE> <DEDENT> def __add__(self, other): <NEW_LINE> <INDENT> return Location(self[0] + other[0], self[1] + other[1]) <NEW_LINE> <DEDENT> def __sub__(self, other): <NEW_LINE> <INDENT> return Location(self[0] - other[0], self[1] - other[1]) <NEW_LINE> <DEDENT> def __mul__(self, value): <NEW_LINE> <INDENT> return Location(self[0] * value, self[1] * value) <NEW_LINE> <DEDENT> def above(self, distance=1): <NEW_LINE> <INDENT> return Location(self[0] - distance, self[1]) <NEW_LINE> <DEDENT> def below(self, distance=1): <NEW_LINE> <INDENT> return Location(self[0] + distance, self[1]) <NEW_LINE> <DEDENT> def left(self, distance=1): <NEW_LINE> <INDENT> return Location(self[0], self[1] - distance) <NEW_LINE> <DEDENT> def right(self, distance=1): <NEW_LINE> <INDENT> return Location(self[0], self[1] + distance) <NEW_LINE> <DEDENT> def relative(self, direction, distance=1): <NEW_LINE> <INDENT> return self + (direction * distance) <NEW_LINE> <DEDENT> def adjacent(self): <NEW_LINE> <INDENT> return {self + d for d in ((0, 1), (0, -1), (1, 0), (-1, 0))} <NEW_LINE> <DEDENT> def diagonals(self): <NEW_LINE> <INDENT> return {self + d for d in ((1, 1), (-1, 1), (1, -1), (-1, -1))} <NEW_LINE> <DEDENT> def surrounding(self): <NEW_LINE> <INDENT> return self.adjacent() | self.diagonals() <NEW_LINE> <DEDENT> def relatives(self, directions): <NEW_LINE> <INDENT> for direction in directions: <NEW_LINE> <INDENT> yield self + direction
|
Location reperesents a location in a 2D row-column space. It is primarily
a helper class over regular tuples of (row, column). It assumes that +row
is down and +column is right.
Note that Location does *not* perform any type checking on its arguments;
all the type checking is performed by the validators of the Grid classes.
|
6259906699cbb53fe6832641
|
class CompositeMetric(MetricBase): <NEW_LINE> <INDENT> def __init__(self, name=None): <NEW_LINE> <INDENT> super(CompositeMetric, self).__init__(name) <NEW_LINE> self._metrics = [] <NEW_LINE> <DEDENT> def add_metric(self, metric): <NEW_LINE> <INDENT> if not isinstance(metric, MetricBase): <NEW_LINE> <INDENT> raise ValueError("SubMetric should be inherit from MetricBase.") <NEW_LINE> <DEDENT> self._metrics.append(metric) <NEW_LINE> <DEDENT> def update(self, preds, labels): <NEW_LINE> <INDENT> for m in self._metrics: <NEW_LINE> <INDENT> m.update(preds, labels) <NEW_LINE> <DEDENT> <DEDENT> def eval(self): <NEW_LINE> <INDENT> ans = [] <NEW_LINE> for m in self._metrics: <NEW_LINE> <INDENT> ans.append(m.eval()) <NEW_LINE> <DEDENT> return ans
|
This op creates a container that contains the union of all the added metrics.
After the metrics added in, calling eval() method will compute all the contained metrics automatically.
CAUTION: only metrics with the SAME argument list can be added in a CompositeMetric instance.
Inherit from: `MetricBase <https://www.paddlepaddle.org.cn/documentation/docs/zh/1.5/api_cn/metrics_cn.html#paddle.fluid.metrics.MetricBase>`_
Args:
name (str, optional): Metric name. For details, please refer to :ref:`api_guide_Name`. Default is None.
Examples:
.. code-block:: python
import paddle.fluid as fluid
import numpy as np
preds = [[0.1], [0.7], [0.8], [0.9], [0.2],
[0.2], [0.3], [0.5], [0.8], [0.6]]
labels = [[0], [1], [1], [1], [1],
[0], [0], [0], [0], [0]]
preds = np.array(preds)
labels = np.array(labels)
comp = fluid.metrics.CompositeMetric()
precision = fluid.metrics.Precision()
recall = fluid.metrics.Recall()
comp.add_metric(precision)
comp.add_metric(recall)
comp.update(preds=preds, labels=labels)
numpy_precision, numpy_recall = comp.eval()
print("expect precision: %.2f, got %.2f" % ( 3. / 5, numpy_precision ) )
print("expect recall: %.2f, got %.2f" % (3. / 4, numpy_recall ) )
|
62599066cb5e8a47e493cd33
|
@public <NEW_LINE> class SubscribeOptions(object): <NEW_LINE> <INDENT> __slots__ = ( 'match', 'details', 'details_arg', 'get_retained', 'forward_for', 'correlation_id', 'correlation_uri', 'correlation_is_anchor', 'correlation_is_last', ) <NEW_LINE> def __init__(self, match=None, details=None, details_arg=None, forward_for=None, get_retained=None, correlation_id=None, correlation_uri=None, correlation_is_anchor=None, correlation_is_last=None): <NEW_LINE> <INDENT> assert(match is None or (type(match) == six.text_type and match in [u'exact', u'prefix', u'wildcard'])) <NEW_LINE> assert(details is None or (type(details) == bool and details_arg is None)) <NEW_LINE> assert(details_arg is None or type(details_arg) == str) <NEW_LINE> assert(get_retained is None or type(get_retained) is bool) <NEW_LINE> assert(forward_for is None or type(forward_for) == list) <NEW_LINE> if forward_for: <NEW_LINE> <INDENT> for ff in forward_for: <NEW_LINE> <INDENT> assert type(ff) == dict <NEW_LINE> assert 'session' in ff and type(ff['session']) in six.integer_types <NEW_LINE> assert 'authid' in ff and (ff['authid'] is None or type(ff['authid']) == six.text_type) <NEW_LINE> assert 'authrole' in ff and type(ff['authrole']) == six.text_type <NEW_LINE> <DEDENT> <DEDENT> self.match = match <NEW_LINE> self.details = details <NEW_LINE> if details: <NEW_LINE> <INDENT> self.details_arg = 'details' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.details_arg = details_arg <NEW_LINE> <DEDENT> self.get_retained = get_retained <NEW_LINE> self.forward_for = forward_for <NEW_LINE> self.correlation_id = correlation_id <NEW_LINE> self.correlation_uri = correlation_uri <NEW_LINE> self.correlation_is_anchor = correlation_is_anchor <NEW_LINE> self.correlation_is_last = correlation_is_last <NEW_LINE> <DEDENT> def message_attr(self): <NEW_LINE> <INDENT> options = {} <NEW_LINE> if self.match is not None: <NEW_LINE> <INDENT> options[u'match'] = self.match <NEW_LINE> <DEDENT> if self.get_retained is not None: 
<NEW_LINE> <INDENT> options[u'get_retained'] = self.get_retained <NEW_LINE> <DEDENT> if self.forward_for is not None: <NEW_LINE> <INDENT> options[u'forward_for'] = self.forward_for <NEW_LINE> <DEDENT> return options <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return u"SubscribeOptions(match={}, details={}, details_arg={}, get_retained={}, forward_for={})".format(self.match, self.details, self.details_arg, self.get_retained, self.forward_for)
|
Used to provide options for subscribing in
:func:`autobahn.wamp.interfaces.ISubscriber.subscribe`.
|
625990665166f23b2e244b2e
|
class NoteModelSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> user = serializers.SlugRelatedField(slug_field="username", required=False, allow_null=True, queryset=User.objects.all()) <NEW_LINE> def create(self, validated_data): <NEW_LINE> <INDENT> request = self.context["request"] <NEW_LINE> user = request.user <NEW_LINE> if user.is_authenticated: <NEW_LINE> <INDENT> validated_data["user"] = user <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> validated_data["user"] = None <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> ip = request.META['HTTP_X_REAL_IP'] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> ip = request.META["REMOTE_ADDR"] <NEW_LINE> <DEDENT> validated_data["address"] = ip <NEW_LINE> instance = super().create(validated_data=validated_data) <NEW_LINE> return instance <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> model = Note <NEW_LINE> fields = ("id", "user", "content", "address", "time_added", "is_deleted")
|
Note Model Serializer
|
62599066aad79263cf42ff17
|
class PassCollegeViewset(APIView): <NEW_LINE> <INDENT> def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> idcard = request.data.get("idcard", 0) <NEW_LINE> examcode = request.data.get("examcode", 0) <NEW_LINE> student = CollegeStudent.objects.filter(idcard=idcard, examcode=examcode).first() <NEW_LINE> if student == None: <NEW_LINE> <INDENT> return Response({"status_code": status.HTTP_200_OK, "message": "ok", "results": "考生不存在", }, status=status.HTTP_200_OK) <NEW_LINE> <DEDENT> elif student.is_pass == True: <NEW_LINE> <INDENT> major = Major.objects.filter(code=student.expectation).first() <NEW_LINE> return Response({"status_code": status.HTTP_200_OK, "message": "ok", "results": { "name": student.name, "expectation": student.expectation, "subject": major.name, "college": major.college.name }, }, status=status.HTTP_200_OK) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Response({"status_code": status.HTTP_200_OK, "message": "ok", "results": "不通过", }, status=status.HTTP_200_OK)
|
用户查看是否通过考试
|
625990664428ac0f6e659c90
|
class TimezoneNotSpecifiedError(TimestampError): <NEW_LINE> <INDENT> pass
|
This error is raised when timezone is not specified.
|
6259906632920d7e50bc77a5
|
class nn_se_lr003(p40): <NEW_LINE> <INDENT> learning_rate = 0.003
|
cnn2blstm
|
6259906663d6d428bbee3e38
|
class CertificateRequestTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> from acme.messages import CertificateRequest <NEW_LINE> self.req = CertificateRequest(csr=CSR, authorizations=('foo',)) <NEW_LINE> <DEDENT> def test_json_de_serializable(self): <NEW_LINE> <INDENT> self.assertTrue(isinstance(self.req, jose.JSONDeSerializable)) <NEW_LINE> from acme.messages import CertificateRequest <NEW_LINE> self.assertEqual( self.req, CertificateRequest.from_json(self.req.to_json()))
|
Tests for acme.messages.CertificateRequest.
|
625990668e71fb1e983bd224
|
@navigator.register(FilterEntity, 'All') <NEW_LINE> class ShowAllFilters(NavigateStep): <NEW_LINE> <INDENT> VIEW = FiltersView <NEW_LINE> def am_i_here(self, *args, **kwargs): <NEW_LINE> <INDENT> role_name = kwargs.get('role_name') <NEW_LINE> return self.view.is_displayed and self.view.breadcrumb.locations[1] in ( role_name, f'{role_name} filters', ) <NEW_LINE> <DEDENT> def prerequisite(self, *args, **kwargs): <NEW_LINE> <INDENT> return self.navigate_to(RoleEntity, 'All', **kwargs) <NEW_LINE> <DEDENT> def step(self, *args, **kwargs): <NEW_LINE> <INDENT> role_name = kwargs.get('role_name') <NEW_LINE> self.parent.search(role_name) <NEW_LINE> self.parent.table.row(name=role_name)['Actions'].widget.fill('Filters')
|
Navigate to All Role Filters page by pressing 'Filters' button on Roles
List view.
Args:
role_name: name of role
|
62599066a219f33f346c7f65
|
class Extractor(object): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def _make_social_stats( cls, threads=0, comments=0, replies=0, upvotes=0, followers=0, comments_generated=0, threads_read=0 ): <NEW_LINE> <INDENT> return { DiscussionExportFields.THREADS: threads, DiscussionExportFields.COMMENTS: comments, DiscussionExportFields.REPLIES: replies, DiscussionExportFields.UPVOTES: upvotes, DiscussionExportFields.FOLOWERS: followers, DiscussionExportFields.COMMENTS_GENERATED: comments_generated, DiscussionExportFields.THREADS_READ: threads_read, } <NEW_LINE> <DEDENT> def _get_users(self, course_key): <NEW_LINE> <INDENT> users = CourseEnrollment.objects.users_enrolled_in(course_key) <NEW_LINE> return {user.id: user for user in users} <NEW_LINE> <DEDENT> def _get_social_stats(self, course_key, end_date=None, thread_type=None, thread_ids=None): <NEW_LINE> <INDENT> return { int(user_id): data for user_id, data in User.all_social_stats( str(course_key), end_date=end_date, thread_type=thread_type, thread_ids=thread_ids ).iteritems() } <NEW_LINE> <DEDENT> def _merge_user_data_and_social_stats(self, userdata, social_stats): <NEW_LINE> <INDENT> result = [] <NEW_LINE> for user_id, user in userdata.iteritems(): <NEW_LINE> <INDENT> user_record = { DiscussionExportFields.USER_ID: user.id, DiscussionExportFields.USERNAME: user.username, DiscussionExportFields.EMAIL: user.email, DiscussionExportFields.FIRST_NAME: user.first_name, DiscussionExportFields.LAST_NAME: user.last_name, } <NEW_LINE> stats = social_stats.get(user_id, self._make_social_stats()) <NEW_LINE> result.append(utils.merge_dict(user_record, stats)) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def extract(self, course_key, end_date=None, thread_type=None, thread_ids=None): <NEW_LINE> <INDENT> users = self._get_users(course_key) <NEW_LINE> social_stats = self._get_social_stats( course_key, end_date=end_date, thread_type=thread_type, thread_ids=thread_ids ) <NEW_LINE> return 
self._merge_user_data_and_social_stats(users, social_stats)
|
Extracts discussion participation data from db and cs_comments_service
|
625990661f037a2d8b9e541a
|
class SQLCache(object): <NEW_LINE> <INDENT> def __init__(self, filepath, worker_keepalive=2.0, commit_spacing=2.0): <NEW_LINE> <INDENT> self._path = os.path.abspath(filepath) <NEW_LINE> self._worker_keepalive = worker_keepalive <NEW_LINE> self._commit_spacing = commit_spacing <NEW_LINE> self._work_queue = Queue(maxsize=64) <NEW_LINE> self._worker_sem = Semaphore() <NEW_LINE> self._worker = None <NEW_LINE> path, filename = os.path.split(self._path) <NEW_LINE> if not os.path.exists(path): <NEW_LINE> <INDENT> os.makedirs(path) <NEW_LINE> <DEDENT> <DEDENT> def _handoff_work(self, action): <NEW_LINE> <INDENT> self._work_queue.put(action) <NEW_LINE> if self._worker_sem.acquire(blocking=False): <NEW_LINE> <INDENT> self._worker = _SQLStoreThread( self._path, self._work_queue, self._worker_keepalive, self._commit_spacing, self._worker_sem ) <NEW_LINE> <DEDENT> <DEDENT> def get(self, key): <NEW_LINE> <INDENT> done = Event() <NEW_LINE> box = [] <NEW_LINE> def act(cur): <NEW_LINE> <INDENT> result = cur.execute("SELECT val FROM bucket WHERE key = ?", (key,)) <NEW_LINE> try: <NEW_LINE> <INDENT> box.append(next(result)[0]) <NEW_LINE> <DEDENT> except StopIteration: <NEW_LINE> <INDENT> box.append(None) <NEW_LINE> <DEDENT> done.set() <NEW_LINE> <DEDENT> self._handoff_work(act) <NEW_LINE> done.wait() <NEW_LINE> return box.pop() <NEW_LINE> <DEDENT> def set(self, key, value): <NEW_LINE> <INDENT> def act(cur): <NEW_LINE> <INDENT> cur.execute("REPLACE INTO bucket (key, val) VALUES (?, ?)", (key, value)) <NEW_LINE> <DEDENT> self._handoff_work(act) <NEW_LINE> <DEDENT> def delete(self, key): <NEW_LINE> <INDENT> def act(cur): <NEW_LINE> <INDENT> cur.execute("DELETE FROM bucket WHERE key = ?", (key,)) <NEW_LINE> <DEDENT> self._handoff_work(act) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> if self._worker: <NEW_LINE> <INDENT> self._worker.join(0) <NEW_LINE> <DEDENT> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> self.close()
|
Maintains a worker thread with an active cursor in a sqlite database that commits periodically.
Functions as a bare-bones key-value store on the front end (text to bytes). Rather more performant
than committing every interaction, and does not require anything to be installed or set up.
|
6259906601c39578d7f142e4
|
class SelectSpecies(SelectionWindow): <NEW_LINE> <INDENT> def __init__(self, locations, title="Species Selection", description="Select the species:", width=600, slot=0): <NEW_LINE> <INDENT> self.locations = locations <NEW_LINE> super(SelectSpecies, self).__init__(title, description, width, slot) <NEW_LINE> self.back_signal = 'species-dialog-back' <NEW_LINE> self.saved_signal = 'species-selection-saved' <NEW_LINE> self.info_key = 'info-spe-selection' <NEW_LINE> self.button_load_data.hide() <NEW_LINE> <DEDENT> def create_model(self): <NEW_LINE> <INDENT> db = setlyze.database.AccessLocalDB() <NEW_LINE> self.store = gtk.ListStore(gobject.TYPE_INT, gobject.TYPE_STRING,gobject.TYPE_STRING, gobject.TYPE_STRING,gobject.TYPE_STRING, gobject.TYPE_STRING,gobject.TYPE_STRING, gobject.TYPE_STRING,gobject.TYPE_STRING, gobject.TYPE_STRING,gobject.TYPE_BOOLEAN, ) <NEW_LINE> for id,common,latin,invasive_in_nl,phylum,cls,order,family,genus,species,subspecies in db.get_species(self.locations): <NEW_LINE> <INDENT> self.store.append([id,latin,common,phylum,cls,order,family,genus,species,subspecies,invasive_in_nl]) <NEW_LINE> <DEDENT> self.model = gtk.TreeModelSort(self.store) <NEW_LINE> self.model.set_sort_column_id(1, gtk.SORT_ASCENDING) <NEW_LINE> <DEDENT> def create_columns(self, treeview): <NEW_LINE> <INDENT> columns = ("Species (Latin)","Species (common)","Phylum","Class","Order","Family","Genus","Species","Subspecies","Invasive in NL") <NEW_LINE> renderer_text = gtk.CellRendererText() <NEW_LINE> renderer_toggle = gtk.CellRendererToggle() <NEW_LINE> for i,name in enumerate(columns, start=1): <NEW_LINE> <INDENT> if name == "Invasive in NL": <NEW_LINE> <INDENT> column = gtk.TreeViewColumn(name, renderer_toggle, active=i) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> column = gtk.TreeViewColumn(name, renderer_text, text=i) <NEW_LINE> <DEDENT> if i < 3: <NEW_LINE> <INDENT> column.set_expand(True) <NEW_LINE> <DEDENT> column.set_sort_column_id(i) <NEW_LINE> column.set_resizable(True) 
<NEW_LINE> treeview.append_column(column)
|
Display a selection dialog that allows the user to make a
selection from the SETL species in the local database.
Design Part: 1.88
|
625990663539df3088ecd9fd
|
class Subject(models.Model): <NEW_LINE> <INDENT> code = models.CharField(_('code'), max_length=16, blank=True, unique=True) <NEW_LINE> name = models.CharField(_('name'), max_length=256) <NEW_LINE> short_name = models.CharField(_('short name'), max_length=32, blank=True, default="") <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "{} ({})".format(self.name, self.code)
|
e. g. Math 101
|
62599066796e427e5384fed5
|
@keras_export('keras.layers.ReLU') <NEW_LINE> class ReLU(Layer): <NEW_LINE> <INDENT> def __init__(self, max_value=None, negative_slope=0, threshold=0, **kwargs): <NEW_LINE> <INDENT> super(ReLU, self).__init__(**kwargs) <NEW_LINE> if max_value is not None and max_value < 0.: <NEW_LINE> <INDENT> raise ValueError('max_value of a ReLU layer cannot be a negative ' 'value. Got: %s' % max_value) <NEW_LINE> <DEDENT> if negative_slope is None or negative_slope < 0.: <NEW_LINE> <INDENT> raise ValueError('negative_slope of a ReLU layer cannot be a negative ' 'value. Got: %s' % negative_slope) <NEW_LINE> <DEDENT> if threshold is None or threshold < 0.: <NEW_LINE> <INDENT> raise ValueError('threshold of a ReLU layer cannot be a negative ' 'value. Got: %s' % threshold) <NEW_LINE> <DEDENT> self.supports_masking = True <NEW_LINE> if max_value is not None: <NEW_LINE> <INDENT> max_value = backend.cast_to_floatx(max_value) <NEW_LINE> <DEDENT> self.max_value = max_value <NEW_LINE> self.negative_slope = backend.cast_to_floatx(negative_slope) <NEW_LINE> self.threshold = backend.cast_to_floatx(threshold) <NEW_LINE> <DEDENT> def call(self, inputs): <NEW_LINE> <INDENT> return backend.relu(inputs, alpha=self.negative_slope, max_value=self.max_value, threshold=self.threshold) <NEW_LINE> <DEDENT> def get_config(self): <NEW_LINE> <INDENT> config = { 'max_value': self.max_value, 'negative_slope': self.negative_slope, 'threshold': self.threshold } <NEW_LINE> base_config = super(ReLU, self).get_config() <NEW_LINE> return dict(list(base_config.items()) + list(config.items())) <NEW_LINE> <DEDENT> @tf_utils.shape_type_conversion <NEW_LINE> def compute_output_shape(self, input_shape): <NEW_LINE> <INDENT> return input_shape
|
Rectified Linear Unit activation function.
With default values, it returns element-wise `max(x, 0)`.
Otherwise, it follows:
```
f(x) = max_value if x >= max_value
f(x) = x if threshold <= x < max_value
f(x) = negative_slope * (x - threshold) otherwise
```
Usage:
>>> layer = tf.keras.layers.ReLU()
>>> output = layer([-3.0, -1.0, 0.0, 2.0])
>>> list(output.numpy())
[0.0, 0.0, 0.0, 2.0]
>>> layer = tf.keras.layers.ReLU(max_value=1.0)
>>> output = layer([-3.0, -1.0, 0.0, 2.0])
>>> list(output.numpy())
[0.0, 0.0, 0.0, 1.0]
>>> layer = tf.keras.layers.ReLU(negative_slope=1.0)
>>> output = layer([-3.0, -1.0, 0.0, 2.0])
>>> list(output.numpy())
[-3.0, -1.0, 0.0, 2.0]
>>> layer = tf.keras.layers.ReLU(threshold=1.5)
>>> output = layer([-3.0, -1.0, 1.0, 2.0])
>>> list(output.numpy())
[0.0, 0.0, 0.0, 2.0]
Input shape:
Arbitrary. Use the keyword argument `input_shape`
(tuple of integers, does not include the batch axis)
when using this layer as the first layer in a model.
Output shape:
Same shape as the input.
Args:
max_value: Float >= 0. Maximum activation value. Default to None, which
means unlimited.
negative_slope: Float >= 0. Negative slope coefficient. Default to 0.
threshold: Float >= 0. Threshold value for thresholded activation. Default
to 0.
|
62599066435de62698e9d568
|
class TileFeatured(BaseTile): <NEW_LINE> <INDENT> security = ClassSecurityInfo() <NEW_LINE> implements(ITileFeatured) <NEW_LINE> portal_type = 'TileFeatured' <NEW_LINE> _at_rename_after_creation = True <NEW_LINE> schema = TileFeatured_schema <NEW_LINE> columns = 6
|
Reserve Content for TileFeatured
|
62599066627d3e7fe0e085ea
|
class Sander(Sandable): <NEW_LINE> <INDENT> def __init__(self, width, length, ballSize, units): <NEW_LINE> <INDENT> self.editor = [ DialogFloat("h", "Coefficient 'H'", default=0.01, min=.001, max=.05), DialogFloat("a", "Coefficient 'A'", default=10.0, min=.5, max=45.0), DialogFloat("b", "Coefficient 'B'", default=28.0, min=.5, max=45.0), DialogFloat("c", "Coefficient 'C'", default=8.0 / 3.0, min=.1, max=5.0), DialogInt("points", "Number of points", default=5000), DialogBreak(), DialogFloat("xOffset", "X Origin", units=units, default=0.0), DialogFloat("yOffset", "Y Origin", units=units, default=0.0), DialogFloat("width", "Width", units=units, default=width, min=1.0, max=1000.0), DialogFloat("length", "Length", units=units, default=length, min=1.0, max=1000.0), ] <NEW_LINE> <DEDENT> def generate(self, params): <NEW_LINE> <INDENT> x0 = 0.1 <NEW_LINE> y0 = 0.0 <NEW_LINE> z0 = 0.0 <NEW_LINE> h = params.h <NEW_LINE> a = params.a <NEW_LINE> b = params.b <NEW_LINE> c = params.c <NEW_LINE> chain = [] <NEW_LINE> for i in range(params.points): <NEW_LINE> <INDENT> x1 = x0 + h * a * (y0 - x0) <NEW_LINE> y1 = y0 + h * (x0 * (b - z0) - y0) <NEW_LINE> z1 = z0 + h * (x0 * y0 - c * z0) <NEW_LINE> x0, y0, z0 = x1, y1, z1 <NEW_LINE> if math.isinf(x0) or math.isnan(x0) or math.isinf(y0) or math.isnan(y0): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> chain.append((x0, y0)) <NEW_LINE> <DEDENT> extents = [(params.xOffset, params.yOffset), (params.xOffset + params.width, params.yOffset + params.length)] <NEW_LINE> return Chains.fit([chain], extents)
|
### Draw a Lorenz Attractor (a type of fractal)
#### Hints
Read the Wikipedia article on [Lorenz Systems](http://en.wikipedia.org/wiki/Lorenz_system)
#### Parameters
* **h, a, b, c** - coefficients that describe parameters for the chaotic differential equations.
* **Number of points** - number of points to draw in the Lorenz curve.
* **X and Y Origin** - lower left corner of the drawing. Usually not worth changing.
* **Width** and **Length** - how big the figure should be. Probably not worth changing.
|
62599066d486a94d0ba2d721
|
class WiredDiscoveryV1(CDPDataItem): <NEW_LINE> <INDENT> type = 0x800F <NEW_LINE> definition = [DIUInt8Attr('hardware_version'), DIUInt32Attr('server_instance'), DIUInt64Attr('capabilities'), DIVariableLengthBytesAttr('payload')] <NEW_LINE> def get_hardware_version(self): <NEW_LINE> <INDENT> return self.hardware_version & 0x1F
|
CDP Data Item: Ciholas Data Protocol Wired Descovery Data Item Definition
|
625990665166f23b2e244b30
|
@register_snippet <NEW_LINE> class ActionButtons(ClusterableModel, models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=255) <NEW_LINE> small = models.BooleanField(verbose_name="Small Buttons", default=False) <NEW_LINE> fit = models.BooleanField(verbose_name="Fit Buttons", default=False) <NEW_LINE> stacked = models.BooleanField(verbose_name="Stacked Buttons", default=False) <NEW_LINE> panels = [ FieldPanel('name'), MultiFieldPanel([ FieldPanel('small'), FieldPanel('fit'), FieldPanel('stacked'), ], heading="Action Button Group Settings"), InlinePanel('buttons', label="Action Buttons"), ] <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = "Action Button" <NEW_LINE> verbose_name_plural = "Action Buttons" <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name
|
Action Buttons
|
6259906656b00c62f0fb402d
|
class AniList: <NEW_LINE> <INDENT> api_url = API_URL <NEW_LINE> query = QUERY <NEW_LINE> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> def json(self): <NEW_LINE> <INDENT> data = { 'query': self.query, 'variables': { 'name': self.name } } <NEW_LINE> response = requests.post(self.api_url, json=data) <NEW_LINE> return response.text <NEW_LINE> <DEDENT> def info(self): <NEW_LINE> <INDENT> text = self.json() <NEW_LINE> parsed = json.loads(text) <NEW_LINE> return parsed['data']['Media']
|
Main class to communicate with AniList API.
Attributes
----------
api_url : str
AniList API url
query : str
Query that you want to sent to. It must be GraphQL query and
exists in AniList API.
Parameters
----------
name : str
Name of the manga that you want to get.
|
6259906644b2445a339b7510
|
class SparkChartView(proto.Message): <NEW_LINE> <INDENT> spark_chart_type = proto.Field( proto.ENUM, number=1, enum=metrics.SparkChartType, ) <NEW_LINE> min_alignment_period = proto.Field( proto.MESSAGE, number=2, message=duration_pb2.Duration, )
|
A sparkChart is a small chart suitable for inclusion in a
table-cell or inline in text. This message contains the
configuration for a sparkChart to show up on a Scorecard,
showing recent trends of the scorecard's timeseries.
Attributes:
spark_chart_type (google.cloud.monitoring_dashboard_v1.types.SparkChartType):
Required. The type of sparkchart to show in
this chartView.
min_alignment_period (google.protobuf.duration_pb2.Duration):
The lower bound on data point frequency in
the chart implemented by specifying the minimum
alignment period to use in a time series query.
For example, if the data is published once every
10 minutes it would not make sense to fetch and
align data at one minute intervals. This field
is optional and exists only as a hint.
|
62599066379a373c97d9a77e
|
class Float32Array(Array): <NEW_LINE> <INDENT> pass
|
32-bit floating point number
|
6259906676e4537e8c3f0ce3
|
class XMLTreeFile(ElementTree.ElementTree, XMLBackup): <NEW_LINE> <INDENT> sourcebackupfile = None <NEW_LINE> def __init__(self, xml): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.sourcebackupfile = file(xml, "rb") <NEW_LINE> self.sourcebackupfile.close() <NEW_LINE> <DEDENT> except (IOError, OSError): <NEW_LINE> <INDENT> self.sourcebackupfile = TempXMLFile() <NEW_LINE> self.sourcebackupfile.write(xml) <NEW_LINE> self.sourcebackupfile.close() <NEW_LINE> <DEDENT> XMLBackup.__init__(self, self.sourcebackupfile.name) <NEW_LINE> try: <NEW_LINE> <INDENT> ElementTree.ElementTree.__init__(self, element=None, file=self.name) <NEW_LINE> <DEDENT> except expat.ExpatError: <NEW_LINE> <INDENT> raise IOError("Error parsing XML: '%s'" % xml) <NEW_LINE> <DEDENT> self.write() <NEW_LINE> self.flush() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> self.write() <NEW_LINE> self.flush() <NEW_LINE> xmlstr = StringIO.StringIO() <NEW_LINE> self.write(xmlstr) <NEW_LINE> return xmlstr.getvalue() <NEW_LINE> <DEDENT> def backup_copy(self): <NEW_LINE> <INDENT> return self.__class__(self.name) <NEW_LINE> <DEDENT> def reroot(self, xpath): <NEW_LINE> <INDENT> rerooted = self.backup_copy() <NEW_LINE> element = rerooted.find(xpath) <NEW_LINE> if element is None: <NEW_LINE> <INDENT> del rerooted <NEW_LINE> raise KeyError("No element found at %s" % xpath) <NEW_LINE> <DEDENT> rerooted._setroot(element) <NEW_LINE> return rerooted <NEW_LINE> <DEDENT> def get_parent_map(self, element=None): <NEW_LINE> <INDENT> d = {} <NEW_LINE> for p in self.getiterator(element): <NEW_LINE> <INDENT> for c in p: <NEW_LINE> <INDENT> d[c] = p <NEW_LINE> <DEDENT> <DEDENT> return d <NEW_LINE> <DEDENT> def get_parent(self, element, relative_root=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.get_parent_map(relative_root)[element] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def get_xpath(self, element): <NEW_LINE> <INDENT> parent_map = 
self.get_parent_map() <NEW_LINE> root = self.getroot() <NEW_LINE> assert root in parent_map.values() <NEW_LINE> if element == root: <NEW_LINE> <INDENT> return '.' <NEW_LINE> <DEDENT> path_list = [] <NEW_LINE> while element != root: <NEW_LINE> <INDENT> path_list.append(u"%s" % element.tag) <NEW_LINE> element = parent_map[element] <NEW_LINE> <DEDENT> assert element == root <NEW_LINE> path_list.reverse() <NEW_LINE> return "/".join(path_list) <NEW_LINE> <DEDENT> def remove(self, element): <NEW_LINE> <INDENT> self.get_parent(element).remove(element) <NEW_LINE> <DEDENT> def remove_by_xpath(self, xpath): <NEW_LINE> <INDENT> self.remove(self.find(xpath)) <NEW_LINE> <DEDENT> def create_by_xpath(self, xpath): <NEW_LINE> <INDENT> cur_element = self.getroot() <NEW_LINE> for tag in xpath.split('/'): <NEW_LINE> <INDENT> next_element = cur_element.find(tag) <NEW_LINE> if next_element is None: <NEW_LINE> <INDENT> next_element = ElementTree.SubElement(cur_element, tag) <NEW_LINE> <DEDENT> cur_element = next_element <NEW_LINE> <DEDENT> <DEDENT> def write(self, filename=None, encoding=ENCODING): <NEW_LINE> <INDENT> if filename is None: <NEW_LINE> <INDENT> filename = self.name <NEW_LINE> <DEDENT> ElementTree.ElementTree.write(self, filename, encoding) <NEW_LINE> <DEDENT> def read(self, xml): <NEW_LINE> <INDENT> self.__del__() <NEW_LINE> self.__init__(xml)
|
Combination of ElementTree root and auto-cleaned XML backup file.
|
62599066009cb60464d02c99
|
class Equal (Constraint): <NEW_LINE> <INDENT> def __init__ (self, other_id, message = None): <NEW_LINE> <INDENT> self.other_id = other_id <NEW_LINE> self.message = message <NEW_LINE> <DEDENT> def get_error (self, data, result, entry): <NEW_LINE> <INDENT> if result[self.other_id]==result[entry.id_]: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if self.message is not None: <NEW_LINE> <INDENT> return self.message <NEW_LINE> <DEDENT> other = filter (lambda x: x[0].id_==self.other_id, data) <NEW_LINE> assert len (other)>=1 <NEW_LINE> return "%s must match %s" % (entry.label, other[0][0].label)
|
Equal constraint: This data entry must have the same value as another data entry.
|
625990664f6381625f19a055
|
class Deck: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.cards = [] <NEW_LINE> for suit in range(4): <NEW_LINE> <INDENT> for rank in range(1, 14): <NEW_LINE> <INDENT> self.cards.append(Card(suit, rank)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> s = "" <NEW_LINE> for i in range(len(self.cards)): <NEW_LINE> <INDENT> s = s + " " * i + str(self.cards[i]) + "\n" <NEW_LINE> <DEDENT> return s <NEW_LINE> <DEDENT> def shuffle(self): <NEW_LINE> <INDENT> import random <NEW_LINE> rng = random.Random() <NEW_LINE> num_cards = len(self.cards) <NEW_LINE> for i in range(num_cards): <NEW_LINE> <INDENT> j = rng.randrange(i, num_cards) <NEW_LINE> (self.cards[i], self.cards[j]) = (self.cards[j], self.cards[i]) <NEW_LINE> <DEDENT> <DEDENT> def remove(self, card): <NEW_LINE> <INDENT> if card in self.cards: <NEW_LINE> <INDENT> self.cards.remove(card) <NEW_LINE> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def pop(self): <NEW_LINE> <INDENT> return self.cards.pop() <NEW_LINE> <DEDENT> def is_empty(self): <NEW_LINE> <INDENT> return self.cards == [] <NEW_LINE> <DEDENT> def deal(self, hands, num_cards=999): <NEW_LINE> <INDENT> num_hands = len(hands) <NEW_LINE> for i in range(num_cards): <NEW_LINE> <INDENT> if self.empty(): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> card = self.pop() <NEW_LINE> hand = hands[i % num_hands] <NEW_LINE> hand.add(card)
|
Defines a deck of cards.
|
62599066e76e3b2f99fda161
|
class InvalidRuleSource(RuleSourceException): <NEW_LINE> <INDENT> pass
|
The rule's @source decorator has invalid arguments
|
62599066baa26c4b54d50a06
|
class LoggingSetLevelCommand(sublime_plugin.WindowCommand): <NEW_LINE> <INDENT> def run(self, level=20): <NEW_LINE> <INDENT> logging.root.setLevel(ensure_loglevel_int(level)) <NEW_LINE> sublime.status_message("Logging level set to %s" % level) <NEW_LINE> print("Logging level set to %s" % level) <NEW_LINE> logger.info("Logging level set to %s", level) <NEW_LINE> settings = sublime.load_settings(SETTINGS_NAME) <NEW_LINE> if settings.get("logging_persist_changes", False): <NEW_LINE> <INDENT> settings.set("logging_root_level", level) <NEW_LINE> sublime.save_settings(SETTINGS_NAME) <NEW_LINE> logger.info("Persisting settings with logging_root_level = %s", level)
|
command key: logging_set_level
|
62599066498bea3a75a591b1
|
class EffectsListView(QListView): <NEW_LINE> <INDENT> drag_item_size = 48 <NEW_LINE> def contextMenuEvent(self, event): <NEW_LINE> <INDENT> app = get_app() <NEW_LINE> app.context_menu_object = "effects" <NEW_LINE> menu = QMenu(self) <NEW_LINE> menu.addAction(self.win.actionDetailsView) <NEW_LINE> menu.exec_(QCursor.pos()) <NEW_LINE> <DEDENT> def startDrag(self, event): <NEW_LINE> <INDENT> selected_row = self.effects_model.model.itemFromIndex(self.selectionModel().selectedIndexes()[0]).row() <NEW_LINE> icon = self.effects_model.model.item(selected_row, 0).icon() <NEW_LINE> drag = QDrag(self) <NEW_LINE> drag.setMimeData(self.effects_model.model.mimeData(self.selectionModel().selectedIndexes())) <NEW_LINE> drag.setPixmap(icon.pixmap(QSize(self.drag_item_size, self.drag_item_size))) <NEW_LINE> drag.setHotSpot(QPoint(self.drag_item_size / 2, self.drag_item_size / 2)) <NEW_LINE> drag.exec_() <NEW_LINE> <DEDENT> def clear_filter(self): <NEW_LINE> <INDENT> get_app().window.effectsFilter.setText("") <NEW_LINE> <DEDENT> def filter_changed(self): <NEW_LINE> <INDENT> if self.win.effectsFilter.text() == "": <NEW_LINE> <INDENT> self.win.actionEffectsClear.setEnabled(False) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.win.actionEffectsClear.setEnabled(True) <NEW_LINE> <DEDENT> self.refresh_view() <NEW_LINE> <DEDENT> def refresh_view(self): <NEW_LINE> <INDENT> self.effects_model.update_model() <NEW_LINE> <DEDENT> def __init__(self, *args): <NEW_LINE> <INDENT> QListView.__init__(self, *args) <NEW_LINE> self.win = get_app().window <NEW_LINE> self.effects_model = EffectsModel() <NEW_LINE> self.setAcceptDrops(True) <NEW_LINE> self.setDragEnabled(True) <NEW_LINE> self.setDropIndicatorShown(True) <NEW_LINE> self.setModel(self.effects_model.model) <NEW_LINE> self.setIconSize(QSize(131, 108)) <NEW_LINE> self.setGridSize(QSize(102, 92)) <NEW_LINE> self.setViewMode(QListView.IconMode) <NEW_LINE> self.setResizeMode(QListView.Adjust) <NEW_LINE> self.setUniformItemSizes(True) <NEW_LINE> 
self.setWordWrap(False) <NEW_LINE> self.setTextElideMode(Qt.ElideRight) <NEW_LINE> self.setStyleSheet('QListView::item { padding-top: 2px; }') <NEW_LINE> self.refresh_view() <NEW_LINE> app = get_app() <NEW_LINE> app.window.effectsFilter.textChanged.connect(self.filter_changed) <NEW_LINE> app.window.actionEffectsClear.triggered.connect(self.clear_filter)
|
A TreeView QWidget used on the main window
|
62599066d7e4931a7ef3d736
|
class SourceGroup(Source): <NEW_LINE> <INDENT> def __init__(self, group_id=None, **kwargs): <NEW_LINE> <INDENT> super(SourceGroup, self).__init__(**kwargs) <NEW_LINE> self.type = 'group' <NEW_LINE> self.group_id = group_id <NEW_LINE> <DEDENT> @property <NEW_LINE> def sender_id(self): <NEW_LINE> <INDENT> return self.group_id
|
SourceGroup.
https://devdocs.line.me/en/#source-group
JSON object which contains the source group of the event.
|
62599066dd821e528d6da532
|
class PostListView(ListView): <NEW_LINE> <INDENT> queryset = Post.published.all() <NEW_LINE> context_object_name = 'posts' <NEW_LINE> paginate_by = 3 <NEW_LINE> template_name = 'myblog/post/list.html'
|
对象列表返回类 分页 包装
|
62599066a8ecb0332587297a
|
class RMSprop(Optimizer): <NEW_LINE> <INDENT> def __init__(self, lr=0.001, rho=0.9, epsilon=None, decay=0., **kwargs): <NEW_LINE> <INDENT> super(RMSprop, self).__init__(**kwargs) <NEW_LINE> with K.name_scope(self.__class__.__name__): <NEW_LINE> <INDENT> self.lr = K.variable(lr, name='lr') <NEW_LINE> self.rho = K.variable(rho, name='rho') <NEW_LINE> self.decay = K.variable(decay, name='decay') <NEW_LINE> self.iterations = K.variable(0, dtype='int64', name='iterations') <NEW_LINE> <DEDENT> if epsilon is None: <NEW_LINE> <INDENT> epsilon = K.epsilon() <NEW_LINE> <DEDENT> self.epsilon = epsilon <NEW_LINE> self.initial_decay = decay <NEW_LINE> <DEDENT> @interfaces.legacy_get_updates_support <NEW_LINE> def get_updates(self, loss, params): <NEW_LINE> <INDENT> grads = self.get_gradients(loss, params) <NEW_LINE> accumulators = [K.zeros(K.int_shape(p), dtype=K.dtype(p)) for p in params] <NEW_LINE> self.weights = accumulators <NEW_LINE> self.updates = [K.update_add(self.iterations, 1)] <NEW_LINE> lr = self.lr <NEW_LINE> if self.initial_decay > 0: <NEW_LINE> <INDENT> lr *= (1. / (1. + self.decay * K.cast(self.iterations, K.dtype(self.decay)))) <NEW_LINE> <DEDENT> for p, g, a in zip(params, grads, accumulators): <NEW_LINE> <INDENT> new_a = self.rho * a + (1. - self.rho) * K.square(g) <NEW_LINE> self.updates.append(K.update(a, new_a)) <NEW_LINE> new_p = p - lr * g / (K.sqrt(new_a) + self.epsilon) <NEW_LINE> if getattr(p, 'constraint', None) is not None: <NEW_LINE> <INDENT> new_p = p.constraint(new_p) <NEW_LINE> <DEDENT> self.updates.append(K.update(p, new_p)) <NEW_LINE> <DEDENT> return self.updates <NEW_LINE> <DEDENT> def get_config(self): <NEW_LINE> <INDENT> config = {'lr': float(K.get_value(self.lr)), 'rho': float(K.get_value(self.rho)), 'decay': float(K.get_value(self.decay)), 'epsilon': self.epsilon} <NEW_LINE> base_config = super(RMSprop, self).get_config() <NEW_LINE> return dict(list(base_config.items()) + list(config.items()))
|
RMSProp optimizer.
It is recommended to leave the parameters of this optimizer
at their default values
(except the learning rate, which can be freely tuned).
This optimizer is usually a good choice for recurrent
neural networks.
# Arguments
lr: float >= 0. Learning rate.
rho: float >= 0.
epsilon: float >= 0. Fuzz factor. If `None`, defaults to `K.epsilon()`.
decay: float >= 0. Learning rate decay over each update.
# References
- [rmsprop: Divide the gradient by a running average of its recent magnitude](http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf)
|
62599066aad79263cf42ff1a
|
class Newchannel(_Message): <NEW_LINE> <INDENT> def process(self): <NEW_LINE> <INDENT> (headers, data) = _Message.process(self) <NEW_LINE> try: <NEW_LINE> <INDENT> headers['ChannelState'] = int(headers['ChannelState']) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> headers['ChannelState'] = None <NEW_LINE> <DEDENT> return (headers, data)
|
Indicates that a new channel has been created.
- 'AccountCode': The billing account associated with the channel; may be empty
- 'CallerIDNum': The (often) numeric identifier of the caller
- 'CallerIDName': The caller's name, on supporting channels
- 'Channel': The channel identifier used by Asterisk
- 'ChannelState': One of the following numeric values, as a string:
- '0': Not connected
- '4': Alerting
- '6': Connected
- 'ChannelStateDesc': A lexical description of the channel's current state
- 'Context': The context that the channel is currently operating in
- 'Exten': The extension the channel is currently operating in
- 'Uniqueid': An Asterisk unique value
|
62599066627d3e7fe0e085ec
|
class Solution(object): <NEW_LINE> <INDENT> def methodX(self): <NEW_LINE> <INDENT> pass
|
>>> solution = Solution()
|
625990665166f23b2e244b32
|
class SentimentPlugin(Analyser, Evaluable, models.SentimentPlugin): <NEW_LINE> <INDENT> minPolarityValue = 0 <NEW_LINE> maxPolarityValue = 1 <NEW_LINE> _terse_keys = Analyser._terse_keys + ['minPolarityValue', 'maxPolarityValue'] <NEW_LINE> def test_case(self, case): <NEW_LINE> <INDENT> if 'polarity' in case: <NEW_LINE> <INDENT> expected = case.get('expected', {}) <NEW_LINE> s = models.Sentiment(_auto_id=False) <NEW_LINE> s.marl__hasPolarity = case['polarity'] <NEW_LINE> if 'marl:hasOpinion' not in expected: <NEW_LINE> <INDENT> expected['marl:hasOpinion'] = [] <NEW_LINE> <DEDENT> expected['marl:hasOpinion'].append(s) <NEW_LINE> case['expected'] = expected <NEW_LINE> <DEDENT> super(SentimentPlugin, self).test_case(case) <NEW_LINE> <DEDENT> def normalize(self, value, minValue, maxValue): <NEW_LINE> <INDENT> nv = minValue + (value - self.minPolarityValue) * ( self.maxPolarityValue - self.minPolarityValue) / (maxValue - minValue) <NEW_LINE> return nv <NEW_LINE> <DEDENT> def as_pipe(self): <NEW_LINE> <INDENT> pipe = gsitk_compat.Pipeline([('senpy-plugin', ScikitWrapper(self))]) <NEW_LINE> pipe.name = self.id <NEW_LINE> return pipe <NEW_LINE> <DEDENT> def evaluate_func(self, X, activity=None): <NEW_LINE> <INDENT> if activity is None: <NEW_LINE> <INDENT> parameters = api.parse_params({}, self.extra_params) <NEW_LINE> activity = self.activity(parameters) <NEW_LINE> <DEDENT> entries = [] <NEW_LINE> for feat in X: <NEW_LINE> <INDENT> if isinstance(feat, list): <NEW_LINE> <INDENT> feat = ' '.join(feat) <NEW_LINE> <DEDENT> entries.append(models.Entry(nif__isString=feat)) <NEW_LINE> <DEDENT> labels = [] <NEW_LINE> for e in self.process_entries(entries, activity): <NEW_LINE> <INDENT> sent = e.sentiments[0].polarity <NEW_LINE> label = -1 <NEW_LINE> if sent == 'marl:Positive': <NEW_LINE> <INDENT> label = 1 <NEW_LINE> <DEDENT> elif sent == 'marl:Negative': <NEW_LINE> <INDENT> label = -1 <NEW_LINE> <DEDENT> labels.append(label) <NEW_LINE> <DEDENT> return labels
|
Sentiment plugins provide sentiment annotation (using Marl)
|
6259906692d797404e38970e
|
class FakeDriver(base.DriverBase): <NEW_LINE> <INDENT> def __init__(self, config, logger, systems, chassis): <NEW_LINE> <INDENT> super().__init__(config, logger) <NEW_LINE> self._systems = systems <NEW_LINE> self._chassis = chassis <NEW_LINE> <DEDENT> def get_manager(self, identity): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> system_uuid = self._systems.uuid(identity) <NEW_LINE> system_name = self._systems.name(identity) <NEW_LINE> <DEDENT> except error.AliasAccessError: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> except error.NotFound: <NEW_LINE> <INDENT> msg = 'Manager with UUID %s was not found' % identity <NEW_LINE> self._logger.error(msg) <NEW_LINE> raise error.NotFound(msg) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = {'Id': system_uuid, 'UUID': system_uuid, 'Name': '%s-Manager' % system_name} <NEW_LINE> self._logger.debug('Found manager %(mgr)s by UUID %(id)s', {'mgr': result, 'id': identity}) <NEW_LINE> return result <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def driver(self): <NEW_LINE> <INDENT> return '<static-managers>' <NEW_LINE> <DEDENT> @property <NEW_LINE> def managers(self): <NEW_LINE> <INDENT> return sorted(self._systems.systems) <NEW_LINE> <DEDENT> def get_managed_systems(self, manager): <NEW_LINE> <INDENT> return [manager['UUID']] <NEW_LINE> <DEDENT> def get_managed_chassis(self, manager): <NEW_LINE> <INDENT> if manager['UUID'] == self.managers[0]: <NEW_LINE> <INDENT> return self._chassis.chassis <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> <DEDENT> def get_managers_for_system(self, ident): <NEW_LINE> <INDENT> return [self._systems.uuid(ident)]
|
Redfish manager that copied systems.
|
6259906656b00c62f0fb402f
|
class TestImageCacheXattr(test_utils.BaseTestCase, ImageCacheTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(TestImageCacheXattr, self).setUp() <NEW_LINE> if getattr(self, 'disable', False): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.cache_dir = self.useFixture(fixtures.TempDir()).path <NEW_LINE> if not getattr(self, 'inited', False): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> import xattr <NEW_LINE> <DEDENT> except ImportError: <NEW_LINE> <INDENT> self.inited = True <NEW_LINE> self.disabled = True <NEW_LINE> self.disabled_message = ("python-xattr not installed.") <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> self.inited = True <NEW_LINE> self.disabled = False <NEW_LINE> self.config(image_cache_dir=self.cache_dir, image_cache_driver='xattr', image_cache_max_size=1024 * 5) <NEW_LINE> self.cache = image_cache.ImageCache() <NEW_LINE> if not xattr_writes_supported(self.cache_dir): <NEW_LINE> <INDENT> self.inited = True <NEW_LINE> self.disabled = True <NEW_LINE> self.disabled_message = ("filesystem does not support xattr") <NEW_LINE> return
|
Tests image caching when xattr is used in cache
|
625990664e4d562566373b69
|
class FactoryRegistration(object): <NEW_LINE> <INDENT> descriptor = None <NEW_LINE> class_factory = None <NEW_LINE> def __init__(self, descriptor, class_factory): <NEW_LINE> <INDENT> self.descriptor = descriptor <NEW_LINE> self.class_factory = class_factory
|
Holds registration of specific component in component factory.
|
625990662ae34c7f260ac849
|
class HeadingItem(BaseItem): <NEW_LINE> <INDENT> __metaclass__ = generate_docstring <NEW_LINE> fixed = {'menu_type': 'heading'} <NEW_LINE> defaults = {'name': None, 'description': None, 'heading_level': DEFAULT_HEADING_LEVEL, 'extra_data': None, 'klass': None, }
|
Wrapper/interface for heading objects in a LayerTree/menu.
|
62599066fff4ab517ebcef7d
|
class Visual7W_Pointing(Visual7W): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs) -> None: <NEW_LINE> <INDENT> super(Visual7W_Pointing, self).__init__(*args, data_type="pointing", **kwargs)
|
One type of VQA Dataset
http://web.stanford.edu/%7Eyukez/visual7w/
Download Pointing Dataset
|
625990668e7ae83300eea7f0
|
class Amenity(BaseModel): <NEW_LINE> <INDENT> name = "" <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(**kwargs)
|
amenity class
|
62599066cc0a2c111447c681
|
class Jogo: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.cena = Cena(TABULEIRO, direita=Tabuleiro()) <NEW_LINE> self.start = Elemento(BOTAO, x=50, y=50, cena=self.cena) <NEW_LINE> <DEDENT> def vai(self): <NEW_LINE> <INDENT> self.cena.vai() <NEW_LINE> <DEDENT> def bt_click(self): <NEW_LINE> <INDENT> self.janela = Tk() <NEW_LINE> <DEDENT> elemento = Button(janela, width=20, text="Jogar Dado", command=bt_click) <NEW_LINE> elemento.place(x=100, y=100) <NEW_LINE> lb = Label(janela, text="Teste") <NEW_LINE> lb.place(x=100, y=150)
|
Representa uma cena de tabuleiro com o botão
|
6259906655399d3f05627c83
|
class SearchError(DiaspyError): <NEW_LINE> <INDENT> pass
|
Exception raised when something related to searching goes wrong.
|
62599066009cb60464d02c9b
|
class TestGeolookupResult(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testGeolookupResult(self): <NEW_LINE> <INDENT> pass
|
GeolookupResult unit test stubs
|
625990668da39b475be0494c
|
class RgbColor(core.RgbColor): <NEW_LINE> <INDENT> def writeSdl(self, f): <NEW_LINE> <INDENT> f.write("rgb <") <NEW_LINE> f.write(str.join(",", (list(map(str,self))))) <NEW_LINE> f.write(">")
|
Extends L{core.RgbColor} by implementing the L{writeSdl} method.
|
62599066d268445f2663a70e
|
class ExtWindow(BaseExtWindow): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(ExtWindow, self).__init__(*args, **kwargs) <NEW_LINE> self._ext_name = 'Ext.m3.Window' <NEW_LINE> self.init_component(*args, **kwargs) <NEW_LINE> <DEDENT> def render(self): <NEW_LINE> <INDENT> assert getattr(self, '_ext_name'), 'Class %s is not define "_ext_name"' % self.__class__.__name__ <NEW_LINE> self.pre_render() <NEW_LINE> try: <NEW_LINE> <INDENT> self.render_base_config() <NEW_LINE> self.render_params() <NEW_LINE> <DEDENT> except UnicodeDecodeError: <NEW_LINE> <INDENT> raise Exception('Some attribute is not unicode') <NEW_LINE> <DEDENT> base_config = self._get_config_str() <NEW_LINE> params = self._get_params_str() <NEW_LINE> res = '%(ext_name)s({%(base_config)s},{%(params)s})' % { 'ext_name': self._ext_name, 'base_config': base_config, 'params': params} <NEW_LINE> return 'new %s' % res if not self._is_function_render else res
|
Окно
:raises: AssertionError, UnicodeDecodeError
|
62599066498bea3a75a591b2
|
class ITribunaContentLayer(IRedominoAdvancedKeywordLayer): <NEW_LINE> <INDENT> pass
|
Marker interface that defines a Zope 3 browser layer.
|
62599066a8370b77170f1b32
|
class UpdateDestroyReview(RetrieveUpdateDestroyAPIView): <NEW_LINE> <INDENT> queryset = Review.objects.all() <NEW_LINE> serializer_class = ReviewSerializer <NEW_LINE> lookup_url_kwarg = 'review_id' <NEW_LINE> permission_classes = [IsOwnerOrReadOnly, IsAuthenticated]
|
get, update and delete specific review by owner
|
62599066e5267d203ee6cf70
|
class Glossary(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'glossary_url': {'required': True}, 'format': {'required': True}, } <NEW_LINE> _attribute_map = { 'glossary_url': {'key': 'glossaryUrl', 'type': 'str'}, 'format': {'key': 'format', 'type': 'str'}, 'version': {'key': 'version', 'type': 'str'}, 'storage_source': {'key': 'storageSource', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, glossary_url: str, format: str, version: Optional[str] = None, storage_source: Optional[Union[str, "StorageSource"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(Glossary, self).__init__(**kwargs) <NEW_LINE> self.glossary_url = glossary_url <NEW_LINE> self.format = format <NEW_LINE> self.version = version <NEW_LINE> self.storage_source = storage_source
|
Glossary / translation memory for the request.
All required parameters must be populated in order to send to Azure.
:param glossary_url: Required. Location of the glossary.
We will use the file extension to extract the formatting if the format parameter is not
supplied.
If the translation language pair is not present in the glossary, it will not be applied.
:type glossary_url: str
:param format: Required. Format.
:type format: str
:param version: Optional Version. If not specified, default is used.
:type version: str
:param storage_source: Storage Source. Possible values include: "AzureBlob".
:type storage_source: str or ~azure.ai.translation.document.models.StorageSource
|
6259906676e4537e8c3f0ce6
|
class ListDocumentsResponse(proto.Message): <NEW_LINE> <INDENT> @property <NEW_LINE> def raw_page(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> documents = proto.RepeatedField( proto.MESSAGE, number=1, message=gf_document.Document, ) <NEW_LINE> next_page_token = proto.Field(proto.STRING, number=2)
|
The response for
[Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments].
Attributes:
documents (Sequence[~.gf_document.Document]):
The Documents found.
next_page_token (str):
The next page token.
|
62599066435de62698e9d56d
|
class PlayerDifficulty(Enum): <NEW_LINE> <INDENT> EASY = 1, <NEW_LINE> MEDIUM = 2, <NEW_LINE> HARD = 3, <NEW_LINE> HUMAN = 4
|
Easy sAI
|
62599066cb5e8a47e493cd36
|
class ListTaskCallMixin(Call): <NEW_LINE> <INDENT> def list_tasks(self): <NEW_LINE> <INDENT> from highton.models.task import Task <NEW_LINE> return fields.ListField( name=self.ENDPOINT, init_class=Task ).decode( self.element_from_string( self._get_request( endpoint=self.ENDPOINT + '/' + str(self.id) + '/' + Task.ENDPOINT, ).text ) )
|
A mixin to get all tasks of inherited class
These could be: people || companies || kases || deals
|
625990667047854f46340b18
|
class IonObjectDeserializer(IonObjectSerializationBase): <NEW_LINE> <INDENT> deserialize = IonObjectSerializationBase.operate <NEW_LINE> def __init__(self, transform_method=None, obj_registry=None, **kwargs): <NEW_LINE> <INDENT> assert obj_registry <NEW_LINE> self._obj_registry = obj_registry <NEW_LINE> IonObjectSerializationBase.__init__(self, transform_method=transform_method) <NEW_LINE> <DEDENT> def _transform(self, obj): <NEW_LINE> <INDENT> if isinstance(obj, dict) and "type_" in obj: <NEW_LINE> <INDENT> objc = obj.copy() <NEW_LINE> type = objc.pop('type_') <NEW_LINE> ion_obj = self._obj_registry.new(type.encode('ascii')) <NEW_LINE> for k, v in objc.iteritems(): <NEW_LINE> <INDENT> setattr(ion_obj, k, v) <NEW_LINE> <DEDENT> return ion_obj <NEW_LINE> <DEDENT> if _have_numpy: <NEW_LINE> <INDENT> if isinstance(obj, dict): <NEW_LINE> <INDENT> msg = obj.get('numpy',False) <NEW_LINE> log.debug('message = %s', msg) <NEW_LINE> if msg: <NEW_LINE> <INDENT> shape = msg.get('shape') <NEW_LINE> type = msg.get('type') <NEW_LINE> data = msg.get('body') <NEW_LINE> log.debug('Numpy Array Detected:\n type: %s\n shape: %s\n body: %s',type,shape,data) <NEW_LINE> ret = np.fromstring(string=data,dtype=type).reshape(shape) <NEW_LINE> return np.array(ret) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return obj
|
Deserializer for IonObjects.
Defines a _transform method to transform dictionaries produced by IonObjectSerializer back
into IonObjects. You *MUST* pass an object registry
|
6259906699fddb7c1ca63981
|
class DebugColrPrinterTests(DebugPrinterTests): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.dp_class = DebugColrPrinter <NEW_LINE> self.class_name = self.dp_class.__name__
|
Tests for the DebugColrPrinter class.
|
6259906632920d7e50bc77ab
|
class Segment(api_extensions.ExtensionDescriptor): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def get_name(cls): <NEW_LINE> <INDENT> return "Segment" <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_alias(cls): <NEW_LINE> <INDENT> return "segment" <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_description(cls): <NEW_LINE> <INDENT> return "Segments extension." <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_updated(cls): <NEW_LINE> <INDENT> return "2016-02-24T17:00:00-00:00" <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_resources(cls): <NEW_LINE> <INDENT> resource_attributes = RESOURCE_ATTRIBUTE_MAP[SEGMENTS] <NEW_LINE> controller = base.create_resource( SEGMENTS, SEGMENT, directory.get_plugin(SEGMENTS), resource_attributes, allow_pagination=True, allow_sorting=True) <NEW_LINE> return [extensions.ResourceExtension(SEGMENTS, controller, attr_map=resource_attributes)] <NEW_LINE> <DEDENT> def update_attributes_map(self, attributes): <NEW_LINE> <INDENT> super(Segment, self).update_attributes_map( attributes, extension_attrs_map=RESOURCE_ATTRIBUTE_MAP) <NEW_LINE> <DEDENT> def get_extended_resources(self, version): <NEW_LINE> <INDENT> if version == "2.0": <NEW_LINE> <INDENT> return RESOURCE_ATTRIBUTE_MAP <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> <DEDENT> def get_required_extensions(self): <NEW_LINE> <INDENT> return [ext_stddesc.Standardattrdescription.get_alias(), stdattrseg_apidef.ALIAS]
|
Extension class supporting Segments.
|
625990667c178a314d78e79e
|
class LRUCache(object): <NEW_LINE> <INDENT> def __init__(self, max_entries, default_entry): <NEW_LINE> <INDENT> self._max_entries = max_entries <NEW_LINE> self._default_entry = default_entry <NEW_LINE> self._cache = collections.OrderedDict() <NEW_LINE> self._on_evict = None <NEW_LINE> <DEDENT> def get(self, key): <NEW_LINE> <INDENT> value = self._cache.pop(key, self._default_entry) <NEW_LINE> if value != self._default_entry: <NEW_LINE> <INDENT> self._cache[key] = value <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> def put(self, key, value): <NEW_LINE> <INDENT> self._cache[key] = value <NEW_LINE> while len(self._cache) > self._max_entries: <NEW_LINE> <INDENT> name, value = self._cache.popitem(last=False) <NEW_LINE> if self._on_evict is not None: <NEW_LINE> <INDENT> self._on_evict(name, value) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def evict(self, key): <NEW_LINE> <INDENT> value = self._cache.pop(key, self._default_entry) <NEW_LINE> if self._on_evict is not None: <NEW_LINE> <INDENT> self._on_evict(key, value) <NEW_LINE> <DEDENT> <DEDENT> def evict_all(self): <NEW_LINE> <INDENT> if self._on_evict is not None: <NEW_LINE> <INDENT> for item in self._cache.items(): <NEW_LINE> <INDENT> self._on_evict(*item) <NEW_LINE> <DEDENT> <DEDENT> self._cache.clear() <NEW_LINE> <DEDENT> def set_on_evict(self, func): <NEW_LINE> <INDENT> self._on_evict = func <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._cache)
|
A simple LRUCache implementation used to manage the internal runtime state.
An internal runtime state is used to handle the data under a specific key of a "public" state.
So the number of the internal runtime states may keep growing during the streaming task
execution. To prevent the OOM caused by the unlimited growth, we introduce this LRUCache
to evict the inactive internal runtime states.
|
625990663cc13d1c6d466ea8
|
class ExpressRouteConnectionList(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'value': {'key': 'value', 'type': '[ExpressRouteConnection]'}, } <NEW_LINE> def __init__( self, *, value: Optional[List["ExpressRouteConnection"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(ExpressRouteConnectionList, self).__init__(**kwargs) <NEW_LINE> self.value = value
|
ExpressRouteConnection list.
:param value: The list of ExpressRoute connections.
:type value: list[~azure.mgmt.network.v2019_08_01.models.ExpressRouteConnection]
|
62599066fff4ab517ebcef80
|
class WeightedPositiveWords(MRJob):
    """MapReduce job grouping review words by business.

    The mapper routes review records and business records under their
    ``business_id``; the reducer collects the lower-cased words of each
    business's reviews.
    """

    INPUT_PROTOCOL = JSONValueProtocol
    OUTPUT_PROTOCOL = JSONValueProtocol
    weight_list = []

    def mapper(self, _, data):
        """Emit ``(business_id, tagged_record)`` for reviews and businesses."""
        if "review_id" in data:
            # Tagged tuple: (text, star rating, "useful" vote count).
            yield data['business_id'], ('review', (data['text'], data['stars'], data['useful']))
        elif "business_id" in data:
            if data['categories']:
                yield data['business_id'], ('categories', data['categories'])

    def reducer(self, business_id, reviews):
        """Collect the lower-cased review words for one business."""
        reviews_list = []
        for data in reviews:
            # data is a tagged tuple from the mapper; tuple membership test.
            if "review" in data:
                # Bug fix: the original *appended a generator object* (not
                # words) and would have iterated the review text character
                # by character; generators are also not serializable by
                # JSONValueProtocol.  Split the text into words and extend.
                text = data[1][0]
                reviews_list.extend(word.lower() for word in text.split())
        yield business_id, reviews_list
|
Find the most positive words in the dataset.
|
62599066462c4b4f79dbd16b
|
class GetBookmarkInputSet(InputSet):
    """An InputSet for the GetBookmark Choreo.

    Each setter stores one named input via ``InputSet._set_input``; the
    semantics of each input are defined by the remote GetBookmark operation
    (presumably a bookmarking-service API — confirm against the Choreo docs).
    """

    def set_ChangeSignature(self, value):
        # Set the 'ChangeSignature' input for this Choreo.
        InputSet._set_input(self, 'ChangeSignature', value)

    def set_Date(self, value):
        # Set the 'Date' input for this Choreo.
        InputSet._set_input(self, 'Date', value)

    def set_Meta(self, value):
        # Set the 'Meta' input for this Choreo.
        InputSet._set_input(self, 'Meta', value)

    def set_Password(self, value):
        # Set the 'Password' input (account credential) for this Choreo.
        InputSet._set_input(self, 'Password', value)

    def set_Tag(self, value):
        # Set the 'Tag' input for this Choreo.
        InputSet._set_input(self, 'Tag', value)

    def set_URL(self, value):
        # Set the 'URL' input for this Choreo.
        InputSet._set_input(self, 'URL', value)

    def set_Username(self, value):
        # Set the 'Username' input (account credential) for this Choreo.
        InputSet._set_input(self, 'Username', value)
|
An InputSet with methods appropriate for specifying the inputs to the GetBookmark
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
|
62599066460517430c432c07
|
class TestJSONEnforcer:
    """Tests for the ``mid.JSONEnforcer`` request middleware."""

    # Single shared middleware instance; the tests only call
    # process_request on it, so no per-test state is needed.
    enforcer = mid.JSONEnforcer()

    @pytest.mark.parametrize('accepts', [True, False])
    def test_client_accept(self, accepts):
        # A GET from a client that does not accept JSON must be rejected
        # with 406 Not Acceptable; an accepting client passes through.
        req = mock.Mock()
        req.client_accepts_json = accepts
        req.method = 'GET'
        if not accepts:
            with pytest.raises(errors.HTTPNotAcceptable):
                self.enforcer.process_request(req, 'foo')
        else:
            self.enforcer.process_request(req, 'foo')

    # (method, request content type, whether 415 is expected):
    # body-carrying methods (POST/PATCH/PUT) must declare application/json;
    # GET and DELETE are accepted regardless of content type.
    @pytest.mark.parametrize('method, content_type, raises', [
        ('GET', None, False),
        ('GET', 'application/json', False),
        ('GET', 'mimetype/xml', False),
        ('POST', None, True),
        ('POST', 'application/json', False),
        ('POST', 'mimetype/xml', True),
        ('PATCH', None, True),
        ('PATCH', 'application/json', False),
        ('PATCH', 'mimetype/xml', True),
        ('PUT', None, True),
        ('PUT', 'application/json', False),
        ('PUT', 'mimetype/xml', True),
        ('DELETE', None, False),
        ('DELETE', 'application/json', False),
        ('DELETE', 'mimetype/xml', False),
    ])
    def test_method_content_type(self, method, content_type, raises):
        # client_accepts_json is forced True so only the content-type
        # check is exercised here.
        req = mock.Mock(client_accepts_json=True)
        req.method = method
        req.content_type = content_type
        if raises:
            with pytest.raises(errors.HTTPUnsupportedMediaType):
                self.enforcer.process_request(req, 'foo')
        else:
            self.enforcer.process_request(req, 'foo')
|
Test enforcement of JSON requests
|
62599066e76e3b2f99fda165
|
class RPCMethodError(Exception):
    """Raised when processing of an RPC method fails."""
|
Error that might be raised during RPC method processing.
|
625990664a966d76dd5f065b
|
class RawProduct(models.Model):
    """A raw ingredient product (example: coffee beans)."""

    # Human-readable product name.
    name = models.CharField(max_length=255)
    # Shelf-life duration; the unit is not specified in this model
    # (presumably days) — TODO confirm against callers.
    shelf_life = models.PositiveSmallIntegerField(default=0)
|
Example: Coffee Beans
|
6259906601c39578d7f142e7
|
class GanGenerator(ABC):
    """Abstract base class for standard GAN generators.

    Implementations convert input noise into synthetic data.
    """

    @abstractmethod
    def generate_data_dict(self, noise: torch.Tensor, *args, **kwargs) -> tuple:
        """Produce synthetic data from *noise*.

        Must be overridden by subclasses; the base implementation raises
        ``NotImplementedError``.
        """
        raise NotImplementedError
|
Abstract class for standard GAN generators, which aim to convert input noise into synthetic data.
|
62599066d7e4931a7ef3d738
|
class SimpleCursorShapeConfig(CursorShapeConfig):
    """Cursor shape configuration that always reports one fixed shape."""

    def __init__(self, cursor_shape: CursorShape = CursorShape._NEVER_CHANGE) -> None:
        # The shape to report for every call to get_cursor_shape.
        self.cursor_shape = cursor_shape

    def get_cursor_shape(self, application: "Application[Any]") -> CursorShape:
        # The application argument is ignored; the configured shape is
        # returned unconditionally.
        return self.cursor_shape
|
Always show the given cursor shape.
|
6259906667a9b606de547654
|
class ModelType(Enum):
    """Enumeration of supported model architectures."""

    # Bug fix: the original `CONV1 = 1,` trailing comma made this member's
    # value the tuple (1,) while VGG16's value was the int 2.  Both members
    # now carry plain int values, so ModelType(1) and value comparisons work
    # consistently.
    CONV1 = 1
    VGG16 = 2
|
An enum type for specifying a model architecture.
|
62599066be8e80087fbc07ee
|
class GatewayRejectionReason(object):
    """String constants naming the possible gateway rejection reasons.

    ``Unrecognized`` is the fallback for reasons this client version does
    not know about.
    """

    Avs = "avs"
    AvsAndCvv = "avs_and_cvv"
    Cvv = "cvv"
    Duplicate = "duplicate"
    Fraud = "fraud"
    ThreeDSecure = "three_d_secure"
    Unrecognized = "unrecognized"
|
Constants representing gateway rejection reasons. Available types are:
* braintree.Transaction.GatewayRejectionReason.Avs
* braintree.Transaction.GatewayRejectionReason.AvsAndCvv
* braintree.Transaction.GatewayRejectionReason.Cvv
* braintree.Transaction.GatewayRejectionReason.Duplicate
* braintree.Transaction.GatewayRejectionReason.Fraud
* braintree.Transaction.GatewayRejectionReason.ThreeDSecure
* braintree.Transaction.GatewayRejectionReason.Unrecognized
|
625990667b25080760ed8894
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.