code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
|---|---|---|
class Document(pgdb.Model): <NEW_LINE> <INDENT> document_id = pgdb.Column(UUID(as_uuid=True), primary_key=True, nullable=False, default=lambda: uuid.uuid4(), unique=True) <NEW_LINE> user_id = pgdb.Column(UUID(as_uuid=True), nullable=False) <NEW_LINE> document = pgdb.Column(pgdb.Text, nullable=False) <NEW_LINE> created_datetime = pgdb.Column(pgdb.DateTime(), nullable=False, default=datetime.datetime.utcnow()) <NEW_LINE> modified_datetime = pgdb.Column(pgdb.DateTime(), nullable=False, default=datetime.datetime.utcnow())
|
A squanched document
|
6259902926238365f5fadaef
|
class InvalidCommand(ParsingError): <NEW_LINE> <INDENT> _fmt = (_LOCATION_FMT + "Invalid command '%(cmd)s'") <NEW_LINE> def __init__(self, lineno, cmd): <NEW_LINE> <INDENT> self.cmd = cmd <NEW_LINE> ParsingError.__init__(self, lineno)
|
Raised when an unknown command found.
|
62599029711fe17d825e1469
|
class Dim(object): <NEW_LINE> <INDENT> def __init__(self, dist: float, dist_label=None): <NEW_LINE> <INDENT> self.dist = dist <NEW_LINE> self.dist_label = dist_label <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.dist) <NEW_LINE> <DEDENT> def __add__(self, other: Union[float, 'Dim']): <NEW_LINE> <INDENT> if isinstance(other, Dim): <NEW_LINE> <INDENT> if self.dist_label and other.dist_label: <NEW_LINE> <INDENT> new_label = self.dist_label + " + {}".format(other.dist_label) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_label = None <NEW_LINE> <DEDENT> return Dim(self.dist + other.dist, new_label) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_label = self.dist_label + " + {}".format( other) if self.dist_label else None <NEW_LINE> return Dim(self.dist + other, new_label) <NEW_LINE> <DEDENT> <DEDENT> def __radd__(self, other): <NEW_LINE> <INDENT> return self.__add__(other) <NEW_LINE> <DEDENT> def __mul__(self, other): <NEW_LINE> <INDENT> new_label = "({} * {})".format(self.dist_label, other) if self.dist_label else None <NEW_LINE> return Dim(self.dist * other, new_label) <NEW_LINE> <DEDENT> def __rmul__(self, other): <NEW_LINE> <INDENT> return self.__mul__(other) <NEW_LINE> <DEDENT> def __truediv__(self, other): <NEW_LINE> <INDENT> new_label = "({} / {})".format(self.dist_label, other) if self.dist_label else None <NEW_LINE> return Dim(self.dist / other, new_label) <NEW_LINE> <DEDENT> def __neg__(self): <NEW_LINE> <INDENT> new_label = "-({})".format( self.dist_label) if self.dist_label else None <NEW_LINE> return Dim(-self.dist, new_label)
|
Corresponds to a 'dimension' - a distance with an optional
label.
The labels are informational right now - the intended future use is that
they will be passed to 360 so that object dimensions are specfied by user
parameters. Alternatively in some cases, user paramters will be generated
from Dim object labels.
Operator overloading is employed to make arithmetic elsewhere easier. Not
all operators are implemented yet, or implemented for 'other' types.
Negative dimensions may be used for arithmetic purposes but are not
properly represented in 360 by themselves.
Args:
dist (float): amplitude/distance represented by object
dist_label: optional user parameter forumla associated with the object
|
625990291d351010ab8f4ab6
|
class FormElementPluginError(FormPluginError): <NEW_LINE> <INDENT> pass
|
Raised when form element plugin error occurs.
|
625990295166f23b2e244375
|
class TypeArgDoesNotExistException(FriendlyException): <NEW_LINE> <INDENT> def __init__(self, missing_type): <NEW_LINE> <INDENT> super().__init__(f"type {missing_type} doesn't exist in the .ctxrc")
|
Exception raised when the type argument passed is not in the .ctxrc
|
62599029a4f1c619b294f594
|
class Dict(dict): <NEW_LINE> <INDENT> def __init__(self, names=(), values=(), **kw): <NEW_LINE> <INDENT> super(Dict,self).__init__(**kw) <NEW_LINE> for k, v in zip(names, values): <NEW_LINE> <INDENT> self[k] = v <NEW_LINE> <DEDENT> <DEDENT> def __getattr__(self, key): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self[key] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise AttributeError(r"'Dict' object has no attribute '%s'" % key) <NEW_LINE> <DEDENT> <DEDENT> def __setattr__(self, key, value): <NEW_LINE> <INDENT> self[key] = value
|
simple dict but support access as x,y style.
|
62599029925a0f43d25e8fe7
|
class AppsLocationsService(base_api.BaseApiService): <NEW_LINE> <INDENT> _NAME = u'apps_locations' <NEW_LINE> def __init__(self, client): <NEW_LINE> <INDENT> super(AppengineV1alpha.AppsLocationsService, self).__init__(client) <NEW_LINE> self._upload_configs = { } <NEW_LINE> <DEDENT> def Get(self, request, global_params=None): <NEW_LINE> <INDENT> config = self.GetMethodConfig('Get') <NEW_LINE> return self._RunMethod( config, request, global_params=global_params) <NEW_LINE> <DEDENT> Get.method_config = lambda: base_api.ApiMethodInfo( flat_path=u'v1alpha/apps/{appsId}/locations/{locationsId}', http_method=u'GET', method_id=u'appengine.apps.locations.get', ordered_params=[u'name'], path_params=[u'name'], query_params=[], relative_path=u'v1alpha/{+name}', request_field='', request_type_name=u'AppengineAppsLocationsGetRequest', response_type_name=u'Location', supports_download=False, ) <NEW_LINE> def List(self, request, global_params=None): <NEW_LINE> <INDENT> config = self.GetMethodConfig('List') <NEW_LINE> return self._RunMethod( config, request, global_params=global_params) <NEW_LINE> <DEDENT> List.method_config = lambda: base_api.ApiMethodInfo( flat_path=u'v1alpha/apps/{appsId}/locations', http_method=u'GET', method_id=u'appengine.apps.locations.list', ordered_params=[u'name'], path_params=[u'name'], query_params=[u'filter', u'pageSize', u'pageToken'], relative_path=u'v1alpha/{+name}/locations', request_field='', request_type_name=u'AppengineAppsLocationsListRequest', response_type_name=u'ListLocationsResponse', supports_download=False, )
|
Service class for the apps_locations resource.
|
6259902973bcbd0ca4bcb231
|
class HostTestPluginResetMethod_Stlink(HostTestPluginBase): <NEW_LINE> <INDENT> name = "HostTestPluginResetMethod_Stlink" <NEW_LINE> type = "ResetMethod" <NEW_LINE> capabilities = ["stlink"] <NEW_LINE> required_parameters = [] <NEW_LINE> stable = False <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> HostTestPluginBase.__init__(self) <NEW_LINE> <DEDENT> def is_os_supported(self, os_name=None): <NEW_LINE> <INDENT> if not os_name: <NEW_LINE> <INDENT> os_name = self.host_os_support() <NEW_LINE> <DEDENT> if os_name and os_name.startswith("Windows"): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def setup(self, *args, **kwargs): <NEW_LINE> <INDENT> self.ST_LINK_CLI = "ST-LINK_CLI.exe" <NEW_LINE> return True <NEW_LINE> <DEDENT> def create_stlink_fix_file(self, file_path): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with open(file_path, "w") as fix_file: <NEW_LINE> <INDENT> fix_file.write(os.linesep) <NEW_LINE> <DEDENT> <DEDENT> except (OSError, IOError): <NEW_LINE> <INDENT> self.print_plugin_error("Error opening STLINK-PRESS-ENTER-BUG file") <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> <DEDENT> def execute(self, capability, *args, **kwargs): <NEW_LINE> <INDENT> result = False <NEW_LINE> if self.check_parameters(capability, *args, **kwargs) is True: <NEW_LINE> <INDENT> if capability == "stlink": <NEW_LINE> <INDENT> cmd = [self.ST_LINK_CLI, "-Rst", "-Run"] <NEW_LINE> enter_file_path = os.path.join(tempfile.gettempdir(), FIX_FILE_NAME) <NEW_LINE> self.create_stlink_fix_file(enter_file_path) <NEW_LINE> try: <NEW_LINE> <INDENT> with open(enter_file_path, "r") as fix_file: <NEW_LINE> <INDENT> stdin_arg = kwargs.get("stdin", fix_file) <NEW_LINE> result = self.run_command(cmd, stdin=stdin_arg) <NEW_LINE> <DEDENT> <DEDENT> except (OSError, IOError): <NEW_LINE> <INDENT> self.print_plugin_error("Error opening STLINK-PRESS-ENTER-BUG file") <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return result
|
Plugin interface adaptor for STLINK_CLI.
|
62599029796e427e5384f71d
|
class ILabCASCollectionRDFGenerator(IRDFGenerator): <NEW_LINE> <INDENT> labcasSolrURL = schema.TextLine( title=_('LabCAS Data Access API URL'), description=_('The Uniform Resource Locator to the LabCAS API.'), required=True, constraint=validateAccessibleURL, default='https://edrn-labcas.jpl.nasa.gov/data-access-api' ) <NEW_LINE> username = schema.TextLine( title=_('Username'), description=_('Username to authenticate with; use a service account if available'), required=True, default='service' ) <NEW_LINE> password = schema.TextLine( title=_('Password'), description=_('Password to confirm the identity of the username; this will be visible!'), required=True, )
|
Generator for RDF using data from LabCAS.
|
6259902921bff66bcd723c02
|
@base.ReleaseTracks(base.ReleaseTrack.BETA) <NEW_LINE> class SetClusterSelector(base.UpdateCommand): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def Args(parser): <NEW_LINE> <INDENT> flags.AddTemplateFlag(parser, 'set cluster selector') <NEW_LINE> flags.AddZoneFlag(parser) <NEW_LINE> parser.add_argument( '--cluster-labels', metavar='KEY=VALUE', type=arg_parsers.ArgDict( key_type=labels_util.KEY_FORMAT_VALIDATOR, value_type=labels_util.VALUE_FORMAT_VALIDATOR, min_length=1), action=arg_parsers.UpdateAction, help='A list of label KEY=VALUE pairs to add.') <NEW_LINE> <DEDENT> def Run(self, args): <NEW_LINE> <INDENT> dataproc = dp.Dataproc(self.ReleaseTrack()) <NEW_LINE> template = util.ParseWorkflowTemplates(args.template, dataproc) <NEW_LINE> workflow_template = dataproc.GetRegionsWorkflowTemplate( template, args.version) <NEW_LINE> labels = labels_util.Diff(additions=args.cluster_labels).Apply( dataproc.messages.ClusterSelector.ClusterLabelsValue).GetOrNone() <NEW_LINE> cluster_selector = dataproc.messages.ClusterSelector( clusterLabels=labels, zone=properties.VALUES.compute.zone.GetOrFail()) <NEW_LINE> workflow_template.placement = dataproc.messages.WorkflowTemplatePlacement( clusterSelector=cluster_selector) <NEW_LINE> response = dataproc.client.projects_regions_workflowTemplates.Update( workflow_template) <NEW_LINE> return response
|
Set cluster selector for the workflow template.
|
6259902966673b3332c31390
|
class PacketEvent(object): <NEW_LINE> <INDENT> def __init__(self, event, args=None): <NEW_LINE> <INDENT> self.name = event <NEW_LINE> self.arguments = OrderedDict() if args is None else OrderedDict(args) <NEW_LINE> <DEDENT> def __call__(self, argument, value=None): <NEW_LINE> <INDENT> if value is None: <NEW_LINE> <INDENT> return self.arguments[argument] <NEW_LINE> <DEDENT> self.arguments[argument] = value <NEW_LINE> return value
|
Packet event.
This object is a data structure which stores packet data under specific
keys according to the mapping rules defined in the Protocol Parser.
|
625990298e05c05ec3f6f62c
|
class SecretLinkFactoryTestCase(InvenioTestCase): <NEW_LINE> <INDENT> extra_data = dict(recid=1) <NEW_LINE> def test_validation(self): <NEW_LINE> <INDENT> t = SecretLinkFactory.create_token(1, self.extra_data) <NEW_LINE> self.assertIsNotNone(SecretLinkFactory.validate_token( t, expected_data=self.extra_data)) <NEW_LINE> t = SecretLinkFactory.create_token( 1, self.extra_data, expires_at=datetime.now()+timedelta(days=1) ) <NEW_LINE> self.assertIsNotNone(SecretLinkFactory.validate_token( t, expected_data=self.extra_data)) <NEW_LINE> self.assertIsNone(SecretLinkFactory.validate_token( t, expected_data=dict(recid=2))) <NEW_LINE> <DEDENT> def test_creation(self): <NEW_LINE> <INDENT> d = datetime.now()+timedelta(days=1) <NEW_LINE> t = SecretLinkFactory.create_token(1, self.extra_data) <NEW_LINE> self.assertIsNotNone(SecretLinkSerializer().validate_token( t, expected_data=self.extra_data)) <NEW_LINE> self.assertIsNone(TimedSecretLinkSerializer().validate_token( t, expected_data=self.extra_data)) <NEW_LINE> t1 = SecretLinkFactory.create_token( 1, self.extra_data, expires_at=d ) <NEW_LINE> t2 = SecretLinkFactory.create_token(1, self.extra_data) <NEW_LINE> self.assertIsNone(SecretLinkSerializer().validate_token( t1, expected_data=self.extra_data)) <NEW_LINE> self.assertIsNotNone(TimedSecretLinkSerializer().validate_token( t1, expected_data=self.extra_data)) <NEW_LINE> self.assertNotEqual(t1, t2) <NEW_LINE> <DEDENT> def test_load_token(self): <NEW_LINE> <INDENT> t = SecretLinkFactory.create_token(1, self.extra_data) <NEW_LINE> self.assertIsNotNone(SecretLinkFactory.load_token(t)) <NEW_LINE> t = SecretLinkFactory.create_token( 1, self.extra_data, expires_at=datetime.now()-timedelta(days=1)) <NEW_LINE> self.assertRaises(SignatureExpired, SecretLinkFactory.load_token, t) <NEW_LINE> self.assertIsNotNone(SecretLinkFactory.load_token(t, force=True))
|
Test case for factory class.
|
62599029287bf620b6272b90
|
class LayerNotFound(Exception): <NEW_LINE> <INDENT> pass
|
...
|
6259902926238365f5fadaf3
|
class _FileDiffCommon(object): <NEW_LINE> <INDENT> fieldsets = ( (None, { 'fields': ('diffset', 'status', 'binary', ('source_file', 'source_revision'), ('dest_file', 'dest_detail'), 'diff', 'parent_diff') }), (_('Internal State'), { 'description': _('<p>This is advanced state that should not be ' 'modified unless something is wrong.</p>'), 'fields': ('extra_data',), 'classes': ['collapse'], }), ) <NEW_LINE> raw_id_fields = ( 'commit', 'diff_hash', 'diffset', 'legacy_diff_hash', 'legacy_parent_diff_hash', 'parent_diff_hash', ) <NEW_LINE> readonly_fields = ('diff', 'parent_diff') <NEW_LINE> def diff(self, filediff): <NEW_LINE> <INDENT> return self._style_diff(filediff.diff) <NEW_LINE> <DEDENT> diff.short_description = _('Diff') <NEW_LINE> def parent_diff(self, filediff): <NEW_LINE> <INDENT> return self._style_diff(filediff.parent_diff) <NEW_LINE> <DEDENT> parent_diff.short_description = _('Parent diff') <NEW_LINE> def _style_diff(self, diff): <NEW_LINE> <INDENT> return format_html( '</p>{0}<p>', mark_safe(highlight(diff, DiffLexer(), HtmlFormatter())))
|
Common attributes for FileDiffAdmin and FileDiffInline.
|
625990298a349b6b436871da
|
class TestBadgeApplicationPurchaseConstraints(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def make_instance(self, include_optional): <NEW_LINE> <INDENT> if include_optional : <NEW_LINE> <INDENT> return BadgeApplicationPurchaseConstraints( limit = allegro_api.models.badge_application_purchase_constraints_limit.BadgeApplicationPurchaseConstraintsLimit( per_user = allegro_api.models.badge_application_purchase_constraints_limit_per_user.BadgeApplicationPurchaseConstraintsLimitPerUser( max_items = 1, ), ) ) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> return BadgeApplicationPurchaseConstraints( ) <NEW_LINE> <DEDENT> <DEDENT> def testBadgeApplicationPurchaseConstraints(self): <NEW_LINE> <INDENT> inst_req_only = self.make_instance(include_optional=False) <NEW_LINE> inst_req_and_optional = self.make_instance(include_optional=True)
|
BadgeApplicationPurchaseConstraints unit test stubs
|
6259902963f4b57ef0086543
|
class InputKolBspData(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.DataList = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> if params.get("DataList") is not None: <NEW_LINE> <INDENT> self.DataList = [] <NEW_LINE> for item in params.get("DataList"): <NEW_LINE> <INDENT> obj = InputKolDataList() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.DataList.append(obj)
|
CheckKol
|
6259902921bff66bcd723c04
|
class process_gpm(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.enabled = True <NEW_LINE> self.checked = False <NEW_LINE> <DEDENT> def onClick(self): <NEW_LINE> <INDENT> pass
|
Implementation for WEBDHMV2_addin.process_gpm (Button)
|
62599029d10714528d69ee5d
|
class ImageQualityClassifier(object): <NEW_LINE> <INDENT> def __init__(self, model_ckpt, model_patch_side_length, num_classes, graph=None, session_config=None): <NEW_LINE> <INDENT> self._model_patch_side_length = model_patch_side_length <NEW_LINE> self._num_classes = num_classes <NEW_LINE> if graph is None: <NEW_LINE> <INDENT> graph = tensorflow.Graph() <NEW_LINE> <DEDENT> self.graph = graph <NEW_LINE> with self.graph.as_default(): <NEW_LINE> <INDENT> self._image_placeholder = tensorflow.placeholder( tensorflow.float32, shape=[None, None, 1]) <NEW_LINE> self._probabilities = self._probabilities_from_image( self._image_placeholder, model_patch_side_length, num_classes) <NEW_LINE> self._sess = tensorflow.Session(config=session_config) <NEW_LINE> saver = tensorflow.train.Saver() <NEW_LINE> saver.restore(self._sess, model_ckpt) <NEW_LINE> <DEDENT> logger.debug('Restored image focus prediction model from %s.', model_ckpt) <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> self._sess.close() <NEW_LINE> <DEDENT> def _probabilities_from_image(self, image_placeholder, model_patch_side_length, num_classes): <NEW_LINE> <INDENT> labels_fake = tensorflow.zeros([self._num_classes]) <NEW_LINE> image_path_fake = tensorflow.constant(['unused']) <NEW_LINE> tiles, labels, _ = _get_image_tiles_tensor( image_placeholder, labels_fake, image_path_fake, model_patch_side_length) <NEW_LINE> model_metrics = evaluation.get_model_and_metrics( tiles, num_classes=num_classes, one_hot_labels=labels, is_training=False) <NEW_LINE> return model_metrics.probabilities <NEW_LINE> <DEDENT> def score(self, image, invert=False): <NEW_LINE> <INDENT> pred = self.predict(image) <NEW_LINE> classes = numpy.arange(self._num_classes) <NEW_LINE> if invert: <NEW_LINE> <INDENT> classes = classes[::-1] <NEW_LINE> <DEDENT> assert pred.probabilities.ndim == classes.ndim == 1 <NEW_LINE> return numpy.dot(pred.probabilities, classes) <NEW_LINE> <DEDENT> def predict(self, image): <NEW_LINE> <INDENT> feed_dict = 
{self._image_placeholder: numpy.expand_dims(image, 2)} <NEW_LINE> [np_probabilities] = self._sess.run( [self._probabilities], feed_dict=feed_dict) <NEW_LINE> return evaluation.aggregate_prediction_from_probabilities( np_probabilities, evaluation.METHOD_AVERAGE) <NEW_LINE> <DEDENT> def get_patch_predictions(self, image): <NEW_LINE> <INDENT> results = [] <NEW_LINE> w = constants.PATCH_SIDE_LENGTH <NEW_LINE> for i in range(0, image.shape[0] - w, w): <NEW_LINE> <INDENT> for j in range(0, image.shape[1] - w, w): <NEW_LINE> <INDENT> results.append((i, j, w, w, self.predict(image[i:i + w, j:j + w]))) <NEW_LINE> <DEDENT> <DEDENT> return results
|
Object for running image quality model inference.
Attributes:
graph: TensorFlow graph.
|
625990299b70327d1c57fd24
|
class TestPipeline(compiler.Compiler): <NEW_LINE> <INDENT> def define_pipelines(self): <NEW_LINE> <INDENT> name = 'test parfor aliasing' <NEW_LINE> pm = PassManager(name) <NEW_LINE> pm.add_pass(TranslateByteCode, "analyzing bytecode") <NEW_LINE> pm.add_pass(FixupArgs, "fix up args") <NEW_LINE> pm.add_pass(IRProcessing, "processing IR") <NEW_LINE> pm.add_pass(WithLifting, "Handle with contexts") <NEW_LINE> if not self.state.flags.no_rewrites: <NEW_LINE> <INDENT> pm.add_pass(GenericRewrites, "nopython rewrites") <NEW_LINE> pm.add_pass(RewriteSemanticConstants, "rewrite semantic constants") <NEW_LINE> pm.add_pass(DeadBranchPrune, "dead branch pruning") <NEW_LINE> <DEDENT> pm.add_pass(InlineClosureLikes, "inline calls to locally defined closures") <NEW_LINE> pm.add_pass(NopythonTypeInference, "nopython frontend") <NEW_LINE> pm.add_pass(NativeLowering, "native lowering") <NEW_LINE> pm.add_pass(NoPythonBackend, "nopython mode backend") <NEW_LINE> pm.finalize() <NEW_LINE> return [pm]
|
Test pipeline that just converts prange() to parfor and calls
remove_dead(). Copy propagation can replace B in the example code
which this pipeline avoids.
|
6259902923e79379d538d4ad
|
class ChromaticMotionFeature(featuresModule.FeatureExtractor): <NEW_LINE> <INDENT> id = 'm10' <NEW_LINE> def __init__(self, dataOrStream=None, *arguments, **keywords): <NEW_LINE> <INDENT> super().__init__(dataOrStream=dataOrStream, *arguments, **keywords) <NEW_LINE> self.name = 'Chromatic Motion' <NEW_LINE> self.description = 'Fraction of melodic intervals corresponding to a semi-tone.' <NEW_LINE> self.isSequential = True <NEW_LINE> self.dimensions = 1 <NEW_LINE> <DEDENT> def process(self): <NEW_LINE> <INDENT> histo = self.data['midiIntervalHistogram'] <NEW_LINE> total = sum(histo) <NEW_LINE> if not total: <NEW_LINE> <INDENT> raise JSymbolicFeatureException('input lacks notes') <NEW_LINE> <DEDENT> targets = [1] <NEW_LINE> count = 0 <NEW_LINE> for t in targets: <NEW_LINE> <INDENT> count += histo[t] <NEW_LINE> <DEDENT> self.feature.vector[0] = count / total
|
Fraction of melodic intervals corresponding to a semitone.
>>> s = corpus.parse('bwv66.6')
>>> fe = features.jSymbolic.ChromaticMotionFeature(s)
>>> f = fe.extract()
>>> f.vector
[0.220...]
|
62599029be8e80087fbc001b
|
class SageManager(pw.Model): <NEW_LINE> <INDENT> NAME = pw.CharField(unique=True) <NEW_LINE> PASSWD = pw.CharField() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> database = DATABASE
|
Main database class for storing passwords. Built on the Peewee ORM.
|
625990298c3a8732951f74fb
|
class CustomTire(car.Tire): <NEW_LINE> <INDENT> __maximum_miles = 500
|
This is the class for CustomTire.
Arg:
None
|
625990291f5feb6acb163b93
|
class FieldTypeViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = models.FieldType.objects.all() <NEW_LINE> serializer_class = serializers.FieldTypeSerializer
|
Viewset for FieldType model.
|
625990298e05c05ec3f6f62d
|
class TweetViewSet(viewsets.ReadOnlyModelViewSet): <NEW_LINE> <INDENT> model = Tweet <NEW_LINE> queryset = Tweet.public.all().order_by('pk') <NEW_LINE> serializer_class = TweetSerializer <NEW_LINE> paginate_by = 50
|
`Tweet` šiuo atveju yra žinutė,
kuri buvo užskaityta upkarmoas boto
kaip teisinga ir už ją buvo priskaičiuota
karmos taškų.
|
62599029d164cc6175821f1b
|
class Client: <NEW_LINE> <INDENT> def __init__(self, clientID, name, email) : <NEW_LINE> <INDENT> self.name=name <NEW_LINE> self.email=email <NEW_LINE> self.clientID=clientID <NEW_LINE> self.positions={} <NEW_LINE> <DEDENT> def getName(self) : <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def getID(self): <NEW_LINE> <INDENT> return self.clientID <NEW_LINE> <DEDENT> def getPositions(self): <NEW_LINE> <INDENT> return list(self.positions.values()) <NEW_LINE> <DEDENT> def setName(self, name) : <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> def setEmail(self, email) : <NEW_LINE> <INDENT> self.email = email <NEW_LINE> <DEDENT> def addPosition(self, position) : <NEW_LINE> <INDENT> if self.hasPosition(position.getSymbol()) : <NEW_LINE> <INDENT> currentPosition = self.positions[position.getSymbol()] <NEW_LINE> currentPosition.quantity += position.getQuantity() <NEW_LINE> currentPosition.setLastModificationDate(position.getLastModificationDate()) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> self.positions[position.getSymbol()] = position <NEW_LINE> <DEDENT> <DEDENT> def getPosition(self, symbol) : <NEW_LINE> <INDENT> if self.hasPosition(symbol) : <NEW_LINE> <INDENT> return self.positions[symbol] <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> raise PositionException("Client does not hold position on this security") <NEW_LINE> <DEDENT> <DEDENT> def hasPosition(self, symbol) : <NEW_LINE> <INDENT> return True if symbol in self.positions else False <NEW_LINE> <DEDENT> def updatePositions(self, transacton): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self.__dict__) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> positions = [ str(position) for position in self.positions.values() ] <NEW_LINE> return "%d:%s:%s:%s" % (self.clientID, self.name, self.email, ",".join(positions))
|
A class representing a client holding a position
|
62599029a8ecb033258721c1
|
class DistributionBarViz(DistributionPieViz): <NEW_LINE> <INDENT> viz_type = "dist_bar" <NEW_LINE> verbose_name = _("Distribution - Bar Chart") <NEW_LINE> is_timeseries = False <NEW_LINE> def query_obj(self): <NEW_LINE> <INDENT> d = super(DistributionBarViz, self).query_obj() <NEW_LINE> fd = self.form_data <NEW_LINE> if ( len(d['groupby']) < len(fd.get('groupby') or []) + len(fd.get('columns') or []) ): <NEW_LINE> <INDENT> raise Exception( _("Can't have overlap between Series and Breakdowns")) <NEW_LINE> <DEDENT> if not fd.get('metrics'): <NEW_LINE> <INDENT> raise Exception(_("Pick at least one metric")) <NEW_LINE> <DEDENT> if not fd.get('groupby'): <NEW_LINE> <INDENT> raise Exception(_("Pick at least one field for [Series]")) <NEW_LINE> <DEDENT> return d <NEW_LINE> <DEDENT> def get_data(self, df): <NEW_LINE> <INDENT> fd = self.form_data <NEW_LINE> row = df.groupby(self.groupby).sum()[self.metrics[0]].copy() <NEW_LINE> row.sort_values(ascending=False, inplace=True) <NEW_LINE> columns = fd.get('columns') or [] <NEW_LINE> pt = df.pivot_table( index=self.groupby, columns=columns, values=self.metrics) <NEW_LINE> if fd.get("contribution"): <NEW_LINE> <INDENT> pt = pt.fillna(0) <NEW_LINE> pt = pt.T <NEW_LINE> pt = (pt / pt.sum()).T <NEW_LINE> <DEDENT> pt = pt.reindex(row.index) <NEW_LINE> chart_data = [] <NEW_LINE> for name, ys in pt.iteritems(): <NEW_LINE> <INDENT> if pt[name].dtype.kind not in "biufc" or name in self.groupby: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if isinstance(name, string_types): <NEW_LINE> <INDENT> series_title = name <NEW_LINE> <DEDENT> elif len(self.metrics) > 1: <NEW_LINE> <INDENT> series_title = ", ".join(name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> l = [str(s) for s in name[1:]] <NEW_LINE> series_title = ", ".join(l) <NEW_LINE> <DEDENT> values = [] <NEW_LINE> for i, v in ys.iteritems(): <NEW_LINE> <INDENT> x = i <NEW_LINE> if isinstance(x, (tuple, list)): <NEW_LINE> <INDENT> x = ', '.join([str(s) for s in x]) <NEW_LINE> 
<DEDENT> else: <NEW_LINE> <INDENT> x = str(x) <NEW_LINE> <DEDENT> values.append({ 'x': x, 'y': v, }) <NEW_LINE> <DEDENT> d = { "key": series_title, "values": values, } <NEW_LINE> chart_data.append(d) <NEW_LINE> <DEDENT> return chart_data
|
A good old bar chart
|
62599029be8e80087fbc001d
|
class FlattenLayer(Layer): <NEW_LINE> <INDENT> @deprecated_alias(layer='prev_layer', end_support_version=1.9) <NEW_LINE> def __init__(self, prev_layer, name='flatten'): <NEW_LINE> <INDENT> super(FlattenLayer, self).__init__(prev_layer=prev_layer, name=name) <NEW_LINE> _out = flatten_reshape(self.inputs, name=name) <NEW_LINE> self.n_units = int(_out.get_shape()[-1]) <NEW_LINE> logging.info("FlattenLayer %s: %d" % (self.name, self.n_units)) <NEW_LINE> self.outputs = _out <NEW_LINE> self._add_layers(self.outputs)
|
A layer that reshapes high-dimension input into a vector.
Then we often apply DenseLayer, RNNLayer, ConcatLayer and etc on the top of a flatten layer.
[batch_size, mask_row, mask_col, n_mask] ---> [batch_size, mask_row * mask_col * n_mask]
Parameters
----------
prev_layer : :class:`Layer`
Previous layer.
name : str
A unique layer name.
Examples
--------
>>> import tensorflow as tf
>>> import tensorlayer as tl
>>> x = tf.placeholder(tf.float32, shape=[None, 28, 28, 1])
>>> net = tl.layers.InputLayer(x, name='input')
>>> net = tl.layers.FlattenLayer(net, name='flatten')
[?, 784]
|
62599029d164cc6175821f1c
|
class VLStringAtom(_BufferedAtom): <NEW_LINE> <INDENT> kind = 'vlstring' <NEW_LINE> type = 'vlstring' <NEW_LINE> base = UInt8Atom() <NEW_LINE> def _tobuffer(self, object_): <NEW_LINE> <INDENT> if not isinstance(object_, basestring): <NEW_LINE> <INDENT> raise TypeError("object is not a string: %r" % (object_,)) <NEW_LINE> <DEDENT> return numpy.string_(object_) <NEW_LINE> <DEDENT> def fromarray(self, array): <NEW_LINE> <INDENT> return array.tostring()
|
Defines an atom of type ``vlstring``.
This class describes a *row* of the VLArray class, rather than an atom. It
differs from the StringAtom class in that you can only add *one instance of
it to one specific row*, i.e. the :meth:`VLArray.append` method only
accepts one object when the base atom is of this type.
Like StringAtom, this class does not make assumptions on the encoding of
the string, and raw bytes are stored as is. Unicode strings are supported
as long as no character is out of the ASCII set; otherwise, you will need
to *explicitly* convert them to strings before you can save them. For full
Unicode support, using VLUnicodeAtom (see :ref:`VLUnicodeAtom`) is
recommended.
Variable-length string atoms do not accept parameters and they cause the
reads of rows to always return Python strings. You can regard vlstring
atoms as an easy way to save generic variable length strings.
|
6259902930c21e258be997b0
|
class PipedReaderBuilder(ReaderBuilder): <NEW_LINE> <INDENT> def __init__(self, builder, piper): <NEW_LINE> <INDENT> self._builder = builder <NEW_LINE> self._piper = piper <NEW_LINE> <DEDENT> def schema(self): <NEW_LINE> <INDENT> return self._builder.schema() <NEW_LINE> <DEDENT> def enqueue_splits(self, net, split_queue): <NEW_LINE> <INDENT> return self._builder.enqueue_splits(net, split_queue) <NEW_LINE> <DEDENT> def splits(self, net): <NEW_LINE> <INDENT> return self._builder.splits(net) <NEW_LINE> <DEDENT> def new_reader(self, split_queue): <NEW_LINE> <INDENT> output = self._piper(self._builder.new_reader(split_queue)) <NEW_LINE> return output if isinstance(output, Reader) else output.reader()
|
ReaderBuilder that modifies underlying builder by calling `piper`
function on each new reader produced, and return the result of
the function. This way, it is possible to append data processing
pipelines that will be replicated for each reader that gets created.
E.g.:
PipedReaderBuilder(
ReaderBuilder(...),
lambda reader: pipe(reader, processor=my_proc))
|
6259902921a7993f00c66f22
|
class Vertex: <NEW_LINE> <INDENT> def __init__(self, p): <NEW_LINE> <INDENT> self.x = p[0] <NEW_LINE> self.y = p[1] <NEW_LINE> self.next = None <NEW_LINE> self.expanded = False <NEW_LINE> <DEDENT> def expand(self, lp): <NEW_LINE> <INDENT> v1 = self <NEW_LINE> v2 = self.next <NEW_LINE> v = array([v2.y - v1.y, v1.x - v2.x]) <NEW_LINE> v /= norm(v) <NEW_LINE> try: <NEW_LINE> <INDENT> z = optimize_direction(v, lp) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> self.expanded = True <NEW_LINE> return None <NEW_LINE> <DEDENT> xopt, yopt = z <NEW_LINE> if abs(cross([xopt-v1.x, yopt-v1.y], [v1.x-v2.x, v1.y-v2.y])) < 1e-4: <NEW_LINE> <INDENT> self.expanded = True <NEW_LINE> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> vnew = Vertex([xopt, yopt]) <NEW_LINE> vnew.next = self.next <NEW_LINE> self.next = vnew <NEW_LINE> self.expanded = False <NEW_LINE> return vnew
|
Vertex of the projected polygon, with a pointer to its successor.
|
625990295e10d32532ce40d6
|
class SuitSplitDateTimeWidget(forms.SplitDateTimeWidget): <NEW_LINE> <INDENT> def __init__(self, attrs=None): <NEW_LINE> <INDENT> widgets = [SuitDateWidget, SuitTimeWidget] <NEW_LINE> forms.MultiWidget.__init__(self, widgets, attrs) <NEW_LINE> <DEDENT> def format_output(self, rendered_widgets): <NEW_LINE> <INDENT> out_tpl = '<div class="datetime form-inline">%s %s</div>' <NEW_LINE> return mark_safe(out_tpl % (rendered_widgets[0], rendered_widgets[1]))
|
A SplitDateTime Widget that has some admin-specific styling.
|
62599029d18da76e235b7920
|
class TransactionViewSet(ModelViewSet): <NEW_LINE> <INDENT> queryset = Transaction.objects.all() <NEW_LINE> serializer_class = TransactionSerializer
|
Вывод транзакций
|
625990290a366e3fb87dd98e
|
class Net(BaseNet): <NEW_LINE> <INDENT> def forward(self, inputs): <NEW_LINE> <INDENT> axes = self.config['axes'] <NEW_LINE> starts = self.config['starts'] <NEW_LINE> ends = self.config['ends'] <NEW_LINE> if self.config['isStartsTensor']: <NEW_LINE> <INDENT> starts = paddle.to_tensor(starts) <NEW_LINE> <DEDENT> if self.config['isEndsTensor']: <NEW_LINE> <INDENT> ends = paddle.to_tensor(ends) <NEW_LINE> <DEDENT> x = paddle.slice(inputs, axes=axes, starts=starts, ends=ends) <NEW_LINE> return x
|
simple Net
|
6259902921bff66bcd723c08
|
class Query: <NEW_LINE> <INDENT> def __init__(self, raw_query: str, **config): <NEW_LINE> <INDENT> self._tree = _QueryParser(config).parse(raw_query) <NEW_LINE> <DEDENT> def execute( self, lexicon: Lexicon, index: CaptionIndex, documents=None, ignore_word_not_found=True, case_insensitive=False ) -> Iterable[CaptionIndex.Document]: <NEW_LINE> <INDENT> return self._tree.eval(_Expr.Context( lexicon, index, documents, ignore_word_not_found, case_insensitive)) <NEW_LINE> <DEDENT> def estimate_cost(self, lexicon: Lexicon) -> float: <NEW_LINE> <INDENT> return self._tree.estimate_cost(lexicon)
|
Parse and execute queries
|
62599029711fe17d825e146d
|
class AudioModel(UserOwnedModel, EntityModel): <NEW_LINE> <INDENT> audio = models.FileField( verbose_name='音频', upload_to='audio/', blank=True, null=True, ) <NEW_LINE> duration = models.FloatField( verbose_name='时长', blank=True, default=0, ) <NEW_LINE> is_active = models.BooleanField( verbose_name='是否可用', default=True, ) <NEW_LINE> audio_mp3 = models.FileField( verbose_name='音频mp3文件', upload_to='audio/mp3/', null=True, blank=True, ) <NEW_LINE> audio_ogg = models.FileField( verbose_name='音频ogg文件', upload_to='audio/ogg/', null=True, blank=True, ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = '音频' <NEW_LINE> verbose_name_plural = '音频' <NEW_LINE> db_table = 'base_audio' <NEW_LINE> <DEDENT> def url(self): <NEW_LINE> <INDENT> return self.audio.url if self.is_active else None <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def make_from_uploaded_file(cls, file): <NEW_LINE> <INDENT> audio = cls.objects.create() <NEW_LINE> temp_path = os.path.join( settings.MEDIA_ROOT, 'audio', '{}.{}'.format(audio.id, file.name.split('.')[-1]), ) <NEW_LINE> if settings.NORMALIZE_AUDIO: <NEW_LINE> <INDENT> ogg_path = 'audio/ogg/{}.ogg'.format(audio.id) <NEW_LINE> mp3_path = 'audio/mp3/{}.mp3'.format(audio.id) <NEW_LINE> os.makedirs(os.path.join(settings.MEDIA_ROOT, os.path.dirname(ogg_path)), exist_ok=True) <NEW_LINE> os.makedirs(os.path.join(settings.MEDIA_ROOT, os.path.dirname(mp3_path)), exist_ok=True) <NEW_LINE> of = open(temp_path, 'wb') <NEW_LINE> of.write(file.read()) <NEW_LINE> of.close() <NEW_LINE> from .libs.audiotranscode import AudioTranscode <NEW_LINE> at = AudioTranscode() <NEW_LINE> at.transcode(temp_path, os.path.join(settings.MEDIA_ROOT, ogg_path)) <NEW_LINE> at.transcode(temp_path, os.path.join(settings.MEDIA_ROOT, mp3_path)) <NEW_LINE> audio.audio_ogg.name = ogg_path <NEW_LINE> audio.audio_mp3.name = mp3_path <NEW_LINE> from mutagen.mp3 import MP3 <NEW_LINE> audio.duration = MP3(os.path.join(settings.MEDIA_ROOT, mp3_path)).info.length <NEW_LINE> <DEDENT> else: 
<NEW_LINE> <INDENT> raw_path = 'audio/raw/{}'.format(file.name) <NEW_LINE> os.makedirs(os.path.join(settings.MEDIA_ROOT, os.path.dirname(raw_path)), exist_ok=True) <NEW_LINE> of = open(raw_path, 'wb') <NEW_LINE> of.write(file.read()) <NEW_LINE> of.close() <NEW_LINE> audio.audio.name = raw_path <NEW_LINE> <DEDENT> audio.save() <NEW_LINE> return audio
|
音频对象
|
6259902930c21e258be997b2
|
class BluetoothAddress(bytes): <NEW_LINE> <INDENT> def __new__(cls, value: Union[str, bytes]) -> "BluetoothAddress": <NEW_LINE> <INDENT> if isinstance(value, str): <NEW_LINE> <INDENT> value = [int(x, 16) for x in value.split(":")] <NEW_LINE> <DEDENT> if len(value) != 6: <NEW_LINE> <INDENT> raise TypeError("requires exactly 6 bytes") <NEW_LINE> <DEDENT> return bytes.__new__(BluetoothAddress, value) <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return ":".join(f"{b:02X}" for b in self) <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return f"{self.__class__.__name__}({repr(str(self))})"
|
A Bluetooth address.
These addresses use the same EUI-48 format as MAC addresses but are for
identifying individual Bluetooth devices instead of network cards.
|
625990291f5feb6acb163b97
|
class ScenarioLeafItem(GrayIfLastMixin, EditableMixin, LeafItem): <NEW_LINE> <INDENT> @property <NEW_LINE> def item_type(self): <NEW_LINE> <INDENT> return "scenario" <NEW_LINE> <DEDENT> def add_item_to_db(self, db_item): <NEW_LINE> <INDENT> self.db_mngr.add_scenarios({self.db_map: [db_item]}) <NEW_LINE> <DEDENT> def update_item_in_db(self, db_item): <NEW_LINE> <INDENT> self.db_mngr.update_scenarios({self.db_map: [db_item]}) <NEW_LINE> <DEDENT> @property <NEW_LINE> def scenario_alternative_root_item(self): <NEW_LINE> <INDENT> return self.child(1) <NEW_LINE> <DEDENT> def _do_finalize(self): <NEW_LINE> <INDENT> if not self.id: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> super()._do_finalize() <NEW_LINE> self.append_children([ScenarioActiveItem(), ScenarioAlternativeRootItem()]) <NEW_LINE> <DEDENT> def handle_updated_in_db(self): <NEW_LINE> <INDENT> super().handle_updated_in_db() <NEW_LINE> self.scenario_alternative_root_item.update_alternative_id_list()
|
A scenario leaf item.
|
62599029bf627c535bcb245e
|
class OutStream(object): <NEW_LINE> <INDENT> flush_interval = 0.05 <NEW_LINE> topic=None <NEW_LINE> def __init__(self, session, pub_socket, name): <NEW_LINE> <INDENT> self.session = session <NEW_LINE> self.pub_socket = pub_socket <NEW_LINE> self.name = name <NEW_LINE> self.parent_header = {} <NEW_LINE> self._new_buffer() <NEW_LINE> <DEDENT> def set_parent(self, parent): <NEW_LINE> <INDENT> self.parent_header = extract_header(parent) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.pub_socket = None <NEW_LINE> <DEDENT> def flush(self): <NEW_LINE> <INDENT> if self.pub_socket is None: <NEW_LINE> <INDENT> raise ValueError(u'I/O operation on closed file') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data = self._buffer.getvalue() <NEW_LINE> if data: <NEW_LINE> <INDENT> if not isinstance(data, unicode): <NEW_LINE> <INDENT> enc = sys.stdin.encoding or sys.getdefaultencoding() <NEW_LINE> data = data.decode(enc, 'replace') <NEW_LINE> <DEDENT> content = {u'name':self.name, u'data':data} <NEW_LINE> msg = self.session.send(self.pub_socket, u'stream', content=content, parent=self.parent_header, ident=self.topic) <NEW_LINE> logger.debug(msg) <NEW_LINE> self._buffer.close() <NEW_LINE> self._new_buffer() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def isatty(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> raise IOError('Read not supported on a write only stream.') <NEW_LINE> <DEDENT> def read(self, size=-1): <NEW_LINE> <INDENT> raise IOError('Read not supported on a write only stream.') <NEW_LINE> <DEDENT> def readline(self, size=-1): <NEW_LINE> <INDENT> raise IOError('Read not supported on a write only stream.') <NEW_LINE> <DEDENT> def write(self, string): <NEW_LINE> <INDENT> if self.pub_socket is None: <NEW_LINE> <INDENT> raise ValueError('I/O operation on closed file') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if type(string) == unicode: <NEW_LINE> <INDENT> string = string.encode('utf-8') <NEW_LINE> <DEDENT> 
self._buffer.write(string) <NEW_LINE> current_time = time.time() <NEW_LINE> if self._start <= 0: <NEW_LINE> <INDENT> self._start = current_time <NEW_LINE> <DEDENT> elif current_time - self._start > self.flush_interval: <NEW_LINE> <INDENT> self.flush() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def writelines(self, sequence): <NEW_LINE> <INDENT> if self.pub_socket is None: <NEW_LINE> <INDENT> raise ValueError('I/O operation on closed file') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for string in sequence: <NEW_LINE> <INDENT> self.write(string) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _new_buffer(self): <NEW_LINE> <INDENT> self._buffer = StringIO() <NEW_LINE> self._start = -1
|
A file like object that publishes the stream to a 0MQ PUB socket.
|
625990295166f23b2e24437d
|
class Sent(UnaryConcept): <NEW_LINE> <INDENT> def get_next_concept(self, lexed): <NEW_LINE> <INDENT> first_unconsumed_concept = find(lambda c: c is not None, lexed) <NEW_LINE> if type(first_unconsumed_concept) not in {Eval, Def}: <NEW_LINE> <INDENT> raise SyntaxError("Sentence must be evaluation or definition") <NEW_LINE> <DEDENT> return lexed[first_unconsumed_concept.position]
|
Statement. Collects all concepts to the left
|
62599029d10714528d69ee60
|
class Declaration(object): <NEW_LINE> <INDENT> __slots__ = ( 'end_location', 'name', 'spelling', 'start_location', 'usr', ) <NEW_LINE> def __init__(self, cursor): <NEW_LINE> <INDENT> self.name = cursor.displayname <NEW_LINE> self.spelling = cursor.spelling <NEW_LINE> self.usr = cursor.get_usr() <NEW_LINE> start = cursor.extent.start <NEW_LINE> end = cursor.extent.end <NEW_LINE> self.start_location = (start.file, start.line, start.column, start.offset) <NEW_LINE> self.end_location = (end.file, end.line, end.column, end.offset) <NEW_LINE> self._init() <NEW_LINE> <DEDENT> def _init(self): <NEW_LINE> <INDENT> pass
|
The base class for all declarations.
This class is never instantiated directly. It provides fields common to
all declarations.
Declarations are intended to be dumb containers. Most of the logic for
populating data can be found in the built-in observers. The few exceptions
are where it makes sense to consolidate redundancy, such as in the base
class.
Declarations all contain the following properties:
end_location -- The source location where the declaration ends. Is a tuple
of (filename, line, column, offset). Line and column are indexed from
1. Offset is the byte offset in the source file.
start_location -- The source location where the declaration starts. Is a
tuple of (filename, line, column, offset). Line and column are indexed
from 1. Offset is the byte offset in the source file.
usr -- The Unified Symbol Resolution (USR) string for the entity. These
are effectively global identifiers that can be used to test for
type equality.
TODO capture all fields from Clang bindings automatically.
|
625990299b70327d1c57fd2a
|
class TestSFCardViewerRowData: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def test_init_and_property() -> None: <NEW_LINE> <INDENT> used_date = "2018/11/13" <NEW_LINE> is_commuter_pass_enter = "" <NEW_LINE> railway_company_name_enter = "メトロ" <NEW_LINE> station_name_enter = "六本木一丁目" <NEW_LINE> is_commuter_pass_exit = "" <NEW_LINE> railway_company_name_exit = "メトロ" <NEW_LINE> station_name_exit = "後楽園" <NEW_LINE> used_amount = "195" <NEW_LINE> balance = "3601" <NEW_LINE> note = "" <NEW_LINE> sf_card_viewer_row_data = RowDataFactory(SFCardViewerRowData).create( [ used_date, is_commuter_pass_enter, railway_company_name_enter, station_name_enter, is_commuter_pass_exit, railway_company_name_exit, station_name_exit, used_amount, balance, note, ] ) <NEW_LINE> assert sf_card_viewer_row_data.is_commuter_pass_enter == is_commuter_pass_enter <NEW_LINE> assert sf_card_viewer_row_data.railway_company_name_enter == railway_company_name_enter <NEW_LINE> assert sf_card_viewer_row_data.station_name_enter == station_name_enter <NEW_LINE> assert sf_card_viewer_row_data.is_commuter_pass_exit == is_commuter_pass_exit <NEW_LINE> assert sf_card_viewer_row_data.railway_company_name_exit == railway_company_name_exit <NEW_LINE> assert sf_card_viewer_row_data.used_amount == 195 <NEW_LINE> assert sf_card_viewer_row_data.balance == balance <NEW_LINE> assert sf_card_viewer_row_data.note == Note.EMPTY <NEW_LINE> assert sf_card_viewer_row_data.date == datetime(2018, 11, 13, 0, 0) <NEW_LINE> assert sf_card_viewer_row_data.store_name == station_name_exit
|
Tests for SFCardViewerRowData.
|
6259902915baa72349462f41
|
class GRRConfig(rdfvalue.RDFProtoStruct): <NEW_LINE> <INDENT> protobuf = jobs_pb2.GRRConfig
|
The configuration of a GRR Client.
|
625990293eb6a72ae038b60d
|
@gin.configurable <NEW_LINE> class EventFirstOccurrenceMetric(ExperimentMetric): <NEW_LINE> <INDENT> event_never_occurred_val = -1 <NEW_LINE> def __init__(self, check_event_fn, name): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> self._first_occurrence_epoch = None <NEW_LINE> self._check_event_fn = check_event_fn <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> def update_state(self, epoch, episodes): <NEW_LINE> <INDENT> if (self._first_occurrence_epoch is None and self._check_event_fn(episodes)): <NEW_LINE> <INDENT> self._first_occurrence_epoch = epoch <NEW_LINE> <DEDENT> <DEDENT> def result(self, epoch): <NEW_LINE> <INDENT> return self._first_occurrence_epoch or self.event_never_occurred_val
|
Logs first occurrence of an event.
|
62599029711fe17d825e146e
|
class Hansen(Benchmark): <NEW_LINE> <INDENT> def __init__(self, dimensions=2): <NEW_LINE> <INDENT> Benchmark.__init__(self, dimensions) <NEW_LINE> self._bounds = list(zip([-10.0] * self.N, [10.0] * self.N)) <NEW_LINE> self.global_optimum = [[-7.58989583, -7.70831466]] <NEW_LINE> self.fglob = -176.54179 <NEW_LINE> <DEDENT> def fun(self, x, *args): <NEW_LINE> <INDENT> self.nfev += 1 <NEW_LINE> i = arange(5.) <NEW_LINE> a = (i + 1) * cos(i * x[0] + i + 1) <NEW_LINE> b = (i + 1) * cos((i + 2) * x[1] + i + 1) <NEW_LINE> return sum(a) * sum(b)
|
Hansen objective function.
This class defines the Hansen [1]_ global optimization problem. This is a
multimodal minimization problem defined as follows:
.. math::
f_{\text{Hansen}}(x) = \left[ \sum_{i=0}^4(i+1)\cos(ix_1+i+1)\right ]
\left[\sum_{j=0}^4(j+1)\cos[(j+2)x_2+j+1])\right ]
with :math:`x_i \in [-10, 10]` for :math:`i = 1, 2`.
*Global optimum*: :math:`f(x) = -176.54179` for
:math:`x = [-7.58989583, -7.70831466]`.
.. [1] Jamil, M. & Yang, X.-S. A Literature Survey of Benchmark Functions
For Global Optimization Problems Int. Journal of Mathematical Modelling
and Numerical Optimisation, 2013, 4, 150-194.
TODO Jamil #61 is missing the starting value of i.
|
62599029d99f1b3c44d0664b
|
class WsDeployment(Construct): <NEW_LINE> <INDENT> def __init__( self, scope: Stack, id: str, ws_stage: WsStage, description: Optional[str] = None ) -> None: <NEW_LINE> <INDENT> deployment_func = StageDeploymentSingletonFunction( scope=scope, name=f'{id}Function' ) <NEW_LINE> StageDeploymentResource( scope=scope, resource_name=f'{id}Resource', deployment_function=deployment_func, api_id=ws_stage.api_id, stage_name=ws_stage.stage_name, description=description or f'Deployment for {ws_stage.stage_name}.' ) <NEW_LINE> super().__init__( scope=scope, id=id, )
|
Creates web socket api deployment.
|
625990295166f23b2e24437f
|
class Environment(object): <NEW_LINE> <INDENT> def __init__(self, model): <NEW_LINE> <INDENT> classes, activityenum = core.steplist(model), model.ACTIVITY <NEW_LINE> stepclasses = list() <NEW_LINE> stepinstances = list() <NEW_LINE> knownclassnames = set() <NEW_LINE> knowninstancenames = set() <NEW_LINE> AvailableStepGroup = namedtuple('AvailableStepGroup', 'default_instances classes') <NEW_LINE> for cls in unifex._make_stepdict(model).values(): <NEW_LINE> <INDENT> if not issubclass(cls, core.StepAbstract): <NEW_LINE> <INDENT> raise ValueError("Classes used as steps must inherit from core.StepAbstract.") <NEW_LINE> <DEDENT> stepname = cls.__name__.lower() <NEW_LINE> if stepname is None: <NEW_LINE> <INDENT> raise ValueError('The step name for class "%s" is not defined.' % cls) <NEW_LINE> <DEDENT> elif stepname in knownclassnames: <NEW_LINE> <INDENT> raise ValueError('The step name "%s" is defined twice.' % stepname) <NEW_LINE> <DEDENT> elif stepname.lower() in knowninstancenames: <NEW_LINE> <INDENT> raise ValueError('The step name "%s", when converted to lower case, is defined twice.' 
% stepname) <NEW_LINE> <DEDENT> stepclasses.append(cls) <NEW_LINE> stepinstances.append(cls(cls._default_execpath)) <NEW_LINE> knownclassnames.add(stepname) <NEW_LINE> knowninstancenames.add(stepname.lower()) <NEW_LINE> <DEDENT> stepclasses = tuple(stepclasses) <NEW_LINE> stepinstances = tuple(stepinstances) <NEW_LINE> od = OrderedDict((x, []) for x in activityenum) <NEW_LINE> for s,si in zip(stepclasses, stepinstances): <NEW_LINE> <INDENT> for a in s.activities: <NEW_LINE> <INDENT> od[a].append((s, si)) <NEW_LINE> <DEDENT> <DEDENT> activitylist = list() <NEW_LINE> for activity, classinstancepairs in od.items(): <NEW_LINE> <INDENT> clsnames = list() <NEW_LINE> instancenames = list() <NEW_LINE> for stepcls, stepinstance in classinstancepairs: <NEW_LINE> <INDENT> clsname = stepcls.__name__ <NEW_LINE> instancename = clsname.lower() <NEW_LINE> clsnames.append(clsname) <NEW_LINE> instancenames.append(instancename) <NEW_LINE> <DEDENT> activitylist.append(FrozenNamespace((key, element[1]) for key, element in zip(instancenames, classinstancepairs))) <NEW_LINE> <DEDENT> self.__activities = FrozenNamespace((key.name, value) for key, value in zip(od.keys(), activitylist)) <NEW_LINE> self._stepclasses = stepclasses <NEW_LINE> self._stepinstances = stepinstances <NEW_LINE> <DEDENT> @property <NEW_LINE> def activities(self): <NEW_LINE> <INDENT> return self.__activities <NEW_LINE> <DEDENT> @property <NEW_LINE> def stepclasses(self): <NEW_LINE> <INDENT> return self._stepclasses <NEW_LINE> <DEDENT> @property <NEW_LINE> def stepinstances(self): <NEW_LINE> <INDENT> return self._stepinstances
|
Represent the current environment in a (presumably) easy way for writing
recipes.
|
6259902923e79379d538d4b4
|
class ApplyTable(models.Model): <NEW_LINE> <INDENT> name = models.CharField('名称',max_length=200) <NEW_LINE> desp = models.TextField(verbose_name='描述',blank=True) <NEW_LINE> file = models.CharField('文件材料',max_length=500,help_text='请选择(PDF/图片)上传') <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.name or '未命名表格' <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name='申请表格表'
|
申请表格
|
6259902963f4b57ef0086547
|
class MyInt(int): <NEW_LINE> <INDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return int(self) != int(other) <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return int(self) == int(other)
|
class to override int comparrisons
|
62599029e76e3b2f99fd99b6
|
class WafStateTest(TestCase): <NEW_LINE> <INDENT> def test_wafstate(self): <NEW_LINE> <INDENT> print("========== waf state test begin ==========") <NEW_LINE> response = self.client.get('/api/statefile/') <NEW_LINE> print(response.content) <NEW_LINE> print("********** waf state test end *****************\n")
|
class to Test waf state
|
62599029a8ecb033258721c7
|
class AnalysisVersion: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.version = "1.0.0" <NEW_LINE> <DEDENT> def getVersion(self): <NEW_LINE> <INDENT> return self.version
|
Analysis version class to store version information and return
it to the caller.
|
625990298c3a8732951f7502
|
class NegativeBinomial(Discrete): <NEW_LINE> <INDENT> def __init__(self, mu, alpha, *args, **kwargs): <NEW_LINE> <INDENT> super(NegativeBinomial, self).__init__(*args, **kwargs) <NEW_LINE> self.mu = mu <NEW_LINE> self.alpha = alpha <NEW_LINE> self.mode = floor(mu).astype('int32') <NEW_LINE> <DEDENT> def logp(self, value): <NEW_LINE> <INDENT> mu = self.mu <NEW_LINE> alpha = self.alpha <NEW_LINE> pois = bound(logpow(mu, value) - factln(value) - mu, mu > 0, value >= 0) <NEW_LINE> negbinom = bound(gammaln(value + alpha) - factln(value) - gammaln(alpha) + logpow(mu / (mu + alpha), value) + logpow(alpha / (mu + alpha), alpha), mu > 0, alpha > 0, value >= 0) <NEW_LINE> return switch(1*(alpha > 1e10), pois, negbinom)
|
Negative binomial log-likelihood.
The negative binomial distribution describes a Poisson random variable
whose rate parameter is gamma distributed. PyMC's chosen parameterization
is based on this mixture interpretation.
.. math::
f(x \mid \mu, lpha) = rac{\Gamma(x+lpha)}{x! \Gamma(lpha)} (lpha/(\mu+lpha))^lpha (\mu/(\mu+lpha))^x
Parameters
----------
mu : float
mu > 0
alpha : float
alpha > 0
.. note::
- :math:`E[x]=\mu`
|
62599029d99f1b3c44d0664d
|
class MapSwipeDialogTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_icon_png(self): <NEW_LINE> <INDENT> path = ':/plugins/MapSwipe/icon.png' <NEW_LINE> icon = QIcon(path) <NEW_LINE> self.assertFalse(icon.isNull())
|
Test rerources work.
|
6259902930c21e258be997b7
|
class Bullet(pygame.sprite.Sprite): <NEW_LINE> <INDENT> def __init__(self, target, origin): <NEW_LINE> <INDENT> pygame.sprite.Sprite.__init__(self) <NEW_LINE> self.image = pygame.Surface([2, 2]) <NEW_LINE> self.image.fill(color.BLACK) <NEW_LINE> self.rect = self.image.get_rect() <NEW_LINE> self.rect.center = (origin[0], origin[1]) <NEW_LINE> self.x, self.y = float(self.rect.centerx), float(self.rect.centery) <NEW_LINE> self.speed = 2. <NEW_LINE> self.bullet_vector = self.find_bullet_vector(target) <NEW_LINE> self.high = False <NEW_LINE> <DEDENT> def find_bullet_vector(self, target): <NEW_LINE> <INDENT> distance = [target[0] - self.rect.centerx, target[1] - self.rect.centery] <NEW_LINE> norm = math.sqrt(distance[0] ** 2 + distance[1] ** 2) <NEW_LINE> direction = [distance[0]/norm, distance[1]/norm] <NEW_LINE> bullet_vector = [direction[0]*self.speed, direction[1] * self.speed] <NEW_LINE> return bullet_vector <NEW_LINE> <DEDENT> def update(self, wall_list): <NEW_LINE> <INDENT> self.x += self.bullet_vector[0] <NEW_LINE> self.y += self.bullet_vector[1] <NEW_LINE> self.rect.center = (int(round(self.x)), int(round(self.y))) <NEW_LINE> wall_hit_list = pygame.sprite.spritecollide(self, wall_list, False) <NEW_LINE> for wall in wall_hit_list: <NEW_LINE> <INDENT> if wall.istall: <NEW_LINE> <INDENT> self.kill() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.high = True
|
This class represents the bullets shot in the game
Parameters
----------
target : Int List with length 2
Specifies a point (x,y) that the Bullet should pass through
origin : Int List with length 2
Specifies the point (x,y) that the Bullet should start from
Attributes
---------
image : Pygame's Sprite's image
Represents the Bullet's image
rect : Pygame's Sprite's rect
Represents the Bullet's rectangle
speed : Float
Represents the speed in which the Bullet will move
bullet_vector : Float List with length 2
Represents the direction the Bullet should follow
high : boolean
True if the Bullet has passed through a low wall and False otherwise
x : float
Represents the x coordinate of the center of the object
y : float
Represents the y coordinate of the center of the object
|
62599029e76e3b2f99fd99b8
|
class ReweightedLasso(BaseEstimator, RegressorMixin): <NEW_LINE> <INDENT> def __init__(self, alpha_fraction=.01, max_iter=2000, max_iter_reweighting=100, tol=1e-4): <NEW_LINE> <INDENT> self.alpha_fraction = alpha_fraction <NEW_LINE> self.max_iter = max_iter <NEW_LINE> self.max_iter_reweighting = max_iter_reweighting <NEW_LINE> self.tol = tol <NEW_LINE> <DEDENT> def fit(self, X, y): <NEW_LINE> <INDENT> n_samples, n_features = X.shape <NEW_LINE> self.coef_ = np.zeros(n_features) <NEW_LINE> weights = np.ones_like(self.coef_) <NEW_LINE> coef_old = self.coef_.copy() <NEW_LINE> self.loss_ = [] <NEW_LINE> alpha_max = abs(X.T.dot(y)).max() / len(X) <NEW_LINE> alpha = self.alpha_fraction * alpha_max <NEW_LINE> for i in range(self.max_iter_reweighting): <NEW_LINE> <INDENT> Xw = X * weights <NEW_LINE> coef_ = solver_lasso(Xw, y, alpha, self.max_iter) <NEW_LINE> coef_ = coef_ * weights <NEW_LINE> err = abs(coef_ - coef_old).max() <NEW_LINE> err /= max(abs(coef_).max(), abs(coef_old).max(), 1.) <NEW_LINE> coef_old = coef_.copy() <NEW_LINE> weights = 2 * (abs(coef_) ** 0.5 + 1e-10) <NEW_LINE> obj = 0.5 * ((X @ coef_ - y) ** 2).sum() / n_samples <NEW_LINE> obj += (alpha * abs(coef_) ** 0.5).sum() <NEW_LINE> self.loss_.append(obj) <NEW_LINE> if err < self.tol and i: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> if i == self.max_iter_reweighting - 1 and i: <NEW_LINE> <INDENT> warnings.warn('Reweighted objective did not converge.' + ' You might want to increase ' + 'the number of iterations of reweighting.' + ' Fitting data with very small alpha' + ' may cause precision problems.', ConvergenceWarning) <NEW_LINE> <DEDENT> self.coef_ = coef_ <NEW_LINE> <DEDENT> def predict(self, X): <NEW_LINE> <INDENT> return np.dot(X, self.coef_)
|
Reweighted Lasso estimator with L1 regularizer.
The optimization objective for Reweighted Lasso is::
(1 / (2 * n_samples)) * ||Y - XW||^2_Fro + alpha * ||W||_0.5
Where::
||W||_0.5 = sum_i sum_j sqrt|w_ij|
Parameters
----------
alpha : (float or array-like), shape (n_tasks)
Optional, default ones(n_tasks)
Constant that multiplies the L0.5 term. Defaults to 1.0
max_iter : int, optional
The maximum number of inner loop iterations
max_iter_reweighting : int, optional
Maximum number of reweighting steps i.e outer loop iterations
tol : float, optional
The tolerance for the optimization: if the updates are
smaller than ``tol``, the optimization code checks the
dual gap for optimality and continues until it is smaller
than ``tol``.
Attributes
----------
coef_ : array, shape (n_features,)
Parameter vector (W in the cost function formula).
|
6259902921bff66bcd723c0e
|
class Deck(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.deck = [] <NEW_LINE> self.shuffled = False <NEW_LINE> for suit in SUITS: <NEW_LINE> <INDENT> for rank in RANKS: <NEW_LINE> <INDENT> self.deck.append(Card(suit, rank)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> deck_comp = '' <NEW_LINE> for card in self.deck: <NEW_LINE> <INDENT> deck_comp += '\n' + card.__str__() <NEW_LINE> <DEDENT> return "The deck has: " + deck_comp <NEW_LINE> <DEDENT> def shuffle(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> random.shuffle(self.deck) <NEW_LINE> self.shuffled = True <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> def deal(self): <NEW_LINE> <INDENT> single_card = self.deck.pop() <NEW_LINE> return single_card
|
class layout for all items and methods that compromise a deck
|
625990293eb6a72ae038b611
|
class PyPymc3(PythonPackage): <NEW_LINE> <INDENT> homepage = "http://github.com/pymc-devs/pymc3" <NEW_LINE> pypi = "pymc3/pymc3-3.8.tar.gz" <NEW_LINE> version('3.8', sha256='1bb2915e4a29877c681ead13932b0b7d276f7f496e9c3f09ba96b977c99caf00') <NEW_LINE> depends_on('python@3.5.4:', type=('build', 'run')) <NEW_LINE> depends_on('py-setuptools', type='build') <NEW_LINE> depends_on('py-arviz@0.4.1:', type=('build', 'run')) <NEW_LINE> depends_on('py-theano@1.0.4:', type=('build', 'run')) <NEW_LINE> depends_on('py-numpy@1.13.0:', type=('build', 'run')) <NEW_LINE> depends_on('py-scipy@0.18.1:', type=('build', 'run')) <NEW_LINE> depends_on('py-pandas@0.18.0:', type=('build', 'run')) <NEW_LINE> depends_on('py-patsy@0.4.0:', type=('build', 'run')) <NEW_LINE> depends_on('py-tqdm@4.8.4:', type=('build', 'run')) <NEW_LINE> depends_on('py-h5py@2.7.0:', type=('build', 'run'))
|
PyMC3 is a Python package for Bayesian statistical modeling and
Probabilistic Machine Learning focusing on advanced Markov chain Monte
Carlo (MCMC) and variational inference (VI) algorithms. Its flexibility and
extensibility make it applicable to a large suite of problems.
|
625990295e10d32532ce40da
|
class RHChatManageEventRemove(RHEventChatroomMixin, RHChatManageEventBase): <NEW_LINE> <INDENT> def _checkParams(self, params): <NEW_LINE> <INDENT> RHChatManageEventBase._checkParams(self, params) <NEW_LINE> RHEventChatroomMixin._checkParams(self) <NEW_LINE> <DEDENT> def _process(self): <NEW_LINE> <INDENT> reason = '{} has requested to delete this room.'.format(to_unicode(session.user.full_name)) <NEW_LINE> chatroom_deleted = self.event_chatroom.delete(reason) <NEW_LINE> notify_deleted(self.chatroom, self.event, session.user, chatroom_deleted) <NEW_LINE> if chatroom_deleted: <NEW_LINE> <INDENT> flash(_('Chatroom deleted'), 'success') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> flash(_('Chatroom removed from event'), 'success') <NEW_LINE> <DEDENT> self.event.log(EventLogRealm.management, EventLogKind.change, 'Chat', 'Chatroom removed: {}'.format(self.chatroom.name), session.user, data={'Deleted from server': 'Yes' if chatroom_deleted else 'No'}) <NEW_LINE> return redirect(url_for_plugin('.manage_rooms', self.event))
|
Removes a chatroom from an event (and if necessary from the server)
|
62599029d18da76e235b7924
|
class validate_in(validate_base): <NEW_LINE> <INDENT> def check_arg(self, arg, values, exception): <NEW_LINE> <INDENT> if arg not in values: <NEW_LINE> <INDENT> raise exception("%s:%d %s must be one of %s" % (self.file, self.line, arg, values))
|
Decorator to validate argument is in a set of valid argument values
The decorator expects one or more arguments, which are 3-tuples of
(name, list, exception), where name is the argument name in the
function being decorated, list is the list of valid argument values
and exception is the exception type to be raised if validation fails.
|
62599029925a0f43d25e8ff5
|
class SvnCreateCustomTagDirStep(AbstractStep): <NEW_LINE> <INDENT> def __init__( self, dirTrunk, dirTag ): <NEW_LINE> <INDENT> AbstractStep.__init__( self, "Svn Create Tag" ) <NEW_LINE> self.dirTrunk = dirTrunk <NEW_LINE> self.dirTag = dirTag <NEW_LINE> <DEDENT> def do( self ): <NEW_LINE> <INDENT> self.reporter.message( "TRUNK DIR: %s" % self.dirTrunk ) <NEW_LINE> self.reporter.message( "TAG DIR: %s" % self.dirTag ) <NEW_LINE> commitMessage = "Created by Build" <NEW_LINE> command = 'svn copy --non-interactive --trust-server-cert ' + self.dirTrunk + ' ' + self.dirTag + ' -m \"'+commitMessage+'\"' <NEW_LINE> return ExecProg( command, self.reporter, self.dirTrunk ) == 0
|
Svn Create Tag Dir Step
|
625990298a349b6b436871e6
|
class AfterSaleServiceConditions(BaseApi): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(AfterSaleServiceConditions, self).__init__(*args, **kwargs) <NEW_LINE> self.endpoint = 'after-sales-service-conditions' <NEW_LINE> self.return_policies = AfterSaleServiceConditionReturnPolicies(self) <NEW_LINE> self.implied_warranties = AfterSaleServiceConditionImpliedWarranties(self) <NEW_LINE> self.warranties = AfterSaleServiceConditionWarranties(self)
|
Base Endpoint
|
62599029e76e3b2f99fd99ba
|
@implementer(IViewView) <NEW_LINE> class SiteLayoutView(BrowserView): <NEW_LINE> <INDENT> index = ViewPageTemplateFile(os.path.join("templates", "main_template.pt")) <NEW_LINE> def __init__(self, context, request, name="layout"): <NEW_LINE> <INDENT> super().__init__(context, request) <NEW_LINE> self.__name__ = name <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> return self.index()
|
Default site layout view called from the site layout resolving view
|
62599029a4f1c619b294f5a2
|
class HTTPSession(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> requests.packages.urllib3.disable_warnings() <NEW_LINE> self.rs = requests.Session() <NEW_LINE> self.baseurl = 'http://radiant-gorge-83016.herokuapp.com' <NEW_LINE> <DEDENT> def print_res_info(self, res): <NEW_LINE> <INDENT> print('INFO: USED URL: {url}'.format(url=res.url)) <NEW_LINE> print('INFO: RESPONSE BODY: {res_body}'.format(res_body=res.text)) <NEW_LINE> print('INFO: RESPONSE STATUS: {status_code}'.format(status_code=res.status_code))
|
Class that holds the requests session for all api calls.
|
625990291d351010ab8f4ac4
|
class CB_RECALL_ANY4res(BaseObj): <NEW_LINE> <INDENT> _strfmt1 = "" <NEW_LINE> _attrlist = ("status",) <NEW_LINE> def __init__(self, unpack): <NEW_LINE> <INDENT> self.status = nfsstat4(unpack)
|
struct CB_RECALL_ANY4res {
nfsstat4 status;
};
|
6259902921a7993f00c66f2c
|
class DecisionTreeRegressionModel(JavaModel): <NEW_LINE> <INDENT> pass
|
Model fitted by DecisionTreeRegressor.
|
6259902966673b3332c3139e
|
class TagRegistered(type): <NEW_LINE> <INDENT> attr_name = 'tag' <NEW_LINE> def __init__(cls, name, bases, namespace): <NEW_LINE> <INDENT> super(TagRegistered, cls).__init__(name, bases, namespace) <NEW_LINE> if not hasattr(cls, '_registry'): <NEW_LINE> <INDENT> cls._registry = {} <NEW_LINE> <DEDENT> meta = cls.__class__ <NEW_LINE> attr = getattr(cls, meta.attr_name, None) <NEW_LINE> if attr: <NEW_LINE> <INDENT> cls._registry[attr] = cls
|
As classes of this metaclass are created, they keep a registry in the
base class of all classes by a class attribute, indicated by attr_name.
>>> FooObject = TagRegistered('FooObject', (), dict(tag='foo'))
>>> FooObject._registry['foo'] is FooObject
True
>>> BarObject = TagRegistered('Barobject', (FooObject,), dict(tag='bar'))
>>> FooObject._registry is BarObject._registry
True
>>> len(FooObject._registry)
2
'...' below should be 'jaraco.classes' but for pytest-dev/pytest#3396
>>> FooObject._registry['bar']
<class '....meta.Barobject'>
|
625990295e10d32532ce40db
|
class DatasetTransform(object): <NEW_LINE> <INDENT> r <NEW_LINE> def __init__(self, configs=None): <NEW_LINE> <INDENT> if configs: <NEW_LINE> <INDENT> self.configs = configs <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.configs = get_specified_config('DatasetTransform') <NEW_LINE> <DEDENT> self.labels = self.configs['labels'] <NEW_LINE> self.transform_strategy = self.configs['transform_strategy'] <NEW_LINE> <DEDENT> def apply_ds(self, ds, trans_func=None, repeat_size=1, batch_size=32, num_parallel_workers=None): <NEW_LINE> <INDENT> if not isinstance(trans_func, list): <NEW_LINE> <INDENT> raise TypeError('trans_func must be list') <NEW_LINE> <DEDENT> ds = ds.map(operations=TypeCast(ts.int32), input_columns="label", num_parallel_workers=num_parallel_workers) <NEW_LINE> ds = ds.map(operations=trans_func, input_columns="image", num_parallel_workers=num_parallel_workers) <NEW_LINE> ds = ds.batch(batch_size, drop_remainder=True) <NEW_LINE> ds = ds.repeat(repeat_size) <NEW_LINE> return ds <NEW_LINE> <DEDENT> def postprocess(self, input, strategy='TOP1_CLASS'): <NEW_LINE> <INDENT> if not isinstance(input, np.ndarray): <NEW_LINE> <INDENT> raise TypeError("Input should be NumPy, got {}.".format(type(input))) <NEW_LINE> <DEDENT> if not input.ndim == 2: <NEW_LINE> <INDENT> raise TypeError("Input should be 2-D Numpy, got {}.".format(input.ndim)) <NEW_LINE> <DEDENT> if strategy not in self.transform_strategy: <NEW_LINE> <INDENT> raise ValueError("Strategy should be one of {}, got {}.".format(self.transform_strategy, strategy)) <NEW_LINE> <DEDENT> softmax = Softmax() <NEW_LINE> score_list = softmax(Tensor(input, dtype=ts.float32)).asnumpy() <NEW_LINE> if strategy == 'TOP1_CLASS': <NEW_LINE> <INDENT> score = max(score_list[0]) <NEW_LINE> return ('TOP1: {}, score: {}').format(str(self.labels[input[0].argmax()]), str(round(score, 5))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> label_index = np.argsort(input[0])[::-1] <NEW_LINE> score_index = np.sort(score_list[0])[::-1] 
<NEW_LINE> top5_labels = [] <NEW_LINE> res = '' <NEW_LINE> top5_scores = score_index[:5].tolist() <NEW_LINE> top_num = int(strategy.split('_')[0].split('TOP')[-1]) <NEW_LINE> for i in range(top_num): <NEW_LINE> <INDENT> top5_labels.append(self.labels[label_index[i]]) <NEW_LINE> res += 'TOP' + str(i+1) + ": " + str(top5_labels[i]) + ", score: " + str(format(top5_scores[i], '.7f')) + '\t' <NEW_LINE> <DEDENT> return res
|
Base class for all dataset transforms.
|
6259902956b00c62f0fb386f
|
class Subject(ABC): <NEW_LINE> <INDENT> @abstractmethod <NEW_LINE> def attach(self, observer: Observer) -> None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def detach(self) -> None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def notify(self) -> None: <NEW_LINE> <INDENT> pass
|
The Subject interface declares a set of methods for managing subscribers.
|
6259902926238365f5fadb01
|
class Cblas(Package): <NEW_LINE> <INDENT> homepage = "http://www.netlib.org/blas/_cblas/" <NEW_LINE> version('2015-06-06', sha256='0f6354fd67fabd909baf57ced2ef84e962db58fae126e4f41b21dd4fec60a2a3', url='http://www.netlib.org/blas/blast-forum/cblas.tgz') <NEW_LINE> depends_on('blas') <NEW_LINE> parallel = False <NEW_LINE> def patch(self): <NEW_LINE> <INDENT> mf = FileFilter('Makefile.in') <NEW_LINE> mf.filter('^BLLIB =.*', 'BLLIB = {0}'.format( ' '.join(self.spec['blas'].libs.libraries))) <NEW_LINE> mf.filter('^CC =.*', 'CC = cc') <NEW_LINE> mf.filter('^FC =.*', 'FC = fc') <NEW_LINE> <DEDENT> def install(self, spec, prefix): <NEW_LINE> <INDENT> make('all') <NEW_LINE> mkdirp(prefix.lib) <NEW_LINE> mkdirp(prefix.include) <NEW_LINE> install('lib/cblas_LINUX.a', prefix.lib.join('libcblas.a')) <NEW_LINE> install('include/cblas.h', prefix.include) <NEW_LINE> install('include/cblas_f77.h', prefix.include)
|
The BLAS (Basic Linear Algebra Subprograms) are routines that
provide standard building blocks for performing basic vector and
matrix operations.
|
62599029925a0f43d25e8ff7
|
class DatFont8(KaitaiStruct): <NEW_LINE> <INDENT> SEQ_FIELDS = ["chars"] <NEW_LINE> def __init__(self, _io, _parent=None, _root=None): <NEW_LINE> <INDENT> self._io = _io <NEW_LINE> self._parent = _parent <NEW_LINE> self._root = _root if _root else self <NEW_LINE> self._debug = collections.defaultdict(dict) <NEW_LINE> <DEDENT> def _read(self): <NEW_LINE> <INDENT> self._debug['chars']['start'] = self._io.pos() <NEW_LINE> self.chars = [None] * (95) <NEW_LINE> for i in range(95): <NEW_LINE> <INDENT> if not 'arr' in self._debug['chars']: <NEW_LINE> <INDENT> self._debug['chars']['arr'] = [] <NEW_LINE> <DEDENT> self._debug['chars']['arr'].append({'start': self._io.pos()}) <NEW_LINE> self.chars[i] = self._io.read_bytes(8) <NEW_LINE> self._debug['chars']['arr'][i]['end'] = self._io.pos() <NEW_LINE> <DEDENT> self._debug['chars']['end'] = self._io.pos()
|
Simple monochrome monospaced font, 95 characters, 8x8 px
characters.
|
62599029ac7a0e7691f73498
|
class ResourceManager(): <NEW_LINE> <INDENT> def __init__(self, acquire_resource, release_resource, check_resource_ok=None): <NEW_LINE> <INDENT> self.acquire_resource = acquire_resource <NEW_LINE> self.release_resource = release_resource <NEW_LINE> if check_resource_ok is None: <NEW_LINE> <INDENT> def check_resource_ok(resource): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> self.check_resource_ok = check_resource_ok <NEW_LINE> <DEDENT> @contextmanager <NEW_LINE> def _cleanup_on_error(self): <NEW_LINE> <INDENT> with ExitStack() as stack: <NEW_LINE> <INDENT> stack.push(self) <NEW_LINE> yield <NEW_LINE> stack.pop_all() <NEW_LINE> <DEDENT> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> resource = self.acquire_resource() <NEW_LINE> with self._cleanup_on_error(): <NEW_LINE> <INDENT> if not self.check_resource_ok(resource): <NEW_LINE> <INDENT> msg = "Failed validation for {!r}" <NEW_LINE> raise RuntimeError(msg.format(resource)) <NEW_LINE> <DEDENT> <DEDENT> return resource <NEW_LINE> <DEDENT> def __exit__(self, *exec_details): <NEW_LINE> <INDENT> self.release_resource()
|
A Resoruce Manager that loosely implements Resource Acquisiton as Initialization.
|
6259902963f4b57ef008654a
|
class BitVecNumRef(BitVecRef): <NEW_LINE> <INDENT> def as_long(self): <NEW_LINE> <INDENT> return int(self.as_string()) <NEW_LINE> <DEDENT> def as_signed_long(self): <NEW_LINE> <INDENT> sz = self.size() <NEW_LINE> val = self.as_long() <NEW_LINE> if val >= 2**(sz - 1): <NEW_LINE> <INDENT> val = val - 2**sz <NEW_LINE> <DEDENT> if val < -2**(sz - 1): <NEW_LINE> <INDENT> val = val + 2**sz <NEW_LINE> <DEDENT> return int(val) <NEW_LINE> <DEDENT> def as_string(self): <NEW_LINE> <INDENT> return Z3_get_numeral_string(self.ctx_ref(), self.as_ast())
|
Bit-vector values.
|
625990296fece00bbaccc969
|
class IAvataSkin(IOpenPlansSkin): <NEW_LINE> <INDENT> pass
|
i am a skin
|
625990298c3a8732951f7508
|
@attributes([ "bucket", "target_prefix", ]) <NEW_LINE> class UpdateS3ErrorPage(object): <NEW_LINE> <INDENT> @property <NEW_LINE> def error_key(self): <NEW_LINE> <INDENT> return '{}error_pages/404.html'.format(self.target_prefix)
|
Update the error_key for an S3 bucket website endpoint to point to a new
path.
If the key is changed, return the old key.
:ivar bytes bucket: Name of bucket to change routing rule for.
:ivar bytes target_prefix: Target prefix to redirect to.
|
62599029be8e80087fbc0029
|
class Ruleset(object): <NEW_LINE> <INDENT> def __init__(self, name, comments=[], props={}, uid=None): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.comments = comments <NEW_LINE> self.props = props <NEW_LINE> if uid: <NEW_LINE> <INDENT> self.uid = uid <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.uid = name <NEW_LINE> <DEDENT> <DEDENT> def process(self, text): <NEW_LINE> <INDENT> matched_lines = {} <NEW_LINE> matched_rules = [] <NEW_LINE> for rule in self.rules: <NEW_LINE> <INDENT> processed_rule = rule.process(text) <NEW_LINE> matched_lines.update(processed_rule.lines) <NEW_LINE> matched_rules.append(processed_rule) <NEW_LINE> <DEDENT> return ProcessedRuleset(self, matched_lines, matched_rules)
|
Set of rules
|
625990290a366e3fb87dd998
|
class DealSubscribeDetailForm(NoManytoManyHintModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = DealSubscribe <NEW_LINE> fields = ['mobile', 'includes', 'excludes', 'is_active', 'msg_count']
|
Detail form for DealSubscribe
|
62599029507cdc57c63a5d58
|
class TestFloodEvacuationVectorHazardFunction(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> registry = ImpactFunctionManager().registry <NEW_LINE> registry.clear() <NEW_LINE> registry.register(FloodEvacuationVectorHazardFunction) <NEW_LINE> <DEDENT> def test_run(self): <NEW_LINE> <INDENT> function = FloodEvacuationVectorHazardFunction.instance() <NEW_LINE> hazard_path = test_data_path('hazard', 'flood_multipart_polygons.shp') <NEW_LINE> exposure_path = test_data_path( 'exposure', 'pop_binary_raster_20_20.asc') <NEW_LINE> hazard_layer = read_layer(hazard_path) <NEW_LINE> exposure_layer = read_layer(exposure_path) <NEW_LINE> function.hazard = hazard_layer <NEW_LINE> function.exposure = exposure_layer <NEW_LINE> field1 = StringParameter() <NEW_LINE> field1.name = 'Affected Field' <NEW_LINE> field1.is_required = True <NEW_LINE> field1.value = 'FLOODPRONE' <NEW_LINE> function.parameters['affected_field'] = field1 <NEW_LINE> field2 = StringParameter() <NEW_LINE> field2.name = 'Affected Value' <NEW_LINE> field2.is_required = True <NEW_LINE> field2.value = 'YES' <NEW_LINE> function.parameters['affected_value'] = field2 <NEW_LINE> function.run() <NEW_LINE> impact = function.impact <NEW_LINE> keywords = impact.get_keywords() <NEW_LINE> affected_population = numpy.nansum(impact.get_data()) <NEW_LINE> total_population = keywords['total_population'] <NEW_LINE> self.assertEqual(affected_population, 20) <NEW_LINE> self.assertEqual(total_population, 200) <NEW_LINE> <DEDENT> def test_filter(self): <NEW_LINE> <INDENT> hazard_keywords = { 'layer_purpose': 'hazard', 'layer_mode': 'classified', 'layer_geometry': 'polygon', 'hazard': 'flood', 'hazard_category': 'single_event', 'vector_hazard_classification': 'flood_vector_hazard_classes' } <NEW_LINE> exposure_keywords = { 'layer_purpose': 'exposure', 'layer_mode': 'continuous', 'layer_geometry': 'raster', 'exposure': 'population', 'exposure_unit': 'count' } <NEW_LINE> impact_functions = 
ImpactFunctionManager().filter_by_keywords( hazard_keywords, exposure_keywords) <NEW_LINE> message = 'There should be 1 impact function, but there are: %s' % len(impact_functions) <NEW_LINE> self.assertEqual(1, len(impact_functions), message) <NEW_LINE> retrieved_if = impact_functions[0].metadata().as_dict()['id'] <NEW_LINE> expected = ImpactFunctionManager().get_function_id( FloodEvacuationVectorHazardFunction) <NEW_LINE> message = 'Expecting %s, but getting %s instead' % ( expected, retrieved_if) <NEW_LINE> self.assertEqual(expected, retrieved_if, message)
|
Test for Flood Vector Building Impact Function.
|
625990295e10d32532ce40dc
|
class Event(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=200) <NEW_LINE> teaser = models.TextField(max_length=200, blank=True, null=True) <NEW_LINE> wikiPage = models.CharField(max_length=200) <NEW_LINE> startDate = models.DateTimeField() <NEW_LINE> endDate = models.DateTimeField(blank=True, null=True) <NEW_LINE> who = models.CharField(max_length=200, blank=True) <NEW_LINE> where = models.CharField(max_length=200, blank=True) <NEW_LINE> created_at = models.DateTimeField(default=datetime.datetime.now()) <NEW_LINE> created_by = models.ForeignKey(User) <NEW_LINE> deleted = models.BooleanField(default=False) <NEW_LINE> category = models.ForeignKey(Category, blank=True, null=True) <NEW_LINE> location = models.ForeignKey(Location, blank=True, null=True) <NEW_LINE> objects = models.Manager() <NEW_LINE> all = EventManager() <NEW_LINE> future = FutureEventFixedNumberManager() <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> status = '' <NEW_LINE> if self.deleted: <NEW_LINE> <INDENT> status = ' [deleted]' <NEW_LINE> <DEDENT> return u'%s (%s)%s' % (self.name, self.startDate, status) <NEW_LINE> <DEDENT> def past(self): <NEW_LINE> <INDENT> return self.startDate < datetime.datetime.now() <NEW_LINE> <DEDENT> @permalink <NEW_LINE> def get_absolute_url(self): <NEW_LINE> <INDENT> return('django.views.generic.list_detail.object_detail', [str(self.id)]) <NEW_LINE> <DEDENT> def save(self, editor=False, new=False): <NEW_LINE> <INDENT> if new and editor != False: <NEW_LINE> <INDENT> self.created_by = editor <NEW_LINE> self.created_by.save() <NEW_LINE> <DEDENT> super(Event, self).save() <NEW_LINE> <DEDENT> def start_end_date_eq(self): <NEW_LINE> <INDENT> return self.startDate.date() == self.endDate.date() <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> self.deleted = True
|
Represents an event
|
625990295166f23b2e244387
|
class ProgressSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> user = UserSerializer(read_only=True) <NEW_LINE> category = CategorySerializer(read_only=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Progress <NEW_LINE> fields = '__all__'
|
serializes the Progress model
(nested with user and the category serializer)
|
625990298a349b6b436871ea
|
class FeedbackDeleted(): <NEW_LINE> <INDENT> def __init__(self, *, status: int = None, message: str = None) -> None: <NEW_LINE> <INDENT> self.status = status <NEW_LINE> self.message = message <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, _dict: Dict) -> 'FeedbackDeleted': <NEW_LINE> <INDENT> args = {} <NEW_LINE> valid_keys = ['status', 'message'] <NEW_LINE> bad_keys = set(_dict.keys()) - set(valid_keys) <NEW_LINE> if bad_keys: <NEW_LINE> <INDENT> raise ValueError( 'Unrecognized keys detected in dictionary for class FeedbackDeleted: ' + ', '.join(bad_keys)) <NEW_LINE> <DEDENT> if 'status' in _dict: <NEW_LINE> <INDENT> args['status'] = _dict.get('status') <NEW_LINE> <DEDENT> if 'message' in _dict: <NEW_LINE> <INDENT> args['message'] = _dict.get('message') <NEW_LINE> <DEDENT> return cls(**args) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _from_dict(cls, _dict): <NEW_LINE> <INDENT> return cls.from_dict(_dict) <NEW_LINE> <DEDENT> def to_dict(self) -> Dict: <NEW_LINE> <INDENT> _dict = {} <NEW_LINE> if hasattr(self, 'status') and self.status is not None: <NEW_LINE> <INDENT> _dict['status'] = self.status <NEW_LINE> <DEDENT> if hasattr(self, 'message') and self.message is not None: <NEW_LINE> <INDENT> _dict['message'] = self.message <NEW_LINE> <DEDENT> return _dict <NEW_LINE> <DEDENT> def _to_dict(self): <NEW_LINE> <INDENT> return self.to_dict() <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return json.dumps(self._to_dict(), indent=2) <NEW_LINE> <DEDENT> def __eq__(self, other: 'FeedbackDeleted') -> bool: <NEW_LINE> <INDENT> if not isinstance(other, self.__class__): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other: 'FeedbackDeleted') -> bool: <NEW_LINE> <INDENT> return not self == other
|
The status and message of the deletion request.
:attr int status: (optional) HTTP return code.
:attr str message: (optional) Status message returned from the service.
|
62599029ac7a0e7691f7349a
|
class CacheId(str): <NEW_LINE> <INDENT> def __repr__(self): <NEW_LINE> <INDENT> return f"CacheId({super().__repr__()})"
|
Unique identifier of the Cache object.
|
6259902973bcbd0ca4bcb243
|
class NestedHost(Host): <NEW_LINE> <INDENT> def __init__(self, host): <NEW_LINE> <INDENT> super(NestedHost, self).__init__(host) <NEW_LINE> <DEDENT> def children(self): <NEW_LINE> <INDENT> return self.procs
|
This class contains information and actions related to a nested job.
|
62599029ec188e330fdf9846
|
class ProjectOverviewResponse(object): <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.STRUCT, 'project', (Project, Project.thrift_spec), None, ), (2, TType.I32, 'numExperiments', None, None, ), (3, TType.I32, 'numExperimentRuns', None, None, ), ) <NEW_LINE> def __init__(self, project=None, numExperiments=None, numExperimentRuns=None,): <NEW_LINE> <INDENT> self.project = project <NEW_LINE> self.numExperiments = numExperiments <NEW_LINE> self.numExperimentRuns = numExperimentRuns <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.project = Project() <NEW_LINE> self.project.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.I32: <NEW_LINE> <INDENT> self.numExperiments = iprot.readI32() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 3: <NEW_LINE> <INDENT> if ftype == TType.I32: <NEW_LINE> <INDENT> self.numExperimentRuns = iprot.readI32() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, (self.__class__, 
self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('ProjectOverviewResponse') <NEW_LINE> if self.project is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('project', TType.STRUCT, 1) <NEW_LINE> self.project.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.numExperiments is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('numExperiments', TType.I32, 2) <NEW_LINE> oprot.writeI32(self.numExperiments) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.numExperimentRuns is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('numExperimentRuns', TType.I32, 3) <NEW_LINE> oprot.writeI32(self.numExperimentRuns) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
|
Attributes:
- project
- numExperiments
- numExperimentRuns
|
625990299b70327d1c57fd34
|
class CharmUpgradeOperation(object): <NEW_LINE> <INDENT> def __init__(self, agent): <NEW_LINE> <INDENT> self._agent = agent <NEW_LINE> self._log = logging.getLogger("unit.upgrade") <NEW_LINE> self._charm_directory = tempfile.mkdtemp( suffix="charm-upgrade", prefix="tmp") <NEW_LINE> <DEDENT> def retrieve_charm(self, charm_id): <NEW_LINE> <INDENT> return download_charm( self._agent.client, charm_id, self._charm_directory) <NEW_LINE> <DEDENT> def _remove_tree(self, result): <NEW_LINE> <INDENT> if os.path.exists(self._charm_directory): <NEW_LINE> <INDENT> shutil.rmtree(self._charm_directory) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> d = self._run() <NEW_LINE> d.addBoth(self._remove_tree) <NEW_LINE> return d <NEW_LINE> <DEDENT> @inlineCallbacks <NEW_LINE> def _run(self): <NEW_LINE> <INDENT> self._log.info("Starting charm upgrade...") <NEW_LINE> workflow_state = yield self._agent.workflow.get_state() <NEW_LINE> if not workflow_state in ("started",): <NEW_LINE> <INDENT> self._log.warning( "Unit not in an upgradeable state: %s", workflow_state) <NEW_LINE> yield self._agent.unit_state.clear_upgrade_flag() <NEW_LINE> returnValue(False) <NEW_LINE> <DEDENT> upgrade_flag = yield self._agent.unit_state.get_upgrade_flag() <NEW_LINE> if not upgrade_flag: <NEW_LINE> <INDENT> self._log.warning("No upgrade flag set.") <NEW_LINE> returnValue(False) <NEW_LINE> <DEDENT> self._log.debug("Clearing upgrade flag.") <NEW_LINE> yield self._agent.unit_state.clear_upgrade_flag() <NEW_LINE> service_state_manager = ServiceStateManager(self._agent.client) <NEW_LINE> service_state = yield service_state_manager.get_service_state( self._agent.unit_name.split("/")[0]) <NEW_LINE> service_charm_id = yield service_state.get_charm_id() <NEW_LINE> unit_charm_id = yield self._agent.unit_state.get_charm_id() <NEW_LINE> if service_charm_id == unit_charm_id: <NEW_LINE> <INDENT> self._log.debug("Unit already running latest charm") <NEW_LINE> yield 
self._agent.unit_state.clear_upgrade_flag() <NEW_LINE> returnValue(True) <NEW_LINE> <DEDENT> self._log.debug("Retrieving charm %s", service_charm_id) <NEW_LINE> charm = yield self.retrieve_charm(service_charm_id) <NEW_LINE> self._log.debug("Stopping hook execution.") <NEW_LINE> yield self._agent.executor.stop() <NEW_LINE> self._log.debug("Setting unit charm id to %s", service_charm_id) <NEW_LINE> yield self._agent.unit_state.set_charm_id(service_charm_id) <NEW_LINE> self._log.debug("Extracting new charm.") <NEW_LINE> charm.extract_to( os.path.join(self._agent.unit_directory, "charm")) <NEW_LINE> self._log.debug("Invoking upgrade transition.") <NEW_LINE> success = yield self._agent.workflow.fire_transition( "upgrade_charm") <NEW_LINE> if success: <NEW_LINE> <INDENT> self._log.debug("Unit upgraded.") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._log.warning("Upgrade failed.") <NEW_LINE> <DEDENT> returnValue(success)
|
A unit agent charm upgrade operation.
|
625990296fece00bbaccc96b
|
class BgClient(object): <NEW_LINE> <INDENT> executor = ThreadPoolExecutor(max_workers=10) <NEW_LINE> def __init__(self, t_client): <NEW_LINE> <INDENT> self.t_client = t_client <NEW_LINE> <DEDENT> def __getattr__(self, thrift_method): <NEW_LINE> <INDENT> def submit(*args, **kwargs): <NEW_LINE> <INDENT> return self.executor.submit( self.t_client.__getattr__(thrift_method), *args, **kwargs ) <NEW_LINE> <DEDENT> return submit
|
Helper class that wraps a thriftpy TClient
|
625990298c3a8732951f750b
|
class MapReader: <NEW_LINE> <INDENT> def recursive_flood_fill(self, grid, row: int, col: int): <NEW_LINE> <INDENT> if (row < 0 or row > len(grid) - 1): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if (col < 0 or col > len(grid[0]) - 1): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if (grid[row][col] != 1): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if (grid[row][col] == 1): <NEW_LINE> <INDENT> grid[row][col] = -1 <NEW_LINE> <DEDENT> self.recursive_flood_fill(grid, row - 1, col) <NEW_LINE> self.recursive_flood_fill(grid, row + 1, col) <NEW_LINE> self.recursive_flood_fill(grid, row, col - 1) <NEW_LINE> self.recursive_flood_fill(grid, row, col + 1) <NEW_LINE> <DEDENT> def is_one(self, grid, row, col): <NEW_LINE> <INDENT> if (row < 0 or row > len(grid) - 1): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if (col < 0 or col > len(grid[0]) - 1): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if grid[row][col] == 1: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def iterative_flood_fill(self, grid, row, col): <NEW_LINE> <INDENT> if (row < 0 or row > len(grid) - 1): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if (col < 0 or col > len(grid[0]) - 1): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if (grid[row][col] != 1): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> q = [] <NEW_LINE> grid[row][col] = -1 <NEW_LINE> q.append([row, col]) <NEW_LINE> while len(q) > 0: <NEW_LINE> <INDENT> [cur_row, cur_col] = q[0] <NEW_LINE> del q[0] <NEW_LINE> if (self.is_one(grid, cur_row - 1, cur_col) == True): <NEW_LINE> <INDENT> grid[cur_row - 1][cur_col] = -1 <NEW_LINE> q.append([cur_row - 1, cur_col]) <NEW_LINE> <DEDENT> if (self.is_one(grid, cur_row + 1, cur_col) == True): <NEW_LINE> <INDENT> grid[cur_row + 1][cur_col] = -1 <NEW_LINE> q.append([cur_row + 1, cur_col]) <NEW_LINE> <DEDENT> if (self.is_one(grid, cur_row, cur_col - 1) == True): <NEW_LINE> <INDENT> grid[cur_row][cur_col - 1] = -1 
<NEW_LINE> q.append([cur_row, cur_col - 1]) <NEW_LINE> <DEDENT> if (self.is_one(grid, cur_row, cur_col + 1) == True): <NEW_LINE> <INDENT> grid[cur_row][cur_col + 1] = -1 <NEW_LINE> q.append([cur_row, cur_col + 1]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def count_islands(self, reduced_map): <NEW_LINE> <INDENT> islands = 0 <NEW_LINE> for row in range(len(reduced_map)): <NEW_LINE> <INDENT> for col in range(len(reduced_map[0])): <NEW_LINE> <INDENT> if (reduced_map[row][col] == 1): <NEW_LINE> <INDENT> islands += 1 <NEW_LINE> self.iterative_flood_fill(reduced_map, row, col) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return islands
|
Reads the reduced map (1s and 0s), where 1 - island and 0 - sea
|
6259902956b00c62f0fb3873
|
class SectionLinkCollection(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.swaggerTypes = { 'SectionLinkList': 'list[SectionLink]', 'link': 'Link' } <NEW_LINE> self.attributeMap = { 'SectionLinkList': 'SectionLinkList','link': 'link'} <NEW_LINE> self.SectionLinkList = None <NEW_LINE> self.link = None
|
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
|
62599029d10714528d69ee66
|
class _CoordGroup: <NEW_LINE> <INDENT> def __init__(self, coords, cubes): <NEW_LINE> <INDENT> self.coords = coords <NEW_LINE> self.cubes = cubes <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.coords) <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> return list(self).__getitem__(key) <NEW_LINE> <DEDENT> def _first_coord_w_cube(self): <NEW_LINE> <INDENT> return next( filter( lambda cube_coord: cube_coord[1] is not None, zip(self.cubes, self.coords), ) ) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return ( "[" + ", ".join( [ coord.name() if coord is not None else "None" for coord in self ] ) + "]" ) <NEW_LINE> <DEDENT> def name(self): <NEW_LINE> <INDENT> _, first_coord = self._first_coord_w_cube() <NEW_LINE> return first_coord.name() <NEW_LINE> <DEDENT> def _oid_tuple(self): <NEW_LINE> <INDENT> return tuple((id(coord) for coord in self)) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(self._oid_tuple()) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> result = NotImplemented <NEW_LINE> if isinstance(other, _CoordGroup): <NEW_LINE> <INDENT> result = self._oid_tuple() == other._oid_tuple() <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def matches(self, predicate, default_val=True): <NEW_LINE> <INDENT> for cube, coord in zip(self.cubes, self.coords): <NEW_LINE> <INDENT> if coord is None: <NEW_LINE> <INDENT> yield default_val <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> yield predicate(cube, coord) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def matches_all(self, predicate): <NEW_LINE> <INDENT> return all(self.matches(predicate)) <NEW_LINE> <DEDENT> def matches_any(self, predicate): <NEW_LINE> <INDENT> return any(self.matches(predicate))
|
Represents a list of coordinates, one for each given cube. Which can be
operated on conveniently.
|
62599029d164cc6175821f2c
|
class Address(models.Model): <NEW_LINE> <INDENT> street = models.CharField(max_length=200, blank=True, verbose_name=_('street')) <NEW_LINE> city = models.CharField(max_length=100, blank=True, verbose_name=_('city')) <NEW_LINE> postal_number = models.CharField(max_length=10, blank=True, verbose_name=_('zip code')) <NEW_LINE> region = models.CharField(max_length=30, blank=True, verbose_name=_('region')) <NEW_LINE> @staticmethod <NEW_LINE> def autocomplete_search_fields(): <NEW_LINE> <INDENT> return ( "street_icontains", "city__icontains", ) <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> template = u"{street}, {city}, {postal}, {region}" <NEW_LINE> return template.format(street=self.street, city=self.city, postal=self.postal_number, region=self.region ) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = _('address') <NEW_LINE> verbose_name_plural = _('addresses')
|
Represents a postal address.
|
625990291d351010ab8f4acb
|
class RelativeChessBoard(): <NEW_LINE> <INDENT> def __init__(self, chessboard, row, col, relative_row_start_pos=0, relative_col_start_pos=0): <NEW_LINE> <INDENT> if (row + relative_row_start_pos > chessboard.getRowMaxNum() or col + relative_col_start_pos > chessboard.getColMaxNum()): <NEW_LINE> <INDENT> raise ValueError('Out of range of chessboard!') <NEW_LINE> <DEDENT> self._row = row <NEW_LINE> self._col = col <NEW_LINE> self._rel_row = relative_row_start_pos <NEW_LINE> self._rel_col = relative_col_start_pos <NEW_LINE> self._chessboard = chessboard <NEW_LINE> <DEDENT> def getRowMaxNum(self): <NEW_LINE> <INDENT> return self._row <NEW_LINE> <DEDENT> def getColMaxNum(self): <NEW_LINE> <INDENT> return self._col <NEW_LINE> <DEDENT> def getBoard(self, irow, icol): <NEW_LINE> <INDENT> if irow >= self._row or icol >= self._col: <NEW_LINE> <INDENT> raise IndexError('Index out of range!') <NEW_LINE> <DEDENT> return self._chessboard.getBoard(irow + self._rel_row, icol + self._rel_col) <NEW_LINE> <DEDENT> def setBoard(self, irow, icol, data): <NEW_LINE> <INDENT> self._chessboard.setBoard(irow + self._rel_row, icol + self._rel_col, data) <NEW_LINE> <DEDENT> def inChessBoard(self, irow, icol): <NEW_LINE> <INDENT> if irow < self.getRowMaxNum() and icol < self.getColMaxNum(): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'relative row:{} ,relative col:{} ; row:{} , col:{}'.format( self._rel_row, self._rel_col, self._row, self._col)
|
Get a chessboard in a chessboard.
rwo:new chessboard row.
col:new chessboard col.
relative_row_start_pos:this chessboard row relative to main chessboard.
relative_col_start_pos:this chessboard col relative to main chessboard.
|
6259902991af0d3eaad3addd
|
class Treelet(object):
    """A subgraph of a larger :class:`~.Glycan` plus the ids of its frontier.

    The frontier is the set of node ids in the parent glycan that are
    children of nodes already contained in :attr:`subtree`.
    """

    def __init__(self, subtree, frontier_ids):
        self.subtree = subtree
        self.frontier_ids = set(frontier_ids)

    @classmethod
    def from_monosaccharide(cls, monosaccharide):
        """Build a single-node treelet rooted at *monosaccharide*."""
        node = root(monosaccharide)
        subtree = Glycan(node.clone(prop_id=True), index_method=None)
        frontier = {child.id for _pos, child in node.children()}
        return cls(subtree, frontier)

    def __len__(self):
        return len(self.subtree)

    def __root__(self):
        return root(self.subtree)

    def __tree__(self):
        return treep(self.subtree)

    def __eq__(self, other):
        try:
            return (self.subtree == other.subtree
                    and self.frontier_ids == other.frontier_ids)
        except AttributeError:
            # *other* is not a Treelet: fall back to tree comparison.
            return treep(self) == treep(other)

    def __ne__(self, other):
        return not (self == other)

    def __hash__(self):
        return hash(self.subtree)

    def canonicalize(self):
        self.subtree.canonicalize()

    def expand(self, reference, frontier_id):
        """Return a new treelet grown by attaching node *frontier_id* of
        *reference* to a clone of this subtree."""
        node = reference.get(frontier_id)
        new_node = node.clone(prop_id=True)
        tree = self.subtree.clone(index_method=None)
        # Re-create every parent link of the node inside the cloned tree.
        for _pos, link in node.parents(True):
            link.clone(tree.get(link.parent.id), new_node)
        frontier = set(self.frontier_ids)
        frontier.remove(frontier_id)
        frontier.update(child.id for _pos, child in node.children())
        return self.__class__(tree, frontier)

    def expand_all(self, reference):
        """Expand once at every frontier node, returning all new treelets."""
        return [self.expand(reference, node_id)
                for node_id in self.frontier_ids]
|
Represents a subgraph of a larger :class:`~.Glycan`, with a frontier
of node ids which are children of the current subgraph.
Attributes
----------
frontier_ids : :class:`set`
The id values of the nodes from the parent :class:`~.Glycan` which
are children of members of :attr:`subtree`
subtree : :class:`~.Glycan`
The subgraph defining the treelet
|
625990291f5feb6acb163ba5
|
class LandsatArchiveError(LandsatError):
    """Base class for LandsatArchive exceptions."""
|
Base class for LandsatArchive exceptions
|
62599029507cdc57c63a5d5c
|
class _ErrorHandler(object):
    """Handles all errors and log messages.

    Wraps a :mod:`logging` logger: level methods (``debug``, ``warn``, ...)
    are routed through the internal handler, which either raises the
    supplied DOM exception or logs the message, depending on configuration.
    """

    def __init__(self, log, defaultloglevel=logging.INFO, raiseExceptions=True):
        self.enabled = True
        if log:
            self._log = log
        else:
            # No logger supplied: build a default one writing to stderr.
            import sys
            self._log = logging.getLogger('CSSUTILS')
            handler = logging.StreamHandler(sys.stderr)
            handler.setFormatter(
                logging.Formatter('%(levelname)s\t%(message)s'))
            self._log.addHandler(handler)
            self._log.setLevel(defaultloglevel)
        # When True, reported errors raise; otherwise they are only logged.
        self.raiseExceptions = raiseExceptions

    def __getattr__(self, name):
        logcalls = ('debug', 'info', 'warn', 'error', 'critical', 'fatal')
        passthrough = ('setLevel', 'getEffectiveLevel',
                       'addHandler', 'removeHandler')
        if name in logcalls:
            # Remember which log level was requested, then hand back the
            # generic handler that will use it.
            self._logcall = getattr(self._log, name)
            return self.__handle
        if name in passthrough:
            return getattr(self._log, name)
        raise AttributeError(
            '(errorhandler) No Attribute %r found' % name)

    def __handle(self, msg=u'', token=None, error=xml.dom.SyntaxErr,
                 neverraise=False, args=None):
        """Log *msg*, or raise *error*, annotated with the token position."""
        if not self.enabled:
            return
        line, col = None, None
        if token:
            # Tokens may be plain tuples (value, line, col) or objects.
            if isinstance(token, tuple):
                value, line, col = token[1], token[2], token[3]
            else:
                value, line, col = token.value, token.line, token.col
            msg = u'%s [%s:%s: %s]' % (msg, line, col, value)
        if error and self.raiseExceptions and not neverraise:
            if isinstance(error, (urllib2.HTTPError, urllib2.URLError)):
                # Network errors: re-raise the currently handled exception.
                raise
            elif issubclass(error, xml.dom.DOMException):
                # Attach position info to DOM exceptions before raising.
                error.line = line
                error.col = col
            raise error(msg)
        else:
            self._logcall(msg)

    def setLog(self, log):
        """Replace the underlying logger."""
        self._log = log
|
Handles all errors and log messages.
|
6259902930c21e258be997c1
|
class FeedParser(object):
    """Downloads and parses an RSS/Atom feed via :mod:`feedparser`."""

    def __init__(self, url):
        self.url = url

    @log_function_call(log_result=False)
    def parse(self, modified=None):
        """Fetch the feed and return ``(channel, entries)``.

        :param modified: optional ``datetime``; passed to feedparser as the
            If-Modified-Since time.
        :raises: the feed's ``bozo_exception`` when the feed is malformed.
        """
        feed = feedparser.parse(
            self.url, modified=self._datetime_to_time_struct(modified))
        if feed.bozo:
            raise feed.bozo_exception
        channel = {
            'url': feed.url,
            'title': feed.channel.title,
            'link': feed.channel.link,
            'description': feed.channel.get('description', None),
            'modified': self._extract_channel_modified(feed),
        }
        entries = [self._build_entry(record) for record in feed.entries]
        # BUG FIX: the original unconditionally called
        # channel['modified'].isoformat(' '), which raised AttributeError
        # whenever the feed carried no modification date at all.
        modified_repr = (channel['modified'].isoformat(' ')
                         if channel['modified'] is not None else 'unknown')
        LOG.debug('Got %d new feeds from %s since %s',
                  len(entries), self.url, modified_repr)
        return channel, entries

    def _extract_channel_modified(self, feed):
        """Best-effort lookup of the feed-level modification time
        (falls back to the first entry's timestamp)."""
        modified = None
        if 'date' in feed:
            modified = feed.date_parsed
        elif 'modified' in feed:
            modified = feed.modified_parsed
        elif 'published' in feed:
            modified = feed.published_parsed
        elif 'updated' in feed:
            modified = feed.updated_parsed
        elif feed.entries:
            first = feed.entries[0]
            # NOTE(review): keys and attributes intentionally differ here,
            # matching the original code — feedparser historically aliases
            # date_parsed to the entry timestamp.
            if 'modified' in first:
                modified = first.date_parsed
            elif 'published' in first:
                modified = first.published_parsed
            elif 'updated' in first:
                modified = first.updated_parsed
        return self._time_struct_to_datetime(modified)

    def _build_entry(self, record):
        """Convert one feedparser entry into a plain dict."""
        date = None
        if 'date' in record:
            date = record.date_parsed
        elif 'published' in record:
            date = record.published_parsed
        return {'title': record.title,
                'summary': record.summary,
                'link': record.link,
                'date': self._time_struct_to_datetime(date)}

    def _datetime_to_time_struct(self, date_time):
        """datetime -> time.struct_time; falsy input passes through as None."""
        if date_time:
            return date_time.timetuple()

    def _time_struct_to_datetime(self, date_time):
        """time.struct_time -> naive datetime; falsy input passes through."""
        if date_time:
            return datetime(date_time.tm_year, date_time.tm_mon,
                            date_time.tm_mday, date_time.tm_hour,
                            date_time.tm_min, date_time.tm_sec)
|
Helper class used to download and parse a feed.
|
625990295166f23b2e24438b
|
class FamilyEnrollmentList(generics.ListCreateAPIView):
    """List/create endpoint for :class:`FamilyEnrollment` records.

    Results are restricted to families belonging to organizations the
    requesting user can read, optionally filtered by the ``family_id``
    query parameter, and ordered by most recent ``open_date`` first.
    """

    serializer_class = FamilyEnrollmentSerializer
    permission_classes = (permissions.IsAuthenticated,)

    def get_queryset(self):
        # REFACTOR: the original repeated the same base queryset in three
        # branches; the only real variation is the optional family_id
        # filter, so build it incrementally instead.
        queryset = FamilyEnrollment.objects.filter(
            family__organizations__in=Organization.objects.get_read_orgs(
                self.request.user)
        )
        family_id = self.request.query_params.get('family_id', None)
        if family_id:
            queryset = queryset.filter(family__family_id=family_id)
        return queryset.order_by('-open_date')
|
Defines the enrollment status of a family in a given program. The open date is required, but the close date
is optional (if missing, it indicates that the family is still enrolled). Read/write access is determined by
the read/write access of the user to the family. Read access to the supplied program is required for create
and update operations.
|
62599029d164cc6175821f2d
|
class Dashboard:
    """Dashboard entry point: a Flask + SocketIO app hosting dashboard pages."""

    def __init__(self, name=__name__, secret='__secret__'):
        import os
        static_dir = os.path.join(os.path.dirname(__file__), 'static')
        self.app = Flask(name, static_folder=static_dir)
        self.app.debug = True
        self.app.config['DEBUG'] = True
        self.app.config['SECRET_KEY'] = secret
        self.socket = SocketIO(self.app)
        self.routes = []
        set_socketio(self.socket)
        # Wire up the built-in socket event handlers.
        for event, handler in (('handshake', handshake_event),
                               ('disconnect', disconnect_event),
                               ('remote_process_result', exec_remote_call),
                               ('remote_process_error', handle_error)):
            self.on_event(event, handler)

    @staticmethod
    def init_worker():
        """Make pool workers ignore Ctrl-C; the parent handles shutdown."""
        import signal
        signal.signal(signal.SIGINT, signal.SIG_IGN)

    def __enter__(self):
        global _pool
        _pool = Pool(4, self.init_worker)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if _pool is not None:
            _pool.close()
            _pool.terminate()

    def run(self, host='127.0.0.1', port=5000):
        """Start serving on *host*:*port* (blocking)."""
        global _host, _port
        _port = port
        _host = host
        return self.socket.run(self.app, host, port)

    def add_page(self, page, route=None, header=None, **kwargs):
        """Register *page* under one or more URL routes."""
        routes = route or page.routes()
        if not isinstance(routes, list):
            routes = [routes]
        page_name = type(page).__name__
        for r in routes:
            self.routes.append((r, page_name))
            self.app.add_url_rule(r, page_name, page, **kwargs)
        if header is not None:
            page.header = header

    def on_event(self, event, handler, namespace='/'):
        """Attach a callable *handler* to a SocketIO *event*."""
        assert callable(handler)
        self.socket.on(event, namespace)(handler)
|
Dashboard entry point
|
6259902973bcbd0ca4bcb247
|
class WriteToParquet(PTransform):
    """A ``PTransform`` for writing parquet files.

    This ``PTransform`` is currently experimental. No backward-compatibility
    guarantees.
    """

    def __init__(self,
                 file_path_prefix,
                 schema,
                 row_group_buffer_size=64 * 1024 * 1024,
                 record_batch_size=1000,
                 codec='none',
                 use_deprecated_int96_timestamps=False,
                 file_name_suffix='',
                 num_shards=0,
                 shard_name_template=None,
                 mime_type='application/x-parquet'):
        super(WriteToParquet, self).__init__()
        # All of the work is delegated to the parquet file sink; this class
        # only adapts it to the PTransform interface.
        self._sink = _create_parquet_sink(file_path_prefix,
                                          schema,
                                          codec,
                                          row_group_buffer_size,
                                          record_batch_size,
                                          use_deprecated_int96_timestamps,
                                          file_name_suffix,
                                          num_shards,
                                          shard_name_template,
                                          mime_type)

    def expand(self, pcoll):
        """Write the incoming PCollection through the configured sink."""
        return pcoll | Write(self._sink)

    def display_data(self):
        return {'sink_dd': self._sink}
|
A ``PTransform`` for writing parquet files.
This ``PTransform`` is currently experimental. No backward-compatibility
guarantees.
|
62599029e76e3b2f99fd99c2
|
class ListProductsAsyncPager:
    """A pager for iterating through ``list_products`` requests.

    Thinly wraps an initial ``ListProductsResponse`` and exposes an
    ``__aiter__`` that transparently fetches subsequent pages while
    iterating the ``products`` field.  Unknown attribute lookups are
    delegated to the most recent response.
    """

    def __init__(
            self,
            method: Callable[..., Awaitable[product_search_service.ListProductsResponse]],
            request: product_search_service.ListProductsRequest,
            response: product_search_service.ListProductsResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        self._method = method
        self._request = product_search_service.ListProductsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate to the latest response so the pager behaves like one.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterator[product_search_service.ListProductsResponse]:
        """Yield each response page, fetching while a page token remains."""
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = await self._method(self._request,
                                                metadata=self._metadata)
            yield self._response

    def __aiter__(self) -> AsyncIterator[product_search_service.Product]:
        async def iterate():
            async for page in self.pages:
                for product in page.products:
                    yield product
        return iterate()

    def __repr__(self) -> str:
        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
|
A pager for iterating through ``list_products`` requests.
This class thinly wraps an initial
:class:`google.cloud.vision_v1p4beta1.types.ListProductsResponse` object, and
provides an ``__aiter__`` method to iterate through its
``products`` field.
If there are more pages, the ``__aiter__`` method will make additional
``ListProducts`` requests and continue to iterate
through the ``products`` field on the
corresponding responses.
All the usual :class:`google.cloud.vision_v1p4beta1.types.ListProductsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
|
6259902923e79379d538d4c0
|
class FormsetForm(object):
    """Form mixin providing template-side fieldset access.

    Attribute lookups such as ``billing_fields``, ``other_fields``,
    ``name_field``, ``fields_before_X`` and ``fields_after_X`` return a
    copy of the form restricted to the matching subset of fields, so
    templates can render field groups via the usual ``as_p``/``as_table``
    methods.  Fields already consumed by an earlier fieldset are skipped.
    """

    def _fieldset(self, field_names):
        """Return a copy of the form containing only *field_names*,
        excluding fields already handed out by a previous fieldset."""
        fieldset = copy(self)
        if not hasattr(self, "_fields_done"):
            self._fields_done = []
        # Suppress duplicate non-field error rendering on sub-fieldsets.
        fieldset.non_field_errors = lambda *args: None
        names = [f for f in field_names if f not in self._fields_done]
        fieldset.fields = OrderedDict([(f, self.fields[f]) for f in names])
        self._fields_done.extend(names)
        return fieldset

    def values(self):
        """Yield ``(label, value)`` pairs for every field, deriving a label
        from the field name when none is set."""
        for field in self.fields:
            label = self.fields[field].label
            if label is None:
                label = field[0].upper() + field[1:].replace("_", " ")
            yield (label, self.initial.get(field, self.data.get(field, "")))

    def __getattr__(self, name):
        if name == "errors":
            return None
        # BUG FIX: the original subscripted the dropwhile() iterator
        # directly (dropwhile(...)[1:]), raising TypeError on any
        # fields_after_X access; it is materialised with list() first here.
        # Raw strings also avoid the deprecated "\w" escape in non-raw
        # string literals.
        filters = (
            (r"^other_fields$",
             lambda: self.fields.keys()),
            (r"^hidden_fields$",
             lambda: [n for n, f in self.fields.items()
                      if isinstance(f.widget, forms.HiddenInput)]),
            (r"^(\w*)_fields$",
             lambda name: [f for f in self.fields.keys()
                           if f.startswith(name)]),
            (r"^(\w*)_field$",
             lambda name: [f for f in self.fields.keys() if f == name]),
            (r"^fields_before_(\w*)$",
             lambda name: takewhile(lambda f: f != name,
                                    self.fields.keys())),
            (r"^fields_after_(\w*)$",
             lambda name: list(dropwhile(lambda f: f != name,
                                         self.fields.keys()))[1:]),
        )
        for filter_exp, filter_func in filters:
            filter_args = match(filter_exp, name)
            if filter_args is not None:
                return self._fieldset(filter_func(*filter_args.groups()))
        raise AttributeError(name)
|
Form mixin that provides template methods for iterating through
sets of fields by prefix, single fields and finally remaning
fields that haven't been, iterated with each fieldset made up from
a copy of the original form, giving access to as_* methods.
The use case for this is ``OrderForm`` below. It contains a
handful of fields named with the prefixes ``billing_detail_XXX``
and ``shipping_detail_XXX``. Using ``FormsetForm`` we can then
group these into fieldsets in our templates::
<!-- Fields prefixed with "billing_detail_" -->
<fieldset>{{ form.billing_detail_fields.as_p }}</fieldset>
<!-- Fields prefixed with "shipping_detail_" -->
<fieldset>{{ form.shipping_detail_fields.as_p }}</fieldset>
<!-- All remaining fields -->
<fieldset>{{ form.other_fields.as_p }}</fieldset>
Some other helpers exist for use with an individual field name:
- ``XXX_field`` returns a fieldset containing the field named XXX
- ``fields_before_XXX`` returns a fieldset with all fields before
the field named XXX
- ``fields_after_XXX`` returns a fieldset with all fields after
the field named XXX
|
62599029d10714528d69ee67
|
class ALU():
    """Arithmetic Logical Unit: handles all of the arithmetic instructions."""

    def execute_instruction(self):
        # BUG FIX: the original method omitted ``self``, so calling it on an
        # instance raised TypeError.  Still a stub awaiting implementation.
        pass
|
Arithmetic Logical Unit handles all of the arithmetic instructions
|
6259902921bff66bcd723c18
|
class TestDistrictsDeleted(unittest.TestCase):
    """DistrictsDeleted unit test stubs."""

    def setUp(self):
        # No fixtures required yet.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testDistrictsDeleted(self):
        # TODO: implement the DistrictsDeleted assertions.
        pass
|
DistrictsDeleted unit test stubs
|
62599029796e427e5384f733
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.