| code (string, 4 to 4.48k chars) | docstring (string, 1 to 6.45k chars) | _id (string, 24 chars) |
|---|---|---|
class IN5_GenerateLogbook_Test(systemtesting.MantidSystemTest): <NEW_LINE> <INDENT> _data_directory = None <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(IN5_GenerateLogbook_Test, self).__init__() <NEW_LINE> self.setUp() <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> config['default.facility'] = 'ILL' <NEW_LINE> config['default.instrument'] = 'IN5' <NEW_LINE> config['logging.loggers.root.level'] = 'Warning' <NEW_LINE> data_dirs = config['datasearch.directories'].split(';') <NEW_LINE> test_data_dir = [p for p in data_dirs if 'SystemTest' in p][0] <NEW_LINE> in5_dir = os.path.join('ILL', 'IN5') <NEW_LINE> if 'ILL' in test_data_dir: <NEW_LINE> <INDENT> in5_dir = 'IN5' <NEW_LINE> <DEDENT> self._data_directory = os.path.abspath(os.path.join(test_data_dir, in5_dir)) <NEW_LINE> <DEDENT> def cleanup(self): <NEW_LINE> <INDENT> mtd.clear() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> self.tolerance = 1e-3 <NEW_LINE> self.tolerance_is_rel_err = True <NEW_LINE> self.disableChecking = ['Instrument'] <NEW_LINE> return ['in5_logbook', 'IN5_Logbook_Reference.nxs'] <NEW_LINE> <DEDENT> def runTest(self): <NEW_LINE> <INDENT> GenerateLogbook(Directory=self._data_directory, OutputWorkspace='in5_logbook', Facility='ILL', Instrument='IN5', NumorRange='199728,199729', OptionalHeaders='all')
|
Tests generating logbook for IN5 data.
|
6259903b30c21e258be999ff
|
class PanParam: <NEW_LINE> <INDENT> def __init__(self, id, name, shortName, param_type, source, unit=None): <NEW_LINE> <INDENT> self.id=id <NEW_LINE> self.name=name <NEW_LINE> self.shortName=shortName <NEW_LINE> ns=('CF','OS') <NEW_LINE> self.synonym=dict.fromkeys(ns) <NEW_LINE> self.type=param_type <NEW_LINE> self.source=source <NEW_LINE> self.unit=unit <NEW_LINE> <DEDENT> def addSynonym(self, name, ns): <NEW_LINE> <INDENT> self.synonym[ns]=name
|
PANGAEA Parameter
Should be used to create PANGAEA parameter objects. Parameter is used here to represent 'measured variables'.
Parameters
----------
id : int
the identifier for the parameter
name : str
A long name or title used for the parameter
shortName : str
A short name or label to identify the parameter
param_type : str
indicates the data type of the parameter (string, numeric, datetime etc..)
source : str
defines the category or source for a parameter (e.g. geocode, data, event)... very PANGAEA specific ;)
unit : str
the unit of measurement used with this parameter (e.g. m/s, kg etc..)
Attributes
----------
id : int
the identifier for the parameter
name : str
A long name or title used for the parameter
shortName : str
A short name or label to identify the parameter
synonym : dict
A dictionary of synonyms for the parameter which, e.g., is used by other archives or communities.
The dict key indicates the namespace (possible values currently are CF and OS)
type : str
indicates the data type of the parameter (string, numeric, datetime etc..)
source : str
defines the category or source for a parameter (e.g. geocode, data, event)... very PANGAEA specific ;)
unit : str
the unit of measurement used with this parameter (e.g. m/s, kg etc..)
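Example
-------
A minimal usage sketch (the identifier and synonym values below are illustrative, not real PANGAEA data):
    p = PanParam(1, 'Water depth', 'Depth', 'numeric', 'data', unit='m')
    p.addSynonym('depth', 'CF')  # register a synonym under the CF namespace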
|
6259903b1f5feb6acb163de4
|
class CloudErrorBody(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, 'target': {'key': 'target', 'type': 'str'}, 'details': {'key': 'details', 'type': '[CloudErrorBody]'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(CloudErrorBody, self).__init__(**kwargs) <NEW_LINE> self.code = kwargs.get('code', None) <NEW_LINE> self.message = kwargs.get('message', None) <NEW_LINE> self.target = kwargs.get('target', None) <NEW_LINE> self.details = kwargs.get('details', None)
|
An error response from the Batch service.
:param code: An identifier for the error. Codes are invariant and are intended to be consumed
programmatically.
:type code: str
:param message: A message describing the error, intended to be suitable for display in a user
interface.
:type message: str
:param target: The target of the particular error. For example, the name of the property in
error.
:type target: str
:param details: A list of additional details about the error.
:type details: list[~azure.mgmt.network.v2019_04_01.models.CloudErrorBody]
|
6259903b82261d6c527307bc
|
class NefitStatus(NefitSensor): <NEW_LINE> <INDENT> @property <NEW_LINE> def native_value(self) -> StateType: <NEW_LINE> <INDENT> return get_status(self.coordinator.data.get(self.entity_description.key))
|
Representation of the boiler status.
|
6259903ba4f1c619b294f780
|
class Mods_LockLoadOrder(CheckLink): <NEW_LINE> <INDENT> _text = _(u'Lock Load Order') <NEW_LINE> _help = _(u'Will reset mod Load Order to whatever Wrye Bash has saved for' u' them whenever Wrye Bash refreshes data/starts up.') <NEW_LINE> def _check(self): return load_order.locked <NEW_LINE> def Execute(self): <NEW_LINE> <INDENT> def _show_lo_lock_warning(): <NEW_LINE> <INDENT> message = _(u'Lock Load Order is a feature which resets load ' u'order to a previously memorized state. While this ' u'feature is good for maintaining your load order, it ' u'will also undo any load order changes that you have ' u'made outside Bash.') <NEW_LINE> return self._askContinue(message, u'bash.load_order.lock.continue', title=_(u'Lock Load Order')) <NEW_LINE> <DEDENT> load_order.toggle_lock_load_order(_show_lo_lock_warning)
|
Turn on Lock Load Order feature.
|
6259903b07d97122c4217e8f
|
class RecipeViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> authentication_classes = (TokenAuthentication,) <NEW_LINE> permission_classes = (IsAuthenticated,) <NEW_LINE> queryset = Recipe.objects.all() <NEW_LINE> serializer_class = serializers.RecipeSerializer <NEW_LINE> def _params_to_ints(self, qs): <NEW_LINE> <INDENT> return [int(str_id) for str_id in qs.split(',')] <NEW_LINE> <DEDENT> def get_queryset(self): <NEW_LINE> <INDENT> tags = self.request.query_params.get('tags') <NEW_LINE> ingredients = self.request.query_params.get('ingredients') <NEW_LINE> queryset = self.queryset <NEW_LINE> if tags: <NEW_LINE> <INDENT> tag_ids = self._params_to_ints(tags) <NEW_LINE> queryset = queryset.filter(tags__id__in=tag_ids) <NEW_LINE> <DEDENT> if ingredients: <NEW_LINE> <INDENT> ingredient_ids = self._params_to_ints(ingredients) <NEW_LINE> queryset = queryset.filter(ingredients__id__in=ingredient_ids) <NEW_LINE> <DEDENT> return queryset.filter(user=self.request.user) <NEW_LINE> <DEDENT> def get_serializer_class(self): <NEW_LINE> <INDENT> if self.action == 'retrieve': <NEW_LINE> <INDENT> return serializers.RecipeDetailSerializer <NEW_LINE> <DEDENT> elif self.action == 'upload_image': <NEW_LINE> <INDENT> return serializers.RecipeImageSerializer <NEW_LINE> <DEDENT> return self.serializer_class <NEW_LINE> <DEDENT> def perform_create(self, serializer): <NEW_LINE> <INDENT> serializer.save(user=self.request.user) <NEW_LINE> <DEDENT> @action(methods=['POST'], detail=True, url_path='upload-image') <NEW_LINE> def upload_image(self, request, pk=None): <NEW_LINE> <INDENT> recipe = self.get_object() <NEW_LINE> serializer = self.get_serializer( recipe, data=request.data ) <NEW_LINE> if serializer.is_valid(): <NEW_LINE> <INDENT> serializer.save() <NEW_LINE> return Response( serializer.data, status=status.HTTP_200_OK ) <NEW_LINE> <DEDENT> return Response( serializer.errors, status=status.HTTP_400_BAD_REQUEST )
|
Manage recipes in the database
|
6259903b16aa5153ce4016df
|
class CUCollection(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.collection = [] <NEW_LINE> <DEDENT> def getCollection(self): <NEW_LINE> <INDENT> return(self.collection) <NEW_LINE> <DEDENT> def addCU(self,CU): <NEW_LINE> <INDENT> self.collection.append(CU) <NEW_LINE> <DEDENT> def getCUID(self,CUName,CUType): <NEW_LINE> <INDENT> for (i,CU) in enumerate(self.collection): <NEW_LINE> <INDENT> if ((CU.cuName == CUName) and (CU.cuType == CUType)): <NEW_LINE> <INDENT> return(i) <NEW_LINE> <DEDENT> <DEDENT> return(None) <NEW_LINE> <DEDENT> def addWriter(self,CUName,spriteName,CUType,wait=False): <NEW_LINE> <INDENT> CUID = self.getCUID(CUName,CUType) <NEW_LINE> if (CUID is not None): <NEW_LINE> <INDENT> self.collection[CUID].addWriter(Writer(spriteName)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> newCU = CommUnit(CUName,CUType) <NEW_LINE> newCU.addWriter(Writer(spriteName,wait)) <NEW_LINE> self.addCU(newCU) <NEW_LINE> <DEDENT> <DEDENT> def addReader(self,CUName,spriteName,CUType): <NEW_LINE> <INDENT> CUID = self.getCUID(CUName,CUType) <NEW_LINE> if (CUID is not None): <NEW_LINE> <INDENT> self.collection[CUID].addReader(spriteName) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> newCU = CommUnit(CUName,CUType) <NEW_LINE> newCU.addReader(spriteName) <NEW_LINE> self.addCU(newCU) <NEW_LINE> <DEDENT> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> returnString = "<table width='70%' border='1'>" <NEW_LINE> for cu in self.collection: <NEW_LINE> <INDENT> returnString += unicode(cu) <NEW_LINE> <DEDENT> returnString += "</table>" <NEW_LINE> return(returnString) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return unicode(self).encode('utf8') <NEW_LINE> <DEDENT> def writeCUStoFile(self,baseName): <NEW_LINE> <INDENT> with open(baseName+".cur",'w') as f: <NEW_LINE> <INDENT> f.write(str(self))
|
CUCollection is the collection of CUs in the project
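A minimal usage sketch (CU names and types below are illustrative):
    cus = CUCollection()
    cus.addWriter('score', 'Sprite1', 'variable')  # first use creates the CommUnit
    cus.addReader('score', 'Sprite2', 'variable')  # later uses attach to the existing one
    cus.writeCUStoFile('project')                  # writes project.cur as an HTML table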
|
6259903b23e79379d538d6f2
|
class OSLCStats(jsl.Document): <NEW_LINE> <INDENT> class Options(object): <NEW_LINE> <INDENT> definition_id = "oslc_stats" <NEW_LINE> additional_properties = True
|
JSL schema for OSLC.
|
6259903b379a373c97d9a21a
|
class Transaction(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'transactions' <NEW_LINE> id = db.Column(db.Integer, primary_key=True, autoincrement=True) <NEW_LINE> accounts_id = db.Column(db.Integer, db.ForeignKey('accounts.id')) <NEW_LINE> amount = db.Column(db.Integer, nullable=False) <NEW_LINE> t_time = db.Column(db.Integer, default=0, nullable=False) <NEW_LINE> trans_at = db.Column(db.DateTime, index=False, unique=False, nullable=False, default=datetime.utcnow)
|
Create a Transaction table
|
6259903b73bcbd0ca4bcb47b
|
class SaltCall(parsers.SaltCallOptionParser): <NEW_LINE> <INDENT> def run(self): <NEW_LINE> <INDENT> self.parse_args() <NEW_LINE> if self.options.file_root: <NEW_LINE> <INDENT> file_root = os.path.abspath(self.options.file_root) <NEW_LINE> self.config['file_roots'] = {'base': _expand_glob_path([file_root])} <NEW_LINE> <DEDENT> if self.options.pillar_root: <NEW_LINE> <INDENT> pillar_root = os.path.abspath(self.options.pillar_root) <NEW_LINE> self.config['pillar_roots'] = {'base': _expand_glob_path([pillar_root])} <NEW_LINE> <DEDENT> if self.options.local: <NEW_LINE> <INDENT> self.config['file_client'] = 'local' <NEW_LINE> <DEDENT> if self.options.master: <NEW_LINE> <INDENT> self.config['master'] = self.options.master <NEW_LINE> <DEDENT> self.setup_logfile_logger() <NEW_LINE> verify_log(self.config) <NEW_LINE> caller = salt.cli.caller.Caller.factory(self.config) <NEW_LINE> if self.options.doc: <NEW_LINE> <INDENT> caller.print_docs() <NEW_LINE> self.exit(salt.defaults.exitcodes.EX_OK) <NEW_LINE> <DEDENT> if self.options.grains_run: <NEW_LINE> <INDENT> caller.print_grains() <NEW_LINE> self.exit(salt.defaults.exitcodes.EX_OK) <NEW_LINE> <DEDENT> caller.run()
|
Used to locally execute a Salt command
|
6259903bbaa26c4b54d5049a
|
class Logger(): <NEW_LINE> <INDENT> def __init__(self, name, **kwargs): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.level = kwargs.pop('level', LEVEL_ENV_VAR_VAL if LEVEL_ENV_VAR_VAL else DFL_LEVEL) <NEW_LINE> <DEDENT> def get_logger(self): <NEW_LINE> <INDENT> logger = logging.getLogger(self.name) <NEW_LINE> logger.setLevel(self.level) <NEW_LINE> c_handler = logging.StreamHandler() <NEW_LINE> c_format = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s: %(message)s') <NEW_LINE> c_handler.setFormatter(c_format) <NEW_LINE> logger.addHandler(c_handler) <NEW_LINE> return logger
|
Class initialization
This class is a simple wrapper around the built-in logging module functionality.
Attributes
----------
name : str
the logger name
level : str
the logging level to use, such as ERROR, WARNING, INFO or DEBUG
Methods
-------
get_logger()
Refer to method documentation
Notes
-----
The built-in logging module returns the existing logger instance if a logger with the given name already exists
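A minimal usage sketch (name and level are illustrative):
    log = Logger('my_app', level='INFO').get_logger()
    log.info('started')
Because getLogger() returns the same instance per name, a second get_logger() call attaches another StreamHandler and messages may appear duplicated.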
|
6259903bb5575c28eb7135c2
|
class Node: <NEW_LINE> <INDENT> def __init__(self, item = None, pos_item = None): <NEW_LINE> <INDENT> self._item = item <NEW_LINE> self._next = pos_item <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self._item) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self._item)
|
Linked list node implementation.
|
6259903b10dbd63aa1c71dc8
|
class CooperativeEngine(BaseEngine): <NEW_LINE> <INDENT> def start_socket_loop(self, socket): <NEW_LINE> <INDENT> socket.loop() <NEW_LINE> return None <NEW_LINE> <DEDENT> def sleep(self, seconds): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> Buffer = NotImplemented
|
An engine that assumes cooperative scheduling for persistent sockets.
|
6259903bb57a9660fecd2c6e
|
class Post(models.Model): <NEW_LINE> <INDENT> title = models.CharField(max_length=67, unique=True) <NEW_LINE> slug = models.SlugField(max_length=100, unique=True, blank=True, editable=False) <NEW_LINE> meta_description = models.CharField("Meta description for SEO", max_length=155) <NEW_LINE> abstract = models.TextField("Abstract (300-500 characters)", max_length=500) <NEW_LINE> pub_date = models.DateField("Date published") <NEW_LINE> keywords = models.CharField(max_length=100, blank=True) <NEW_LINE> authors = models.CharField(max_length=100) <NEW_LINE> site = models.ForeignKey(Site, blank=True, null=True) <NEW_LINE> has_latex_formula = models.BooleanField("Post with LATEX formula?", default=False) <NEW_LINE> content = RichTextUploadingField(blank=True, null=True) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return "%s" % self.title <NEW_LINE> <DEDENT> def get_absolute_url(self): <NEW_LINE> <INDENT> return "/blog/%s/%s/" % (self.pub_date.year, self.slug) <NEW_LINE> <DEDENT> def was_published_recently(self): <NEW_LINE> <INDENT> return self.pub_date >= timezone.now().date() - datetime.timedelta(days=7) <NEW_LINE> <DEDENT> was_published_recently.admin_order_field = "pub_date" <NEW_LINE> was_published_recently.boolean = True <NEW_LINE> was_published_recently.short_description = "Published recently?" <NEW_LINE> def save(self): <NEW_LINE> <INDENT> self.slug = slugify(self.title) <NEW_LINE> self.keywords = self.keywords.lower() <NEW_LINE> super(Post, self).save() <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> ordering = ["-pub_date"]
|
Post class to generate blog posts.
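On save() the slug is derived from the title; a small sketch of the resulting URL, assuming Django's slugify:
    from django.utils.text import slugify
    slugify('My First Post!')  # -> 'my-first-post'
    # get_absolute_url() then yields '/blog/<pub_year>/my-first-post/'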
|
6259903b8c3a8732951f774a
|
class MultiprocessingCounter(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.val = Value('i', 0) <NEW_LINE> <DEDENT> def increment(self, n=1): <NEW_LINE> <INDENT> with self.val.get_lock(): <NEW_LINE> <INDENT> self.val.value += n <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> return self.val.value
|
Instances of this class can be shared
safely between threads and between processes
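A minimal usage sketch on a fork-based platform (child processes inherit the shared Value):
    from multiprocessing import Process
    counter = MultiprocessingCounter()
    procs = [Process(target=counter.increment, args=(10,)) for _ in range(2)]
    for p in procs: p.start()
    for p in procs: p.join()
    print(counter.value)  # 20 -- get_lock() makes the read-modify-write atomic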
|
6259903b1f5feb6acb163de6
|
class Timing(Builtin): <NEW_LINE> <INDENT> attributes = ("HoldAll",) <NEW_LINE> summary_text = "CPU time to run a Mathics command" <NEW_LINE> def apply(self, expr, evaluation): <NEW_LINE> <INDENT> start = time.process_time() <NEW_LINE> result = expr.evaluate(evaluation) <NEW_LINE> stop = time.process_time() <NEW_LINE> return Expression("List", Real(stop - start), result)
|
<dl>
<dt>'Timing[$expr$]'
<dd>measures the processor time taken to evaluate $expr$.
It returns a list containing the measured time in seconds and the result of the evaluation.
</dl>
>> Timing[50!]
= {..., 30414093201713378043612608166064768844377641568960512000000000000}
>> Attributes[Timing]
= {HoldAll, Protected}
|
6259903b23849d37ff8522ac
|
class WeightedDemParWassGpGan(WeightedDemParWassGan): <NEW_LINE> <INDENT> def __init__(self, *args, gp=10.0, batch_size=64, **kwargs): <NEW_LINE> <INDENT> super(WeightedDemParWassGpGan, self).__init__(*args, **kwargs) <NEW_LINE> self.gp = gp <NEW_LINE> self.batch_size = batch_size <NEW_LINE> self.grad_norms = self._gradient_penalty(self.X, self.A) <NEW_LINE> self.aud_loss = self._get_weighted_aud_loss(self.unweighted_aud_loss, self.A_weights, self.Y_weights, self.AY_weights) + self.gp*self.grad_norms <NEW_LINE> self.aud_err = classification_error(self.A, tf.cast(tf.greater(self.A_hat, 0), tf.float32)) <NEW_LINE> self.loss = self._get_loss() <NEW_LINE> <DEDENT> def _get_aud_preds_from_logits(self, logits): <NEW_LINE> <INDENT> return logits <NEW_LINE> <DEDENT> def _gradient_penalty(self, X, A): <NEW_LINE> <INDENT> def _xor(x, y): <NEW_LINE> <INDENT> x_or_y = tf.minimum(x + y, 1.0) <NEW_LINE> not_x_and_y = 1.0 - x*y <NEW_LINE> return tf.squeeze(x_or_y*not_x_and_y) <NEW_LINE> <DEDENT> idx = tf.random_shuffle(tf.range(0, self.batch_size), seed=self.seed) <NEW_LINE> Xs = tf.gather(X, idx) <NEW_LINE> As = tf.gather(A, idx) <NEW_LINE> u = tf.random_uniform(Xs.shape) <NEW_LINE> Xmix = u*X + (1 - u)*Xs <NEW_LINE> Ahat_mix = self._get_sensitive_logits(self._get_latents(Xmix, reuse=True), reuse=True) <NEW_LINE> A_xor_As = _xor(self.A, As) <NEW_LINE> scale_factor = self.batch_size/tf.reduce_sum(A_xor_As) <NEW_LINE> return scale_factor*tf.losses.mean_squared_error( labels=tf.ones([self.batch_size, ]), predictions=tf.norm(tf.gradients(Ahat_mix, Xmix)[0], axis=1), weights=A_xor_As )
|
Gradient penalty style training:
we want the norm of the auditor gradients to be close to 1 in regions of Z space between the two groups,
i.e. broaden the decision boundary to give useful gradients, and make the auditor 1-Lipschitz.
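For reference, a minimal PyTorch sketch of the plain WGAN-GP penalty this term follows (the TensorFlow code above additionally reweights by the A-xor-As mask):
    import torch
    def gradient_penalty(critic, real, fake):
        eps = torch.rand(real.size(0), 1)                       # per-sample mixing weight
        mix = (eps * real + (1 - eps) * fake).requires_grad_(True)
        grads, = torch.autograd.grad(critic(mix).sum(), mix, create_graph=True)
        # push the gradient norm toward 1 to keep the critic 1-Lipschitz
        return ((grads.norm(2, dim=1) - 1) ** 2).mean()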
|
6259903b91af0d3eaad3b028
|
class WebTest(unittest.TestCase): <NEW_LINE> <INDENT> URL = os.environ.get('ARCHELON_TEST_URL', 'http://localhost:8580') <NEW_LINE> TOKEN = os.environ.get('ARCHELON_TEST_TOKEN', '1234') <NEW_LINE> CASSETTE_LIBRARY_BASE = 'archelonc/tests/testdata/cassettes/' <NEW_LINE> VCR = vcr.VCR( serializer='yaml', record_mode='once', match_on=['method', 'scheme', 'path', 'query', 'body'], ) <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(WebTest, self).__init__(*args, **kwargs) <NEW_LINE> current_file = os.path.abspath(inspect.getfile(self.__class__)) <NEW_LINE> basename = os.path.basename(current_file) <NEW_LINE> self.VCR.cassette_library_dir = '{0}{1}'.format( self.CASSETTE_LIBRARY_BASE, basename.replace('.py', '', 1) )
|
Battery for verifying the Web history class works as expected.
|
6259903b66673b3332c315ea
|
class EXCEL: <NEW_LINE> <INDENT> def __init__(self, workbook, worksheet): <NEW_LINE> <INDENT> self.workbook = workbook <NEW_LINE> self.worksheet = worksheet <NEW_LINE> try: <NEW_LINE> <INDENT> self.wb = xlrd.open_workbook(self.workbook) <NEW_LINE> self.ws = self.wb.sheet_by_name(self.worksheet) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise Exception("Error opening excel %s :%s" % (self.workbook, e)) <NEW_LINE> <DEDENT> <DEDENT> def _openworkbooknsheet(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.wb = xlrd.open_workbook(self.workbook) <NEW_LINE> self.ws = self.wb.sheet_by_name(self.worksheet) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise Exception("Error opening excel %s :%s" % (self.workbook, e)) <NEW_LINE> <DEDENT> <DEDENT> def _celladress4matchingvalue(self, matchstring): <NEW_LINE> <INDENT> global row_addr, column_addr, cell_addr <NEW_LINE> self.matchstring = matchstring <NEW_LINE> self._openworkbooknsheet() <NEW_LINE> rows = [] <NEW_LINE> columns = [] <NEW_LINE> try: <NEW_LINE> <INDENT> for erowtup in range(self.ws.nrows): <NEW_LINE> <INDENT> row = self.ws.row_values(erowtup) <NEW_LINE> for ecol in range(len(row)): <NEW_LINE> <INDENT> if row[ecol] == matchstring: <NEW_LINE> <INDENT> rows.append(erowtup) <NEW_LINE> columns.append(ecol) <NEW_LINE> row_addr = rows[0] <NEW_LINE> column_addr = columns[0] <NEW_LINE> cell_addr = [rows[0], columns[0]] <NEW_LINE> return cell_addr <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> except TypeError: <NEW_LINE> <INDENT> raise Exception('TypeError') <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise Exception("Error Occurred : %s" % e)
|
The EXCEL class contains definitions for many useful and custom utilities required to address the capabilities
of the Dev/Test framework. It contains methods for reading from and writing to an Excel workbook.
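A minimal usage sketch (file and sheet names are illustrative; note xlrd >= 2.0 reads only .xls files):
    xl = EXCEL('testdata.xls', 'Sheet1')
    addr = xl._celladress4matchingvalue('username')  # -> [row, col] of the first matching cell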
|
6259903b3eb6a72ae038b85e
|
class DistribGatherComp(Component): <NEW_LINE> <INDENT> def __init__(self, arr_size=11): <NEW_LINE> <INDENT> super(DistribGatherComp, self).__init__() <NEW_LINE> self.arr_size = arr_size <NEW_LINE> self.add_trait('invec', Array(np.ones(arr_size, float), iotype='in')) <NEW_LINE> self.add_trait('outvec', Array(np.ones(arr_size, float), iotype='out')) <NEW_LINE> <DEDENT> def execute(self): <NEW_LINE> <INDENT> if self.mpi.comm == MPI.COMM_NULL: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.mpi.comm.Allgatherv(self.invec, [self.outvec, self.sizes, self.offsets, MPI.DOUBLE]) <NEW_LINE> <DEDENT> def get_distrib_idxs(self): <NEW_LINE> <INDENT> comm = self.mpi.comm <NEW_LINE> rank = comm.rank <NEW_LINE> self.sizes, self.offsets = evenly_distrib_idxs(comm.size, self.arr_size) <NEW_LINE> start = self.offsets[rank] <NEW_LINE> end = start + self.sizes[rank] <NEW_LINE> self.invec = np.ones(self.sizes[comm.rank], dtype=float) <NEW_LINE> return { 'invec': make_idx_array(start, end) } <NEW_LINE> <DEDENT> def get_req_cpus(self): <NEW_LINE> <INDENT> return (2, 2)
|
Uses 2 procs; gathers a distributed input into a full output
|
6259903b30c21e258be99a02
|
class LocalRegionLayer(Layer): <NEW_LINE> <INDENT> def __init__(self, nofm, sofm, nreg, sreg, ntrd=1, strd=1): <NEW_LINE> <INDENT> super(LocalRegionLayer, self).__init__(nofm, sofm, strd=strd) <NEW_LINE> if isinstance(sreg, int): <NEW_LINE> <INDENT> hreg = sreg <NEW_LINE> wreg = sreg <NEW_LINE> <DEDENT> elif len(sreg) == 2: <NEW_LINE> <INDENT> hreg = sreg[0] <NEW_LINE> wreg = sreg[1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('LocalRegionLayer: sreg is invalid ({}), ' 'needs to be either one integer or ' 'a pair of integers'.format(sreg)) <NEW_LINE> <DEDENT> if nreg > 1 and (hreg * wreg) > 1: <NEW_LINE> <INDENT> raise ValueError('LocalRegionLayer: local region cannot be a mix ' 'of both n ({}) and h & w ({}, {})' .format(nreg, hreg, wreg)) <NEW_LINE> <DEDENT> self.nreg = nreg <NEW_LINE> self.hreg = hreg <NEW_LINE> self.wreg = wreg <NEW_LINE> self.ntrd = ntrd <NEW_LINE> nifm = self.nofm * self.ntrd <NEW_LINE> hifm = self.hreg + (self.hofm - 1) * self.htrd <NEW_LINE> wifm = self.wreg + (self.wofm - 1) * self.wtrd <NEW_LINE> self.inlayer = Layer(nifm, (hifm, wifm)) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def data_loops(): <NEW_LINE> <INDENT> dls = [None] * de.NUM <NEW_LINE> dls[de.FIL] = DataDimLoops() <NEW_LINE> dls[de.IFM] = DataDimLoops(le.OFM, le.BAT) <NEW_LINE> dls[de.OFM] = DataDimLoops(le.OFM, le.BAT) <NEW_LINE> return tuple(dls) <NEW_LINE> <DEDENT> def input_layer(self): <NEW_LINE> <INDENT> return self.inlayer <NEW_LINE> <DEDENT> def ops_per_neuron(self): <NEW_LINE> <INDENT> return self.region_size() <NEW_LINE> <DEDENT> def region_size(self): <NEW_LINE> <INDENT> return self.nreg * self.hreg * self.wreg <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '{}({})'.format( self.__class__.__name__, ', '.join([ 'nofm={}'.format(repr(self.nofm)), 'sofm={}'.format(repr((self.hofm, self.wofm))), 'nreg={}'.format(repr(self.nreg)), 'sreg={}'.format(repr((self.hreg, self.wreg))), 'ntrd={}'.format(repr(self.ntrd)), 'strd={}'.format(repr((self.htrd, self.wtrd)))]))
|
NN layer which computes on a local region. The layer has no or limited
shared weights, whose impact can be ignored during scheduling.
Includes pooling layer, normalization layer, and element-wise layer.
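The input fmap size follows the region/stride arithmetic in __init__; e.g. for a 3x3 pooling region with stride 2 over a 5x5 output fmap:
    hreg, strd, hofm = 3, 2, 5
    hifm = hreg + (hofm - 1) * strd  # 3 + 4*2 = 11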
|
6259903b1d351010ab8f4d11
|
class Hemorrhage(TimerBaseClass): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.sinkTerm = 0 <NEW_LINE> self.vesselWithSink = 0 <NEW_LINE> self.update(TimeDict) <NEW_LINE> self.start = int(self.Tstart/self.dt) <NEW_LINE> self.end = int(self.Tend/self.dt) <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> if self.currentTimeStep == self.start: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if self.currentTimeStep == self.end: <NEW_LINE> <INDENT> pass
|
class to implement a hemorrhagic event (blood loss)
|
6259903b15baa72349463186
|
class GraphNeuralNetwork_transition(_base.AbstractModule): <NEW_LINE> <INDENT> def __init__(self, edge_model_fn, node_model_fn, name="fwd_gnn"): <NEW_LINE> <INDENT> super(GraphNeuralNetwork_transition, self).__init__(name=name) <NEW_LINE> with self._enter_variable_scope(): <NEW_LINE> <INDENT> self._edge_block = blocks.EdgeBlock( edge_model_fn=edge_model_fn, use_edges=False, use_receiver_nodes=True, use_sender_nodes=True, use_globals=True, name='edge_block') <NEW_LINE> self._node_block = blocks.NodeBlock( node_model_fn=node_model_fn, use_received_edges=True, use_sent_edges=True, use_nodes=True, use_globals=True, received_edges_reducer=tf.math.unsorted_segment_sum, sent_edges_reducer=tf.math.unsorted_segment_sum, name="node_block") <NEW_LINE> <DEDENT> <DEDENT> def _build(self, graph): <NEW_LINE> <INDENT> next_state_graph_representation = self._node_block(self._edge_block(graph)) <NEW_LINE> graph_with_next_state = next_state_graph_representation.replace(globals = None) <NEW_LINE> return graph_with_next_state
|
GNN-based transition function.
|
6259903bd53ae8145f91965b
|
class TestVirtualizationApi(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.api = netbox_client.api.virtualization_api.VirtualizationApi() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_cluster_groups_create(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_cluster_groups_delete(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_cluster_groups_list(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_cluster_groups_partial_update(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_cluster_groups_read(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_cluster_groups_update(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_cluster_types_create(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_cluster_types_delete(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_cluster_types_list(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_cluster_types_partial_update(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_cluster_types_read(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_cluster_types_update(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_clusters_create(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_clusters_delete(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_clusters_list(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_clusters_partial_update(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_clusters_read(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_clusters_update(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_interfaces_create(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_interfaces_delete(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_interfaces_list(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_interfaces_partial_update(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_interfaces_read(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_interfaces_update(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_virtual_machines_create(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_virtual_machines_delete(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_virtual_machines_list(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_virtual_machines_partial_update(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_virtual_machines_read(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_virtualization_virtual_machines_update(self): <NEW_LINE> <INDENT> pass
|
VirtualizationApi unit test stubs
|
6259903b91af0d3eaad3b02a
|
class QuizQuestion(models.Model): <NEW_LINE> <INDENT> QTYPE_CHOICES = ( ("SC", _("Single Choice")), ("MC", _("Multiple Choice")), ("DD", _("Drag and Drop")), ("RG", _("Ranking")), ("HS", _("Hotspot")), ) <NEW_LINE> DIFFICULTY_CHOICES = ((1, 1), (2, 2), (3, 3), (4, 4), (5, 5)) <NEW_LINE> qtext = models.CharField( max_length=500, null=True, verbose_name=_("question text")) <NEW_LINE> qtype = models.CharField( max_length=2, choices=QTYPE_CHOICES, null=True, verbose_name=_("question type")) <NEW_LINE> tags = ArrayField( base_field=models.CharField(max_length=200), null=True, help_text= "If you want to add more than one tag, separate them with commas.") <NEW_LINE> difficulty = models.IntegerField( choices=DIFFICULTY_CHOICES, null=True, verbose_name=_("difficulty"))
|
This model defines questions for the self-learn quiz in the GeoMat app.
|
6259903b50485f2cf55dc177
|
class Add(Operator): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(Add, self).__init__() <NEW_LINE> <DEDENT> def forward(self, a, b): <NEW_LINE> <INDENT> res = singa.__add__(a, b) <NEW_LINE> if training: <NEW_LINE> <INDENT> self.shape0 = list(a.shape()) <NEW_LINE> self.shape1 = list(b.shape()) <NEW_LINE> self.shape3 = list(res.shape()) <NEW_LINE> <DEDENT> return res <NEW_LINE> <DEDENT> def backward(self, dy): <NEW_LINE> <INDENT> dx0, dx1 = dy, dy <NEW_LINE> if (type(dy) == float) or self.shape0 == self.shape1: <NEW_LINE> <INDENT> assert self.shape0 == self.shape1, ('should have same shape') <NEW_LINE> return dx0, dx1 <NEW_LINE> <DEDENT> dx0 = back_broadcast(self.shape3, self.shape0, dx0) <NEW_LINE> dx1 = back_broadcast(self.shape3, self.shape1, dx1) <NEW_LINE> return dx0, dx1
|
Performs element-wise binary addition.
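When the operands were broadcast in the forward pass, the backward pass must reduce the upstream gradient back to each input's shape; a NumPy sketch of what back_broadcast accomplishes:
    import numpy as np
    a, b = np.ones((3, 1)), np.ones((1, 4))
    dy = np.ones_like(a + b)            # upstream grad, shape (3, 4)
    da = dy.sum(axis=1, keepdims=True)  # reduced back to a's shape (3, 1)
    db = dy.sum(axis=0, keepdims=True)  # reduced back to b's shape (1, 4)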
|
6259903bc432627299fa41ef
|
class match(object): <NEW_LINE> <INDENT> def __init__(self, matcher): <NEW_LINE> <INDENT> self.matcher = matcher <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.matcher.matches(other)
|
Allow use of hamcrest matchers in mock.assert_*(..) methods.
Example:
m = Mock()
m('foo')
m.assert_called_with(match(starts_with('f')))
|
6259903b8da39b475be043e5
|
class MarkAmphoraPendingDeleteInDB(BaseDatabaseTask): <NEW_LINE> <INDENT> def execute(self, amphora): <NEW_LINE> <INDENT> LOG.debug("Mark PENDING DELETE in DB for amphora: %s " "with compute id %s", amphora.id, amphora.compute_id) <NEW_LINE> self.amphora_repo.update(db_apis.get_session(), amphora.id, status=constants.PENDING_DELETE) <NEW_LINE> <DEDENT> def revert(self, amphora, *args, **kwargs): <NEW_LINE> <INDENT> LOG.warn(_LW("Reverting mark amphora pending delete in DB " "for amp id %(amp)s and compute id %(comp)s"), {'amp': amphora.id, 'comp': amphora.compute_id}) <NEW_LINE> self.amphora_repo.update(db_apis.get_session(), amphora.id, status=constants.ERROR)
|
Mark the amphora pending delete in the DB.
Since SQLAlchemy will likely retry by itself, always revert if it fails
|
6259903bb57a9660fecd2c71
|
class ServicePlacementPolicy(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'type': {'required': True}, } <NEW_LINE> _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, } <NEW_LINE> _subtype_map = { 'type': {'InvalidDomain': 'ServicePlacementInvalidDomainPolicy', 'NonPartiallyPlaceService': 'ServicePlacementNonPartiallyPlaceServicePolicy', 'PreferredPrimaryDomain': 'ServicePlacementPreferPrimaryDomainPolicy', 'RequiredDomain': 'ServicePlacementRequiredDomainPolicy', 'RequiredDomainDistribution': 'ServicePlacementRequireDomainDistributionPolicy'} } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(ServicePlacementPolicy, self).__init__(**kwargs) <NEW_LINE> self.type = None
|
Describes the policy to be used for placement of a Service Fabric service.
You probably want to use the sub-classes and not this class directly. Known
sub-classes are: ServicePlacementInvalidDomainPolicy, ServicePlacementNonPartiallyPlaceServicePolicy, ServicePlacementPreferPrimaryDomainPolicy, ServicePlacementRequiredDomainPolicy, ServicePlacementRequireDomainDistributionPolicy.
All required parameters must be populated in order to send to Azure.
:param type: Required. The type of placement policy for a service fabric service. Following are
the possible values. Constant filled by server. Possible values include: "InvalidDomain",
"RequiredDomain", "PreferredPrimaryDomain", "RequiredDomainDistribution",
"NonPartiallyPlaceService".
:type type: str or
~service_fabric_managed_clusters_management_client.models.ServicePlacementPolicyType
|
6259903b21bff66bcd723e5f
|
class YahooTermExtractor(object): <NEW_LINE> <INDENT> __slots__ = ('r', ) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.r = Retrieve( YahooTermExtractor.__name__ ) <NEW_LINE> <DEDENT> def extractTerms(self, content): <NEW_LINE> <INDENT> params = urlencode( {'appid': YAHOO_APP_ID, 'context': content, 'output': 'json' }) <NEW_LINE> result = eval ( self.r.open(YAHOO_TERM_EXTRACTION_URI, params).read() ) <NEW_LINE> return result['ResultSet']['Result']
|
Interfaces with Yahoo's search service
* Term extraction: extract terms from yahoo search
http://developer.yahoo.com/search/content/V1/termExtraction.html
|
6259903be76e3b2f99fd9c02
|
class RequestHandler(ABC): <NEW_LINE> <INDENT> @abstractmethod <NEW_LINE> def get(self, request): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def post(self, request): <NEW_LINE> <INDENT> pass
|
The class that users should sub-class and provide an implementation for. Each of
these functions **should** return an instance of the `Response` class
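A minimal subclass sketch (the Response constructor shown is hypothetical):
    class EchoHandler(RequestHandler):
        def get(self, request):
            return Response(body=repr(request))  # hypothetical Response signature
        def post(self, request):
            return Response(body='created')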
|
6259903b8e05c05ec3f6f756
|
class ServiceregistryEndpointsUpdateRequest(_messages.Message): <NEW_LINE> <INDENT> endpoint = _messages.StringField(1, required=True) <NEW_LINE> endpointResource = _messages.MessageField('Endpoint', 2) <NEW_LINE> project = _messages.StringField(3, required=True)
|
A ServiceregistryEndpointsUpdateRequest object.
Fields:
endpoint: The name of the endpoint for this request.
endpointResource: An Endpoint resource to be passed as the request body.
project: The project ID for this request.
|
6259903b23e79379d538d6f6
|
class GlobalAveragePoolBlock(BaseBlock): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super().__init__(regexp='gap', **kwargs) <NEW_LINE> <DEDENT> def _handle_parsed_args(self) -> None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def inverse_code(self) -> str: <NEW_LINE> <INDENT> raise ValueError('Inverse code for global average pool block is not defined.') <NEW_LINE> <DEDENT> def apply(self, x: tf.Tensor) -> tf.Tensor: <NEW_LINE> <INDENT> ndim = len(x.shape) <NEW_LINE> return tf.reduce_mean(x, axis=[ndim-3, ndim-2])
|
Global average pooling block effectively flattening spatial dimensions of the input feature maps.
.. warning::
Expects ?HWC data format (e.g. BHWC or BTHWC).
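E.g. for BHWC input, apply() averages over axes 1 and 2:
    import tensorflow as tf
    x = tf.random.normal([2, 8, 8, 16])  # BHWC
    tf.reduce_mean(x, axis=[1, 2])       # -> shape (2, 16), spatial dims pooled away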
|
6259903bd4950a0f3b11173b
|
class StripeResourceMixin: <NEW_LINE> <INDENT> def ensure_stripe_resource(self, resource, attrs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> instance = resource.retrieve(attrs['id']) <NEW_LINE> <DEDENT> except (KeyError, InvalidRequestError): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> del attrs['id'] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return resource.create(**attrs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for (key, val) in list(attrs.items()): <NEW_LINE> <INDENT> setattr(instance, key, val) <NEW_LINE> <DEDENT> instance.save() <NEW_LINE> return instance <NEW_LINE> <DEDENT> <DEDENT> def get_customer_kwargs(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def get_customer(self): <NEW_LINE> <INDENT> return self.ensure_stripe_resource( resource=Customer, attrs=self.get_customer_kwargs(), ) <NEW_LINE> <DEDENT> def get_subscription_kwargs(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def get_subscription(self): <NEW_LINE> <INDENT> customer = self.get_customer() <NEW_LINE> return self.ensure_stripe_resource( resource=customer.subscriptions, attrs=self.get_subscription_kwargs(), ) <NEW_LINE> <DEDENT> def get_charge_kwargs(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def get_charge(self): <NEW_LINE> <INDENT> return self.ensure_stripe_resource( resource=Charge, attrs=self.get_charge_kwargs(), )
|
Stripe actions for resources, available as a Form mixin class.
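A hedged sketch of a form using the mixin (field names and values are illustrative):
    from django import forms
    class DonateForm(StripeResourceMixin, forms.Form):
        token = forms.CharField()
        def get_charge_kwargs(self):
            return {'amount': 500, 'currency': 'usd', 'source': self.cleaned_data['token']}
    # form.get_charge() then retrieves-or-creates the Charge via ensure_stripe_resource()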
|
6259903b711fe17d825e1597
|
@Attention.register("dot_product") <NEW_LINE> class DotProductAttention(Attention): <NEW_LINE> <INDENT> @overrides <NEW_LINE> def _forward_internal(self, vector: torch.Tensor, matrix: torch.Tensor) -> torch.Tensor: <NEW_LINE> <INDENT> return matrix.bmm(vector.unsqueeze(-1)).squeeze(-1) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_params(cls, params: Params): <NEW_LINE> <INDENT> normalize = params.pop_bool('normalize', True) <NEW_LINE> params.assert_empty(cls.__name__) <NEW_LINE> return DotProductAttention(normalize)
|
Computes attention between a vector and a matrix using dot product.
|
6259903b287bf620b6272de1
|
class Enrollment(Base): <NEW_LINE> <INDENT> __tablename__ = 'enrollments' <NEW_LINE> id = sa.Column(sa.Integer, sa.Sequence('enrollement_id_seq'), primary_key=True) <NEW_LINE> student_id = sa.Column(sa.Integer, sa.ForeignKey(Student.id)) <NEW_LINE> class_instance_id = sa.Column(sa.Integer, sa.ForeignKey(ClassInstance.id)) <NEW_LINE> attempt = sa.Column(sa.SMALLINT) <NEW_LINE> student_year = sa.Column(sa.Integer) <NEW_LINE> statutes = sa.Column(sa.String(20)) <NEW_LINE> observation = sa.Column(sa.String(30)) <NEW_LINE> attendance = sa.Column(sa.Boolean, nullable=True, default=None) <NEW_LINE> attendance_date = sa.Column(sa.Date, nullable=True, default=None) <NEW_LINE> improved = sa.Column(sa.Boolean, nullable=True, default=None) <NEW_LINE> improvement_grade = sa.Column(sa.SmallInteger, default=0) <NEW_LINE> improvement_grade_date = sa.Column(sa.Date, nullable=True, default=None) <NEW_LINE> continuous_grade = sa.Column(sa.SmallInteger, default=0) <NEW_LINE> continuous_grade_date = sa.Column(sa.Date, nullable=True, default=None) <NEW_LINE> exam_grade = sa.Column(sa.SmallInteger, default=0) <NEW_LINE> exam_grade_date = sa.Column(sa.Date, nullable=True, default=None) <NEW_LINE> special_grade = sa.Column(sa.SmallInteger, default=0) <NEW_LINE> special_grade_date = sa.Column(sa.Date, nullable=True, default=None) <NEW_LINE> approved = sa.Column(sa.Boolean, nullable=True, default=None) <NEW_LINE> student = orm.relationship("Student", back_populates="enrollments") <NEW_LINE> class_instance = orm.relationship("ClassInstance", back_populates="enrollments") <NEW_LINE> __table_args__ = (sa.UniqueConstraint('student_id', 'class_instance_id', name='un_enrollment'),) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "{} enrolled to {}, attempt:{}, student year:{}, statutes:{}, obs:{}".format( self.student, self.class_instance, self.attempt, self.student_year, self.statutes, self.observation) <NEW_LINE> <DEDENT> def serialize(self): <NEW_LINE> <INDENT> return { 'id': self.id, 'student': self.student_id, 'class_instance': self.class_instance_id, 'attempt': self.attempt, 'student_year': self.student_year, 'statutes': self.statutes, 'attendance': self.attendance, 'attendance_date': None if self.attendance_date is None else self.attendance_date.isoformat(), 'improved': self.improved, 'improvement_grade': self.improvement_grade, 'improvement_grade_date': None if self.improvement_grade_date is None else self.improvement_grade_date.isoformat(), 'continuous_grade': self.continuous_grade, 'continuous_grade_date': None if self.continuous_grade_date is None else self.continuous_grade_date.isoformat(), 'exam_grade': self.exam_grade, 'exam_grade_date': self.exam_grade_date, 'special_grade': self.special_grade, 'special_grade_date': None if self.special_grade_date is None else self.special_grade_date.isoformat(), 'approved': self.approved}
|
An enrollment is a :py:class:`Student` to :py:class:`ClassInstance` relationship
|
6259903bbe383301e0254a0e
|
class VideoInfoLectureCommentAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> list_display = ('message', 'register_date', 'author',) <NEW_LINE> list_per_page = 30 <NEW_LINE> list_filter = ('author',) <NEW_LINE> search_fields = ('message',) <NEW_LINE> date_hierarchy = 'register_date' <NEW_LINE> readonly_fields = ('message', 'ascription', 'register_date', 'author') <NEW_LINE> def has_add_permission(self, request): <NEW_LINE> <INDENT> return False
|
Video section comments.
|
6259903b4e696a045264e71d
|
class User(Base): <NEW_LINE> <INDENT> __tablename__ = 'users' <NEW_LINE> id = Column(Integer, primary_key=True) <NEW_LINE> first_name = Column(Unicode(255), nullable=False) <NEW_LINE> last_name = Column(Unicode(255), nullable=False) <NEW_LINE> email = Column(Unicode(255), nullable=False, unique=True) <NEW_LINE> password = Column(Unicode(255), nullable=False) <NEW_LINE> sign_up_date = Column(Date()) <NEW_LINE> city = Column(Unicode(255), nullable=False) <NEW_LINE> state = Column(Unicode(5), nullable=False) <NEW_LINE> note = Column(UnicodeText()) <NEW_LINE> searches = relationship('Search') <NEW_LINE> def verify_credential(self, email, password): <NEW_LINE> <INDENT> is_authenticated = False <NEW_LINE> if self.email == email: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> is_authenticated = pwd_context.verify( password, self.password ) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> return is_authenticated
|
User schema.
|
6259903b0fa83653e46f60d2
|
class QueuePropertiesPaged(AsyncPageIterator): <NEW_LINE> <INDENT> def __init__(self, command, prefix=None, results_per_page=None, continuation_token=None): <NEW_LINE> <INDENT> super(QueuePropertiesPaged, self).__init__( self._get_next_cb, self._extract_data_cb, continuation_token=continuation_token or "" ) <NEW_LINE> self._command = command <NEW_LINE> self.service_endpoint = None <NEW_LINE> self.prefix = prefix <NEW_LINE> self.marker = None <NEW_LINE> self.results_per_page = results_per_page <NEW_LINE> self.location_mode = None <NEW_LINE> <DEDENT> async def _get_next_cb(self, continuation_token): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return await self._command( marker=continuation_token or None, maxresults=self.results_per_page, cls=return_context_and_deserialized, use_location=self.location_mode) <NEW_LINE> <DEDENT> except HttpResponseError as error: <NEW_LINE> <INDENT> process_storage_error(error) <NEW_LINE> <DEDENT> <DEDENT> async def _extract_data_cb(self, get_next_return): <NEW_LINE> <INDENT> self.location_mode, self._response = get_next_return <NEW_LINE> self.service_endpoint = self._response.service_endpoint <NEW_LINE> self.prefix = self._response.prefix <NEW_LINE> self.marker = self._response.marker <NEW_LINE> self.results_per_page = self._response.max_results <NEW_LINE> props_list = [QueueProperties._from_generated(q) for q in self._response.queue_items] <NEW_LINE> return self._response.next_marker or None, props_list
|
An iterable of Queue properties.
:ivar str service_endpoint: The service URL.
:ivar str prefix: A queue name prefix being used to filter the list.
:ivar str marker: The continuation token of the current page of results.
:ivar int results_per_page: The maximum number of results retrieved per API call.
:ivar str next_marker: The continuation token to retrieve the next page of results.
:ivar str location_mode: The location mode being used to list results. The available
options include "primary" and "secondary".
:param callable command: Function to retrieve the next page of items.
:param str prefix: Filters the results to return only queues whose names
begin with the specified prefix.
:param int results_per_page: The maximum number of queue names to retrieve per
call.
:param str continuation_token: An opaque continuation token.
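A hedged usage sketch; the queue service client builds this pager internally when listing queues:
    from azure.storage.queue.aio import QueueServiceClient
    async def list_task_queues(conn_str):
        async with QueueServiceClient.from_connection_string(conn_str) as client:
            async for q in client.list_queues(name_starts_with='task'):
                print(q.name)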
|
6259903b30c21e258be99a04
|
class ModelFrame: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass
|
Downsampled representation of a single Frame, produced by the model.
|
6259903b76d4e153a661db6f
|
class List(Base): <NEW_LINE> <INDENT> def __init__(self, value=None, *args, **kwargs): <NEW_LINE> <INDENT> from casper.lib.controls import controls <NEW_LINE> if "content" not in kwargs: <NEW_LINE> <INDENT> raise ValueError("A 'content' argument describing the 'List' " "content is expected.") <NEW_LINE> <DEDENT> inner_desc = kwargs["content"].split("_") <NEW_LINE> control_type = inner_desc[0] <NEW_LINE> inner_kwargs = {"inner": True} <NEW_LINE> if len(inner_desc) > 1: <NEW_LINE> <INDENT> inner_kwargs["content"] = "_".join(inner_desc[1:]) <NEW_LINE> <DEDENT> if control_type not in controls: <NEW_LINE> <INDENT> raise ValueError("List creation: '{0}' is not a valid inner " "control type. Allowed types are {1}.".format( kwargs["content"], controls.keys())) <NEW_LINE> <DEDENT> self.inner_control = controls[control_type](**inner_kwargs) <NEW_LINE> Base.__init__(self, value, *args, **kwargs) <NEW_LINE> self.iterable = True <NEW_LINE> <DEDENT> def _is_valid(self, value): <NEW_LINE> <INDENT> if value is None: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif isinstance(value, list): <NEW_LINE> <INDENT> for item in value: <NEW_LINE> <INDENT> if not self.inner_control._is_valid(item): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False
|
Define a list parameter.
|
6259903bcad5886f8bdc5978
|
class l10n_br_base_city(orm.Model): <NEW_LINE> <INDENT> _name = 'l10n_br_base.city' <NEW_LINE> _description = u'Municipio' <NEW_LINE> _columns = { 'name': fields.char('Nome', size=64, required=True), 'state_id': fields.many2one('res.country.state', 'Estado', required=True), 'ibge_code': fields.char('Codigo IBGE', size=7) }
|
This object persists all municipalities related to a state.
In Brazil, some fiscal documents must carry the IBGE code of the
municipalities involved in the transaction.
|
6259903b73bcbd0ca4bcb481
|
class ImageServerWindow(Window): <NEW_LINE> <INDENT> flipVertical = False <NEW_LINE> gui = False <NEW_LINE> imageWinName = "Image Server Window" <NEW_LINE> def __init__(self, port=ImageServer.default_port, start_server=True, *args, **kwargs): <NEW_LINE> <INDENT> self.logger = logging.getLogger(self.__class__.__name__) <NEW_LINE> self.rect = None <NEW_LINE> self.server = ImageServer(port=port, start_server=start_server) <NEW_LINE> if self.gui and not kwargs.get('fullscr', True): <NEW_LINE> <INDENT> cv2.namedWindow(self.imageWinName) <NEW_LINE> cv2.waitKey(1) <NEW_LINE> <DEDENT> Window.__init__(self, *args, **kwargs) <NEW_LINE> <DEDENT> def flip(self, clearBuffer=True): <NEW_LINE> <INDENT> Window.flip(self, clearBuffer=False) <NEW_LINE> self.updatePygletImage() <NEW_LINE> if clearBuffer: <NEW_LINE> <INDENT> Window.clearBuffer(self) <NEW_LINE> <DEDENT> <DEDENT> def setRect(self, rect): <NEW_LINE> <INDENT> if rect is not None and len(rect) == 4: <NEW_LINE> <INDENT> self.rect = rect <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.logger.error("Invalid selection rect: {}".format(rect)) <NEW_LINE> <DEDENT> <DEDENT> def updatePygletImage(self): <NEW_LINE> <INDENT> rawImage = pyglet.image.get_buffer_manager().get_color_buffer().get_image_data() <NEW_LINE> if self.rect is not None: <NEW_LINE> <INDENT> rawImage = rawImage.get_region(*self.rect) <NEW_LINE> <DEDENT> imageBytes = rawImage.get_data(rawImage.format, rawImage.pitch) <NEW_LINE> imageRGBA = np.ndarray(shape=(rawImage.height, rawImage.width, len(rawImage.format)), buffer=imageBytes, dtype=np.uint8, strides=(rawImage.pitch, len(rawImage.format), 1)) <NEW_LINE> imageR, imageG, imageB, _ = cv2.split(imageRGBA) <NEW_LINE> imageBGR = cv2.merge((imageB, imageG, imageR)) <NEW_LINE> if self.flipVertical: <NEW_LINE> <INDENT> imageBGR = cv2.flip(imageBGR, 0) <NEW_LINE> <DEDENT> self.server.write(imageBGR) <NEW_LINE> if self.gui and not self._isFullScr: <NEW_LINE> <INDENT> cv2.imshow(self.imageWinName, imageBGR) <NEW_LINE> cv2.waitKey(1) <NEW_LINE> <DEDENT> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.server.stop() <NEW_LINE> Window.close(self)
|
A variant of psychopy.visual.Window (running on pyglet) that automatically posts screen images over RPC.
Usage:
# Create an ImageServerWindow (drop-in replacement for psychopy.visual.Window)
from lumos.output import ImageServerWindow
win = ImageServerWindow(size=(800, 600), fullscr=True, screen=0, allowGUI=False, allowStencil=False,
monitor=u'testMonitor', color=u'black', colorSpace=u'rgb')
...
win.flip() # must call this when each frame is complete - augments original flip(), copies rendered frame for serving
...
# You can also specify a rectangular region to serve
win.setRect((int(win.size[0] / 4), int(win.size[1] / 4), int(win.size[0] / 2), int(win.size[1] / 2))) # (x, y, w, h)
# It's a good idea to close the window when done; this cleanly releases backend server thread
win.close()
|
6259903b66673b3332c315ee
|
class selectRow(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.editable = True <NEW_LINE> self.enabled = True <NEW_LINE> self.items = range(1, 38) <NEW_LINE> self.dropdownWidth = 'WWWWW' <NEW_LINE> self.width = 'WWW' <NEW_LINE> self.value = "" <NEW_LINE> <DEDENT> def onSelChange(self, selection): <NEW_LINE> <INDENT> loadCode.disableLoad() <NEW_LINE> <DEDENT> def onEditChange(self, text): <NEW_LINE> <INDENT> loadCode.disableLoad() <NEW_LINE> <DEDENT> def onFocus(self, focused): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def onEnter(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def refresh(self): <NEW_LINE> <INDENT> pass
|
Implementation for addin_addin.getRow (ComboBox)
|
6259903bb57a9660fecd2c73
|
class _JSONEditor ( UIEditor ): <NEW_LINE> <INDENT> root = List <NEW_LINE> view = View( UItem( 'root', editor = GridEditor( adapter = JSONAdapter, operations = [] ) ) ) <NEW_LINE> def init_ui ( self, parent ): <NEW_LINE> <INDENT> return self.edit_facets( parent = parent, kind = 'editor' ) <NEW_LINE> <DEDENT> def update_editor ( self ): <NEW_LINE> <INDENT> value = self.value <NEW_LINE> if isinstance( value, basestring ): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = loads( value ) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> self.root = [ item_for( 'JSON', value ) ]
|
Defines the implementation of the editor class for viewing the contents
of a Python JSON object.
|
6259903b07d97122c4217e96
|
class TestPruferSequence(object): <NEW_LINE> <INDENT> def test_nontree(self): <NEW_LINE> <INDENT> with pytest.raises(nx.NotATree): <NEW_LINE> <INDENT> G = nx.cycle_graph(3) <NEW_LINE> nx.to_prufer_sequence(G) <NEW_LINE> <DEDENT> <DEDENT> def test_null_graph(self): <NEW_LINE> <INDENT> with pytest.raises(nx.NetworkXPointlessConcept): <NEW_LINE> <INDENT> nx.to_prufer_sequence(nx.null_graph()) <NEW_LINE> <DEDENT> <DEDENT> def test_trivial_graph(self): <NEW_LINE> <INDENT> with pytest.raises(nx.NetworkXPointlessConcept): <NEW_LINE> <INDENT> nx.to_prufer_sequence(nx.trivial_graph()) <NEW_LINE> <DEDENT> <DEDENT> def test_bad_integer_labels(self): <NEW_LINE> <INDENT> with pytest.raises(KeyError): <NEW_LINE> <INDENT> T = nx.Graph(nx.utils.pairwise('abc')) <NEW_LINE> nx.to_prufer_sequence(T) <NEW_LINE> <DEDENT> <DEDENT> def test_encoding(self): <NEW_LINE> <INDENT> tree = nx.Graph([(0, 3), (1, 3), (2, 3), (3, 4), (4, 5)]) <NEW_LINE> sequence = nx.to_prufer_sequence(tree) <NEW_LINE> assert sequence == [3, 3, 3, 4] <NEW_LINE> <DEDENT> def test_decoding(self): <NEW_LINE> <INDENT> sequence = [3, 3, 3, 4] <NEW_LINE> tree = nx.from_prufer_sequence(sequence) <NEW_LINE> assert_nodes_equal(list(tree), list(range(6))) <NEW_LINE> edges = [(0, 3), (1, 3), (2, 3), (3, 4), (4, 5)] <NEW_LINE> assert_edges_equal(list(tree.edges()), edges) <NEW_LINE> <DEDENT> def test_decoding2(self): <NEW_LINE> <INDENT> sequence = [2, 4, 0, 1, 3, 3] <NEW_LINE> tree = nx.from_prufer_sequence(sequence) <NEW_LINE> assert_nodes_equal(list(tree), list(range(8))) <NEW_LINE> edges = [(0, 1), (0, 4), (1, 3), (2, 4), (2, 5), (3, 6), (3, 7)] <NEW_LINE> assert_edges_equal(list(tree.edges()), edges) <NEW_LINE> <DEDENT> def test_inverse(self): <NEW_LINE> <INDENT> for T in nx.nonisomorphic_trees(4): <NEW_LINE> <INDENT> T2 = nx.from_prufer_sequence(nx.to_prufer_sequence(T)) <NEW_LINE> assert_nodes_equal(list(T), list(T2)) <NEW_LINE> assert_edges_equal(list(T.edges()), list(T2.edges())) <NEW_LINE> <DEDENT> for seq in product(range(4), repeat=2): <NEW_LINE> <INDENT> seq2 = nx.to_prufer_sequence(nx.from_prufer_sequence(seq)) <NEW_LINE> assert list(seq) == seq2
|
Unit tests for the Prüfer sequence encoding and decoding
functions.
|
6259903b73bcbd0ca4bcb482
|
class KernelCullingTest(NotebookTestBase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def get_argv(cls): <NEW_LINE> <INDENT> argv = super(KernelCullingTest, cls).get_argv() <NEW_LINE> argv.extend(['--MappingKernelManager.cull_idle_timeout=2', '--MappingKernelManager.cull_interval=1', '--MappingKernelManager.cull_connected=False']) <NEW_LINE> return argv <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> self.kern_api = KernelAPI(self.request, base_url=self.base_url(), headers=self.auth_headers(), ) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> for k in self.kern_api.list().json(): <NEW_LINE> <INDENT> self.kern_api.shutdown(k['id']) <NEW_LINE> <DEDENT> <DEDENT> def test_culling(self): <NEW_LINE> <INDENT> kid = self.kern_api.start().json()['id'] <NEW_LINE> ws = self.kern_api.websocket(kid) <NEW_LINE> model = self.kern_api.get(kid).json() <NEW_LINE> self.assertEqual(model['connections'], 1) <NEW_LINE> assert not self.get_cull_status(kid) <NEW_LINE> ws.close() <NEW_LINE> assert self.get_cull_status(kid) <NEW_LINE> <DEDENT> def get_cull_status(self, kid): <NEW_LINE> <INDENT> culled = False <NEW_LINE> for i in range(15): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.kern_api.get(kid) <NEW_LINE> <DEDENT> except HTTPError as e: <NEW_LINE> <INDENT> assert e.response.status_code == 404 <NEW_LINE> culled = True <NEW_LINE> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> time.sleep(0.2) <NEW_LINE> <DEDENT> <DEDENT> return culled
|
Test kernel culling
|
6259903bd4950a0f3b11173c
|
class Model(nn.Module): <NEW_LINE> <INDENT> def __init__(self, args): <NEW_LINE> <INDENT> super(Model, self).__init__() <NEW_LINE> self.l2_norm = args.l2_norm <NEW_LINE> self.Ci = 2 if args.multichannel else 1 <NEW_LINE> self.Co = args.feature_maps <NEW_LINE> self.embeddings = nn.ModuleList([nn.Embedding(args.num_embeddings, args.embedding_dim) for i in range(self.Ci)]) <NEW_LINE> if args.multichannel: <NEW_LINE> <INDENT> self.embeddings[-1].weight.requires_grad = False <NEW_LINE> <DEDENT> self.conv_2ds = nn.ModuleList([nn.Conv2d(self.Ci, self.Co, (k, args.embedding_dim)) for k in args.filter_windows]) <NEW_LINE> self.conv_2ds_mc = nn.ModuleList([nn.Conv2d(self.Ci*2, self.Co, (k, args.embedding_dim)) for k in args.filter_windows]) <NEW_LINE> self.maxpool1d = nn.ModuleList([nn.MaxPool1d(args.embedding_dim-k+1) for k in args.filter_windows]) <NEW_LINE> self.dropout = nn.Dropout(args.dropout) <NEW_LINE> self.linear = nn.Linear(self.Co * len(args.filter_windows), args.classes) <NEW_LINE> self.forward = self.forward_multi if args.multichannel else self.forward_static <NEW_LINE> <DEDENT> def forward_static(self, x): <NEW_LINE> <INDENT> embeds = self.embeddings[0](x) <NEW_LINE> fmaps = [torch.squeeze(F.relu(layer(embeds)), 3) for layer in self.conv_2ds] <NEW_LINE> feas = torch.cat([torch.squeeze(self.maxpool1d[i](fmaps[i])) for i in range(len(self.conv_2ds))], 1) <NEW_LINE> feas = self.dropout(feas) <NEW_LINE> outputs = self.linear(feas) <NEW_LINE> return outputs <NEW_LINE> <DEDENT> def forward_multi(self, x): <NEW_LINE> <INDENT> embeds = [layer(x) for layer in self.embeddings] <NEW_LINE> inputs = torch.cat(embeds, 1) <NEW_LINE> fmaps = [torch.squeeze(F.relu(layer(inputs)), 3) for layer in self.conv_2ds_mc] <NEW_LINE> feas = torch.cat([torch.squeeze(self.maxpool1d[i](fmaps[i])) for i in range(len(self.conv_2ds))], 1) <NEW_LINE> feas = self.dropout(feas) <NEW_LINE> outputs = self.linear(feas) <NEW_LINE> return outputs <NEW_LINE> <DEDENT> def initialize_embedding(self, embedding_matrix): <NEW_LINE> <INDENT> for i, layer in enumerate(self.embeddings): <NEW_LINE> <INDENT> self.embeddings[i].weight.data.copy_(torch.from_numpy(embedding_matrix)) <NEW_LINE> <DEDENT> if self.Ci == 2: <NEW_LINE> <INDENT> self.embeddings[-1].weight.requires_grad = False
|
Implementing "Convolutional Neural Networks for Sentence Classification"
|
6259903b63f4b57ef0086670
|
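A minimal sketch of exercising the static path of this Model; the args namespace and its values are assumptions, since the record does not include the training script. Note the MaxPool1d kernel above implies the token sequence length must equal embedding_dim.

import types
import torch

args = types.SimpleNamespace(
    l2_norm=3.0, multichannel=False, feature_maps=100,
    num_embeddings=10000, embedding_dim=128,
    filter_windows=[3, 4, 5], dropout=0.5, classes=2)
model = Model(args)
# Sequence length equals embedding_dim so each pooling kernel matches
# its convolution's output length exactly.
tokens = torch.randint(0, args.num_embeddings, (8, 128))
logits = model(tokens)  # dispatches to forward_static; shape (8, 2)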
class TestUnitManager(unittest.TestCase): <NEW_LINE> <INDENT> def test_small(self): <NEW_LINE> <INDENT> value = 5 <NEW_LINE> result = convert_with_unit(value) <NEW_LINE> assert(result == '5') <NEW_LINE> <DEDENT> def test_large(self): <NEW_LINE> <INDENT> value = 12345. <NEW_LINE> result = convert_with_unit(value) <NEW_LINE> assert(result == '12.3k') <NEW_LINE> <DEDENT> def test_negative(self): <NEW_LINE> <INDENT> value = -3 <NEW_LINE> result = convert_with_unit(value) <NEW_LINE> assert(result == '-3') <NEW_LINE> <DEDENT> def test_verysmall(self): <NEW_LINE> <INDENT> value = -0.0007 <NEW_LINE> result = convert_with_unit(value) <NEW_LINE> assert(result == '-700µ') <NEW_LINE> <DEDENT> def test_fraction(self): <NEW_LINE> <INDENT> value = 0.07 <NEW_LINE> result = convert_with_unit(value) <NEW_LINE> assert(result == '70m') <NEW_LINE> <DEDENT> def test_digits(self): <NEW_LINE> <INDENT> value = 12345. <NEW_LINE> result = convert_with_unit(value) <NEW_LINE> assert(result == '12.3k') <NEW_LINE> <DEDENT> def test_digits2(self): <NEW_LINE> <INDENT> value = 0.29 <NEW_LINE> result = convert_with_unit(value) <NEW_LINE> assert(result == '290m') <NEW_LINE> <DEDENT> def test_digits3(self): <NEW_LINE> <INDENT> value = 201186 <NEW_LINE> result = convert_with_unit(value) <NEW_LINE> assert(result == '201k') <NEW_LINE> <DEDENT> def test_digits4(self): <NEW_LINE> <INDENT> value = -201 <NEW_LINE> result = convert_with_unit(value) <NEW_LINE> assert(result == '-201') <NEW_LINE> <DEDENT> def test_digits5(self): <NEW_LINE> <INDENT> value = -4242.2 <NEW_LINE> result = convert_with_unit(value) <NEW_LINE> assert(result == '-4.24k') <NEW_LINE> <DEDENT> def test_digits6(self): <NEW_LINE> <INDENT> value = 4242.2 <NEW_LINE> result = convert_with_unit(value) <NEW_LINE> assert(result == '4.24k') <NEW_LINE> <DEDENT> def test_digits7(self): <NEW_LINE> <INDENT> value = 301686.572889 <NEW_LINE> result = convert_with_unit(value) <NEW_LINE> assert(result == '301k') <NEW_LINE> <DEDENT> def test_digits8(self): <NEW_LINE> <INDENT> value = -30168.572889 <NEW_LINE> result = convert_with_unit(value) <NEW_LINE> assert(result == '-30.1k') <NEW_LINE> <DEDENT> def test_None(self): <NEW_LINE> <INDENT> value = None <NEW_LINE> result = convert_with_unit(value) <NEW_LINE> assert(result is None)
|
Conversion using the powers of a unit.
|
6259903b1f5feb6acb163dec
|
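The tested convert_with_unit helper is not shown in the record; the following is a sketch consistent with the assertions above. Note the test cases imply truncation, not rounding, to three significant digits (301686 must become '301k', not '302k').

import math

def convert_with_unit(value):
    # Sketch inferred from the tests: attach an SI prefix and keep at most
    # 3 significant digits by truncating toward zero.
    if value is None:
        return None
    if value == 0:
        return '0'
    prefixes = {-2: 'µ', -1: 'm', 0: '', 1: 'k', 2: 'M', 3: 'G'}
    exp = int(math.floor(math.log10(abs(value)) / 3))
    exp = max(min(exp, 3), -2)
    scaled = value / (1000.0 ** exp)            # now in [1, 1000)
    digits = int(math.floor(math.log10(abs(scaled))))
    factor = 10 ** (2 - digits)
    # Tiny signed epsilon guards against float error flipping e.g.
    # 0.29 * 1000 = 289.999... into '289m' instead of '290m'.
    truncated = math.trunc(scaled * factor + math.copysign(1e-9, scaled)) / factor
    return '{:g}'.format(truncated) + prefixes[exp]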
class CloudFilesUKStorageDriver(CloudFilesStorageDriver): <NEW_LINE> <INDENT> type = Provider.CLOUDFILES_UK <NEW_LINE> name = 'CloudFiles (UK)' <NEW_LINE> _region = 'lon'
|
Cloudfiles storage driver for the UK endpoint.
|
6259903b23849d37ff8522b2
|
class Printer: <NEW_LINE> <INDENT> def __init__(self, file=sys.stderr, header=HEADER, verbose_level=None, debug=None): <NEW_LINE> <INDENT> verbose_level = get_verbose_level(verbose_level) <NEW_LINE> debug = get_debug(debug) <NEW_LINE> self._file = file <NEW_LINE> self._header = header <NEW_LINE> self._prev_line = None <NEW_LINE> self._verbose_level = verbose_level <NEW_LINE> self._debug = debug <NEW_LINE> self._enabled = self._verbose_level or self._debug <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_value, tback): <NEW_LINE> <INDENT> self.clear() <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> if self._enabled: <NEW_LINE> <INDENT> if self._prev_line and not self._debug: <NEW_LINE> <INDENT> self._file.write('\r' + (' ' * len(self._prev_line)) + '\r') <NEW_LINE> self._file.flush() <NEW_LINE> <DEDENT> self._prev_line = None <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, text, debug=None): <NEW_LINE> <INDENT> if self._enabled: <NEW_LINE> <INDENT> for text_line in text.split('\n'): <NEW_LINE> <INDENT> if debug is None: <NEW_LINE> <INDENT> debug = self._debug <NEW_LINE> <DEDENT> line = self._header + text_line <NEW_LINE> if debug: <NEW_LINE> <INDENT> self._file.write(line + '\n') <NEW_LINE> self._prev_line = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.clear() <NEW_LINE> self._file.write(line) <NEW_LINE> self._prev_line = line <NEW_LINE> <DEDENT> self._file.flush() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def run_command(self, cmdline, *args, raising=True, **kwargs): <NEW_LINE> <INDENT> debug = self._debug <NEW_LINE> verbose_level = self._verbose_level <NEW_LINE> result = subprocess.run(cmdline, *args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, check=False, **kwargs) <NEW_LINE> clist = [cmdline[0]] + [shlex.quote(arg) for arg in cmdline[1:]] <NEW_LINE> cmd = " ".join(clist) <NEW_LINE> kwargs = {} <NEW_LINE> if debug: <NEW_LINE> <INDENT> self("$ " + cmd) <NEW_LINE> if verbose_level > 2 or result.returncode: <NEW_LINE> <INDENT> self(str(result.stdout, 'utf-8'), **kwargs) <NEW_LINE> <DEDENT> <DEDENT> if result.returncode and raising: <NEW_LINE> <INDENT> raise BoxError("command {} failed [{}]".format(cmd, result.returncode)) <NEW_LINE> <DEDENT> return result.returncode
|
The printer context manager
|
6259903b23e79379d538d6f9
|
class Diagnosis(Enum): <NEW_LINE> <INDENT> Undiagnosed = 'undiag' <NEW_LINE> Diagnosed = 'diag'
|
Agent's diagnosis status: indicates
whether a diagnosis of SARS-CoV-2/COVID-19 infection has been made or not
|
6259903b30dc7b76659a0a2b
|
class GetCategoryBox: <NEW_LINE> <INDENT> def __init__(self, category, img_path): <NEW_LINE> <INDENT> self.category = category <NEW_LINE> self.img_path = img_path <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_main_box(boxes): <NEW_LINE> <INDENT> if len(boxes) > 1: <NEW_LINE> <INDENT> x1 = boxes[:, 0] <NEW_LINE> x2 = boxes[:, 1] <NEW_LINE> y1 = boxes[:, 2] <NEW_LINE> y2 = boxes[:, 3] <NEW_LINE> area = (x2 - x1) * (y2 - y1) <NEW_LINE> main_box = boxes[np.argmax(area)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> main_box = np.squeeze(boxes) <NEW_LINE> <DEDENT> return main_box <NEW_LINE> <DEDENT> def __call__(self, batch_boxes, batch_names): <NEW_LINE> <INDENT> main_boxes = [] <NEW_LINE> images = [] <NEW_LINE> for boxes, name in zip(batch_boxes, batch_names): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> category_boxes = np.array(boxes[self.category]['boxes']) <NEW_LINE> main_box = self.__class__.get_main_box(category_boxes) <NEW_LINE> main_boxes.append(main_box) <NEW_LINE> images.append(name) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> logger = logging.getLogger(__name__) <NEW_LINE> logger.error(f"{name}: {self.category} not found") <NEW_LINE> os.remove(os.path.join(self.img_path, name)) <NEW_LINE> <DEDENT> <DEDENT> return main_boxes, images
|
Gets the biggest box from the given category
|
6259903b76d4e153a661db70
|
class SettingsCommand(object): <NEW_LINE> <INDENT> parser = argparse.ArgumentParser(description="ptah settings management") <NEW_LINE> parser.add_argument('-a', '--all', action="store_true", dest='all', help='List all registered settings') <NEW_LINE> parser.add_argument('-l', '--list', dest='section', default='', help='List registered settings') <NEW_LINE> parser.add_argument('-p', '--print', action="store_true", dest='printcfg', help='Print default settings in ConfigParser format') <NEW_LINE> def __init__(self, args): <NEW_LINE> <INDENT> self.options = args <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> if self.options.printcfg: <NEW_LINE> <INDENT> data = config.get_cfg_storage(SETTINGS_OB_ID).export(True) <NEW_LINE> parser = configparser.ConfigParser(dict_type=OrderedDict) <NEW_LINE> for key, val in sorted(data.items()): <NEW_LINE> <INDENT> parser.set(configparser.DEFAULTSECT, key, val.replace('%', '%%')) <NEW_LINE> <DEDENT> fp = NativeIO() <NEW_LINE> try: <NEW_LINE> <INDENT> parser.write(fp) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> print (fp.getvalue()) <NEW_LINE> return <NEW_LINE> <DEDENT> if self.options.all: <NEW_LINE> <INDENT> section = '' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> section = self.options.section <NEW_LINE> <DEDENT> groups = sorted(config.get_cfg_storage(ID_SETTINGS_GROUP).items()) <NEW_LINE> for name, group in groups: <NEW_LINE> <INDENT> if section and name != section: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> print ('') <NEW_LINE> title = group.__title__ or name <NEW_LINE> print (grpTitleWrap.fill(title)) <NEW_LINE> if group.__description__: <NEW_LINE> <INDENT> print (grpDescriptionWrap.fill( group.__description__)) <NEW_LINE> <DEDENT> print ('') <NEW_LINE> for node in group.__fields__.values(): <NEW_LINE> <INDENT> default = '<required>' if node.required else node.default <NEW_LINE> print (nameWrap.fill( ('%s.%s: %s (%s: %s)' % ( name, node.name, node.title, node.__class__.__name__, default)))) <NEW_LINE> print (nameTitleWrap.fill(node.description)) <NEW_LINE> print ('')
|
'settings' command
|
6259903b91af0d3eaad3b02e
|
class PlayList(QFrame): <NEW_LINE> <INDENT> def __init__(self, parent=None): <NEW_LINE> <INDENT> super(PlayList, self).__init__() <NEW_LINE> self.parent = parent <NEW_LINE> self.setParent(self.parent.parent) <NEW_LINE> self.setObjectName("PlayList") <NEW_LINE> self.musicList = [] <NEW_LINE> self.currentRow = -1 <NEW_LINE> self.allRow = 0 <NEW_LINE> self.itemColor = QBrush(QColor.fromRgb(95,95,99)) <NEW_LINE> with open('QSS/playList.qss', 'r') as f: <NEW_LINE> <INDENT> self.setStyleSheet(f.read()) <NEW_LINE> <DEDENT> self.resize(574, 477) <NEW_LINE> self.hide() <NEW_LINE> self.setButtons() <NEW_LINE> self.setLabels() <NEW_LINE> self.setTables() <NEW_LINE> self.setLayouts() <NEW_LINE> <DEDENT> def setButtons(self): <NEW_LINE> <INDENT> self.closeButton = QPushButton("×", self) <NEW_LINE> self.closeButton.setObjectName("closeButton") <NEW_LINE> self.closeButton.clicked.connect(self.hide) <NEW_LINE> <DEDENT> def setLabels(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def setTables(self): <NEW_LINE> <INDENT> self.playList = _TableWidget(self) <NEW_LINE> self.playList.setMinimumWidth(self.width()) <NEW_LINE> self.playList.setColumnCount(3) <NEW_LINE> self.playList.setColumnWidth(0, self.width()/3*2) <NEW_LINE> self.playList.horizontalHeader().setVisible(False) <NEW_LINE> self.playList.horizontalHeader().setStretchLastSection(True) <NEW_LINE> self.playList.verticalHeader().setVisible(False) <NEW_LINE> self.playList.setShowGrid(False) <NEW_LINE> self.playList.setAlternatingRowColors(True) <NEW_LINE> self.playList.setEditTriggers(QAbstractItemView.NoEditTriggers) <NEW_LINE> self.playList.setSelectionBehavior(QAbstractItemView.SelectRows) <NEW_LINE> self.playList.itemDoubleClicked.connect(self.play) <NEW_LINE> <DEDENT> def play(self): <NEW_LINE> <INDENT> if self.currentRow == self.playList.currentRow(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> currentRow = self.playList.currentRow() <NEW_LINE> currentMusic = self.musicList[currentRow] <NEW_LINE> self.parent.player.setIndex(currentRow) <NEW_LINE> self.parent.currentMusic.setShortInfo(currentMusic['name'], currentMusic['author']) <NEW_LINE> self.currentRow = currentRow <NEW_LINE> <DEDENT> <DEDENT> def setLayouts(self): <NEW_LINE> <INDENT> self.mainLayout = QVBoxLayout() <NEW_LINE> self.headerLayout = QHBoxLayout() <NEW_LINE> self.headerLayout.addStretch(1) <NEW_LINE> self.headerLayout.addWidget(self.closeButton) <NEW_LINE> self.mainLayout.addLayout(self.headerLayout) <NEW_LINE> self.mainLayout.addWidget(self.playList) <NEW_LINE> self.setLayout(self.mainLayout) <NEW_LINE> <DEDENT> def addMusic(self, data): <NEW_LINE> <INDENT> self.musicList.append(data) <NEW_LINE> <DEDENT> def addMusics(self, mlist): <NEW_LINE> <INDENT> self.musicList.extend(mlist) <NEW_LINE> <DEDENT> def addPlayList(self, name, author, time): <NEW_LINE> <INDENT> self.playList.setRowCount(self.allRow+1) <NEW_LINE> musicName = QTableWidgetItem(name) <NEW_LINE> self.playList.setItem(self.allRow, 0, musicName) <NEW_LINE> musicAuthor = QTableWidgetItem(author) <NEW_LINE> musicAuthor.setForeground(self.itemColor) <NEW_LINE> self.playList.setItem(self.allRow, 1, musicAuthor) <NEW_LINE> musicTime = QTableWidgetItem(time) <NEW_LINE> musicTime.setForeground(self.itemColor) <NEW_LINE> self.playList.setItem(self.allRow, 2, musicTime) <NEW_LINE> self.allRow += 1
|
Playlist.
|
6259903b50485f2cf55dc17b
|
class RelatedTo(Relation): <NEW_LINE> <INDENT> relation_name = 'related_to'
|
related_to relation.
|
6259903bcad5886f8bdc5979
|
class TestPcaEval(unittest.TestCase): <NEW_LINE> <INDENT> def test_pca_plot(self): <NEW_LINE> <INDENT> features_1 = np.random.normal(size=100) <NEW_LINE> features_1 = features_1.reshape(len(features_1), 1) <NEW_LINE> features = features_1 <NEW_LINE> for i in range(2, 11): <NEW_LINE> <INDENT> features = np.concatenate((features, features_1**i), axis=1) <NEW_LINE> <DEDENT> features_2 = np.random.normal(loc=1, scale=0.1, size=100) <NEW_LINE> features_2 = features_2.reshape(len(features_2), 1) <NEW_LINE> for i in range(1, 11): <NEW_LINE> <INDENT> features = np.concatenate((features, features_2**i), axis=1) <NEW_LINE> <DEDENT> features_3 = np.random.normal(loc=2, scale=1.5, size=100) <NEW_LINE> features_3 = features_3.reshape(len(features_3), 1) <NEW_LINE> for i in range(1, 11): <NEW_LINE> <INDENT> features = np.concatenate((features, features_3**i), axis=1) <NEW_LINE> <DEDENT> labels = np.random.randint(0, 2, size=100) <NEW_LINE> train_features, test_features, train_labels, test_labels = train_test_split(features, labels, random_state=0) <NEW_LINE> optimal_dimensions = pca_evaluate.pca_viz_and_opt_dimensions( train_features, train_labels, test_features, test_labels, plot_figure=False) <NEW_LINE> self.assertTrue(optimal_dimensions >= 1) <NEW_LINE> <DEDENT> def test_pca_plot_figure(self): <NEW_LINE> <INDENT> features_1 = np.random.normal(size=100) <NEW_LINE> features_1 = features_1.reshape(len(features_1), 1) <NEW_LINE> features = features_1 <NEW_LINE> for i in range(2, 11): <NEW_LINE> <INDENT> features = np.concatenate((features, features_1**i), axis=1) <NEW_LINE> <DEDENT> features_2 = np.random.normal(loc=1, scale=0.1, size=100) <NEW_LINE> features_2 = features_2.reshape(len(features_2), 1) <NEW_LINE> for i in range(1, 11): <NEW_LINE> <INDENT> features = np.concatenate((features, features_2**i), axis=1) <NEW_LINE> <DEDENT> features_3 = np.random.normal(loc=2, scale=1.5, size=100) <NEW_LINE> features_3 = features_3.reshape(len(features_3), 1) <NEW_LINE> for i in range(1, 11): <NEW_LINE> <INDENT> features = np.concatenate((features, features_3**i), axis=1) <NEW_LINE> <DEDENT> labels = np.random.randint(0, 2, size=100) <NEW_LINE> train_features, test_features, train_labels, test_labels = train_test_split(features, labels, random_state=0) <NEW_LINE> fig, optimal_dimensions = pca_evaluate.pca_viz_and_opt_dimensions( train_features, train_labels, test_features, test_labels, plot_figure=True) <NEW_LINE> self.assertTrue(optimal_dimensions >= 1)
|
Tests the function that runs the PCA algorithm for various numbers of
dimensions on the given dataset
|
6259903bbaa26c4b54d504a2
|
class Trace(models.Model): <NEW_LINE> <INDENT> creation_date = models.DateTimeField( auto_now_add=True, editable=False, verbose_name=('Creation date'), ) <NEW_LINE> view_name = models.CharField( max_length=50, verbose_name=('View name'), ) <NEW_LINE> ip = models.IPAddressField( verbose_name=('IP'), ) <NEW_LINE> session_key = models.CharField( max_length=40, verbose_name=('Session key'), ) <NEW_LINE> user_agent = models.CharField( max_length=255, verbose_name=('User agent'), ) <NEW_LINE> user = models.ForeignKey( settings.AUTH_USER_MODEL, null=True, blank=True, verbose_name=('User'), ) <NEW_LINE> content_type = models.ForeignKey(ContentType, blank=True, null=True) <NEW_LINE> object_id = models.PositiveIntegerField(blank=True, null=True) <NEW_LINE> view_object = generic.GenericForeignKey('content_type', 'object_id') <NEW_LINE> hits = models.PositiveIntegerField( default=1, verbose_name=('Hits'), ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> ordering = ('-creation_date', ) <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return u'{} ({})'.format(self.view_name, self.ip)
|
Model to track a user's view hits.
:creation_date: Auto-added datetime of the model creation.
:view_name: Name of the called view.
:ip: IP of the visitor.
:user_agent: User agent of the request.
:session_key: Session key in the request.
:user: User who called the website, if not anonymous.
:view_object: Object of the called view.
:hits: Hit count of the user/view combination.
|
6259903b8da39b475be043e9
|
class Command(BaseCommand): <NEW_LINE> <INDENT> def handle(self, *args, **options): <NEW_LINE> <INDENT> self.stdout.write('Waiting for database...') <NEW_LINE> db_conn = None <NEW_LINE> while not db_conn: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> db_conn = connections['default'] <NEW_LINE> <DEDENT> except OperationalError: <NEW_LINE> <INDENT> self.stdout.write('Database unavailable, waiting 1 second...') <NEW_LINE> time.sleep(1) <NEW_LINE> <DEDENT> self.stdout.write(self.style.SUCCESS('Database available!'))
|
Django command to pause execution until the database is available
|
6259903b8a349b6b4368743e
|
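Assuming the file lives at some app's management/commands/wait_for_db.py (the record does not show its path), Django exposes it as a manage.py subcommand; a typical Docker entrypoint would run it before migrating:

python manage.py wait_for_db
python manage.py migrate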
class TestReview(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.new_review = Review() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> if os.path.exists("file.json"): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.remove("file.json") <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def test__init__id(self): <NEW_LINE> <INDENT> this_dict = self.new_review.__dict__ <NEW_LINE> self.assertIsNotNone(this_dict.get("id")) <NEW_LINE> <DEDENT> def test__init__created_at(self): <NEW_LINE> <INDENT> this_dict = self.new_review.__dict__ <NEW_LINE> self.assertIsNotNone(this_dict.get("created_at")) <NEW_LINE> <DEDENT> def test_attributes(self): <NEW_LINE> <INDENT> self.assertTrue(hasattr(self.new_review, "place_id")) <NEW_LINE> self.assertEqual(self.new_review.place_id, "") <NEW_LINE> self.assertTrue(hasattr(self.new_review, "user_id")) <NEW_LINE> self.assertEqual(self.new_review.user_id, "") <NEW_LINE> self.assertTrue(hasattr(self.new_review, "text")) <NEW_LINE> self.assertEqual(self.new_review.text, "") <NEW_LINE> self.assertFalse(hasattr(self.new_review, "updated_at")) <NEW_LINE> self.assertFalse(hasattr(self.new_review, "my_number")) <NEW_LINE> self.assertFalse(hasattr(self.new_review, "random_attr")) <NEW_LINE> self.new_review.name = "TeamTeam!" <NEW_LINE> self.new_review.age = 100 <NEW_LINE> self.assertTrue(hasattr(self.new_review, "name")) <NEW_LINE> self.assertEqual(self.new_review.name, "TeamTeam!") <NEW_LINE> self.assertTrue(hasattr(self.new_review, "age")) <NEW_LINE> delattr(self.new_review, "name") <NEW_LINE> self.assertFalse(hasattr(self.new_review, "name")) <NEW_LINE> self.assertEqual(self.new_review.__class__.__name__, "BaseModel") <NEW_LINE> <DEDENT> def test_save_init(self): <NEW_LINE> <INDENT> this_dict = self.new_review.__dict__ <NEW_LINE> self.assertIsNone(this_dict.get("updated_at")) <NEW_LINE> <DEDENT> def test_save_update(self): <NEW_LINE> <INDENT> this_dict = self.new_review.__dict__ <NEW_LINE> before = this_dict.get("updated_at") <NEW_LINE> self.new_review.save() <NEW_LINE> this_dict = self.new_review.__dict__ <NEW_LINE> after = this_dict.get("updated_at") <NEW_LINE> self.assertNotEqual(before, after) <NEW_LINE> <DEDENT> def test___str__(self): <NEW_LINE> <INDENT> correct_format = ("[{}] ({}) {}".format (self.new_review.__class__.__name__, self.new_review.id, self.new_review.__dict__)) <NEW_LINE> self.assertEqual(print(correct_format), print(self.new_review)) <NEW_LINE> <DEDENT> def test_repr(self): <NEW_LINE> <INDENT> str_return = self.new_review.__str__ <NEW_LINE> self.assertIsNotNone(str_return) <NEW_LINE> <DEDENT> def test_to_json(self): <NEW_LINE> <INDENT> json_return = BaseModel.to_json(self.new_review) <NEW_LINE> self.assertEqual(type(json_return), dict)
|
Class for testing Review
|
6259903bb5575c28eb7135c6
|
class FunctionType(Enum): <NEW_LINE> <INDENT> DCV = 1 <NEW_LINE> ACV = 2 <NEW_LINE> OHM = 3 <NEW_LINE> OHMF = 4 <NEW_LINE> DCI = 5 <NEW_LINE> ACI = 6 <NEW_LINE> OHM_EXT = 7 <NEW_LINE> NTC = 8 <NEW_LINE> NTCF = 9
|
The measurement functions. See page 55 for the extended ohms setting.
|
6259903b10dbd63aa1c71dd0
|
class IPostPoolSheet(ISheet): <NEW_LINE> <INDENT> pass
|
Marker interface for sheets with :term:`post_pool` attributes.
This implies the sheet schema is a subtype of
:class:`adhocracy_core.schema.PostPoolSchema` or has at least a
field node with :class:`adhocracy_core.schema.PostPool`.
|
6259903b287bf620b6272de5
|
class FakeClockCounter(object): <NEW_LINE> <INDENT> def __init__(self, fake_clock, num_waiters): <NEW_LINE> <INDENT> self.__fake_clock = fake_clock <NEW_LINE> self.__num_waiters = num_waiters <NEW_LINE> self.__count = 0 <NEW_LINE> self.__condition = threading.Condition() <NEW_LINE> <DEDENT> def count(self): <NEW_LINE> <INDENT> self.__condition.acquire() <NEW_LINE> try: <NEW_LINE> <INDENT> return self.__count <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.__condition.release() <NEW_LINE> <DEDENT> <DEDENT> def increment(self): <NEW_LINE> <INDENT> self.__condition.acquire() <NEW_LINE> try: <NEW_LINE> <INDENT> self.__count += 1 <NEW_LINE> self.__condition.notifyAll() <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.__condition.release() <NEW_LINE> <DEDENT> <DEDENT> def __wait_for_increment(self, old_count, timeout=None): <NEW_LINE> <INDENT> remaining = timeout <NEW_LINE> self.__condition.acquire() <NEW_LINE> try: <NEW_LINE> <INDENT> while self.__count == old_count and remaining > 0: <NEW_LINE> <INDENT> t1 = time.time() <NEW_LINE> self.__condition.wait(remaining) <NEW_LINE> remaining -= time.time() - t1 <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> self.__condition.release() <NEW_LINE> <DEDENT> <DEDENT> def sleep_until_count_or_maxwait(self, target, fake_increment_sec, maxwait): <NEW_LINE> <INDENT> deadline = time.time() + maxwait <NEW_LINE> while self.count() < target and time.time() < deadline: <NEW_LINE> <INDENT> self.__fake_clock.block_until_n_waiting_threads(self.__num_waiters) <NEW_LINE> old_count = self.count() <NEW_LINE> self.__fake_clock.advance_time(increment_by=fake_increment_sec) <NEW_LINE> self.__wait_for_increment(old_count, timeout=deadline - time.time()) <NEW_LINE> <DEDENT> return self.count() == target
|
Helper class for multithreaded testing. Provides a method for a thread to block until a count has reached a target
value. Moreover, for every successfully observed increment, it will advance the fake_clock and wait for all
other threads (driven by the fake clock) to wait on the clock. For example usage, see MonitorsManagerTest.
|
6259903b8c3a8732951f7752
|
class SubmittedComponent(models.Model): <NEW_LINE> <INDENT> submission = models.ForeignKey(Submission, on_delete=models.PROTECT) <NEW_LINE> submit_time = models.DateTimeField(auto_now_add = True) <NEW_LINE> def get_time(self): <NEW_LINE> <INDENT> return self.submit_time.strftime("%Y-%m-%d %H:%M:%S") <NEW_LINE> <DEDENT> def get_late_time(self): <NEW_LINE> <INDENT> time = self.submission.created_at - self.submission.activity.due_date <NEW_LINE> if time < datetime.timedelta(): <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return time <NEW_LINE> <DEDENT> <DEDENT> def delete(self, *args, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError("This object cannot be deleted because it is used as a foreign key.") <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> return other.submit_time < self.submit_time <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> ordering = ['submit_time'] <NEW_LINE> app_label = 'submission' <NEW_LINE> <DEDENT> def get_size_in_kb(self): <NEW_LINE> <INDENT> res = int(self.get_size())/1024 <NEW_LINE> return res <NEW_LINE> <DEDENT> def get_submitter(self): <NEW_LINE> <INDENT> group = GroupSubmission.objects.filter(id=self.submission.id) <NEW_LINE> if len(group) == 0: <NEW_LINE> <INDENT> student = StudentSubmission.objects.filter(id=self.submission.id) <NEW_LINE> return student[0].member.person <NEW_LINE> <DEDENT> return group[0].creator.person <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%s@%s" % (self.submission.activity, self.submission.created_at) <NEW_LINE> <DEDENT> def sendfile(self, upfile, response): <NEW_LINE> <INDENT> path, filename = os.path.split(upfile.name) <NEW_LINE> response['Content-Disposition'] = 'inline; filename="' + filename + '"' <NEW_LINE> try: <NEW_LINE> <INDENT> upfile.open('rb') <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> response['Content-type'] = "text/plain" <NEW_LINE> response.write("File missing. It has likely been archived.") <NEW_LINE> return <NEW_LINE> <DEDENT> for data in upfile: <NEW_LINE> <INDENT> response.write(data) <NEW_LINE> <DEDENT> <DEDENT> def file_filename(self, upfile, prefix=None): <NEW_LINE> <INDENT> filename = os.path.split(upfile.name)[1] <NEW_LINE> if prefix: <NEW_LINE> <INDENT> filename = os.path.join(prefix, filename) <NEW_LINE> <DEDENT> return filename
|
Part of a student's/group's submission
|
6259903b4e696a045264e71f
|
class BooleanImages ( SingletonHasPrivateFacets ): <NEW_LINE> <INDENT> true = Image( '@facets:on2' ) <NEW_LINE> false = Image( '@facets:off5' ) <NEW_LINE> check = Image( '@facets:on1' )
|
Helper class used to define the true/false images used by the
boolean_cell_paint function.
|
6259903bd10714528d69ef89
|
class TdlcIdentity(IanaInterfaceTypeIdentity): <NEW_LINE> <INDENT> _prefix = 'ianaift' <NEW_LINE> _revision = '2014-05-08' <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> IanaInterfaceTypeIdentity.__init__(self) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _meta_info(): <NEW_LINE> <INDENT> from ydk.models.ietf._meta import _iana_if_type as meta <NEW_LINE> return meta._meta_table['TdlcIdentity']['meta_info']
|
IBM twinaxial data link control.
|
6259903b30dc7b76659a0a2d
|
@skip_unless_lms <NEW_LINE> @ddt.ddt <NEW_LINE> class TestCohortOauth(SharedModuleStoreTestCase): <NEW_LINE> <INDENT> password = 'password' <NEW_LINE> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> super().setUpClass() <NEW_LINE> cls.user = UserFactory(username=USERNAME, email=USER_MAIL, password=cls.password) <NEW_LINE> cls.staff_user = UserFactory(is_staff=True, password=cls.password) <NEW_LINE> cls.course_key = ToyCourseFactory.create().id <NEW_LINE> cls.course_str = str(cls.course_key) <NEW_LINE> <DEDENT> @ddt.data({'path_name': 'api_cohorts:cohort_settings'}, {'path_name': 'api_cohorts:cohort_handler'}, ) <NEW_LINE> @ddt.unpack <NEW_LINE> def test_oauth_list(self, path_name): <NEW_LINE> <INDENT> path = reverse(path_name, kwargs={'course_key_string': self.course_str}) <NEW_LINE> user = UserFactory(is_staff=False) <NEW_LINE> oauth_client = ApplicationFactory.create() <NEW_LINE> access_token = AccessTokenFactory.create(user=user, application=oauth_client).token <NEW_LINE> headers = { 'HTTP_AUTHORIZATION': 'Bearer ' + access_token } <NEW_LINE> response = self.client.get(path=path, **headers) <NEW_LINE> assert response.status_code == 403 <NEW_LINE> user.is_staff = True <NEW_LINE> user.save() <NEW_LINE> response = self.client.get(path=path, **headers) <NEW_LINE> assert response.status_code == 200 <NEW_LINE> <DEDENT> def test_oauth_users(self): <NEW_LINE> <INDENT> cohorts.add_cohort(self.course_key, "DEFAULT", "random") <NEW_LINE> path = reverse('api_cohorts:cohort_users', kwargs={'course_key_string': self.course_str, 'cohort_id': 1}) <NEW_LINE> user = UserFactory(is_staff=False) <NEW_LINE> oauth_client = ApplicationFactory.create() <NEW_LINE> access_token = AccessTokenFactory.create(user=user, application=oauth_client).token <NEW_LINE> headers = { 'HTTP_AUTHORIZATION': 'Bearer ' + access_token } <NEW_LINE> data = { 'users': [user.username] } <NEW_LINE> response = self.client.post(path=path, data=data, **headers) <NEW_LINE> assert response.status_code == 403 <NEW_LINE> user.is_staff = True <NEW_LINE> user.save() <NEW_LINE> response = self.client.post(path=path, data=data, **headers) <NEW_LINE> assert response.status_code == 200 <NEW_LINE> <DEDENT> def test_oauth_csv(self): <NEW_LINE> <INDENT> cohorts.add_cohort(self.course_key, "DEFAULT", "random") <NEW_LINE> path = reverse('api_cohorts:cohort_users_csv', kwargs={'course_key_string': self.course_str}) <NEW_LINE> user = UserFactory(is_staff=False) <NEW_LINE> oauth_client = ApplicationFactory.create() <NEW_LINE> access_token = AccessTokenFactory.create(user=user, application=oauth_client).token <NEW_LINE> headers = { 'HTTP_AUTHORIZATION': 'Bearer ' + access_token } <NEW_LINE> response = self.client.post(path=path, **headers) <NEW_LINE> assert response.status_code == 403 <NEW_LINE> user.is_staff = True <NEW_LINE> user.save() <NEW_LINE> response = self.client.post(path=path, **headers) <NEW_LINE> assert response.status_code == 400
|
Tests for cohort API OAuth authentication
|
6259903b94891a1f408b9ff5
|
class Object_OT_MBDyn_update_beam3(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "update.mbdyn_beam3" <NEW_LINE> bl_label = "Updates beam3 curve configuration" <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> ed = context.scene.mbdyn.elems <NEW_LINE> ret_val = update_beam3(ed[context.object.name]) <NEW_LINE> if ret_val == 'OBJECTS_NOTFOUND': <NEW_LINE> <INDENT> self.report({'ERROR'}, "Unable to find Blender objects needed") <NEW_LINE> return {'CANCELLED'} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return ret_val <NEW_LINE> <DEDENT> pass
|
Calls the update_beam3() function to update the current configuration
of the curve representing the beam3 element
|
6259903b1d351010ab8f4d18
|
class CallableBool: <NEW_LINE> <INDENT> do_not_call_in_templates = True <NEW_LINE> def __init__(self, value): <NEW_LINE> <INDENT> self.value = value <NEW_LINE> <DEDENT> def __bool__(self): <NEW_LINE> <INDENT> return self.value <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> warnings.warn( "Using user.is_authenticated() and user.is_anonymous() as a method " "is deprecated. Remove the parentheses to use it as an attribute.", RemovedInDjango20Warning, stacklevel=2 ) <NEW_LINE> return self.value <NEW_LINE> <DEDENT> def __nonzero__(self): <NEW_LINE> <INDENT> return self.value <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'CallableBool(%r)' % self.value <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.value == other <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return self.value != other <NEW_LINE> <DEDENT> def __or__(self, other): <NEW_LINE> <INDENT> return bool(self.value or other)
|
A boolean-like object that is also callable for backwards compatibility.
|
6259903b15baa7234946318d
|
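A short sketch of the compatibility behaviour this class gives Django's user flags; the values are illustrative.

is_auth = CallableBool(True)
if is_auth:            # attribute style: __bool__ returns the stored value
    print('logged in')
legacy = is_auth()     # call style still works, but emits a
                       # RemovedInDjango20Warning before returning the value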
class KafkaConnection(object): <NEW_LINE> <INDENT> def __init__(self, host, port, bufsize=4096): <NEW_LINE> <INDENT> self.host = host <NEW_LINE> self.port = port <NEW_LINE> self.bufsize = bufsize <NEW_LINE> self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) <NEW_LINE> self._sock.connect((host, port)) <NEW_LINE> self._sock.settimeout(10) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "<KafkaConnection host=%s port=%d>" % (self.host, self.port) <NEW_LINE> <DEDENT> def _consume_response(self): <NEW_LINE> <INDENT> data = "" <NEW_LINE> for chunk in self._consume_response_iter(): <NEW_LINE> <INDENT> data += chunk <NEW_LINE> <DEDENT> return data <NEW_LINE> <DEDENT> def _consume_response_iter(self): <NEW_LINE> <INDENT> log.debug("Handling response from Kafka") <NEW_LINE> resp = self._sock.recv(4) <NEW_LINE> if resp == "": <NEW_LINE> <INDENT> raise Exception("Got no response from Kafka") <NEW_LINE> <DEDENT> (size,) = struct.unpack('>i', resp) <NEW_LINE> messageSize = size - 4 <NEW_LINE> log.debug("About to read %d bytes from Kafka", messageSize) <NEW_LINE> total = 0 <NEW_LINE> while total < messageSize: <NEW_LINE> <INDENT> resp = self._sock.recv(self.bufsize) <NEW_LINE> log.debug("Read %d bytes from Kafka", len(resp)) <NEW_LINE> if resp == "": <NEW_LINE> <INDENT> raise BufferUnderflowError("Not enough data to read this response") <NEW_LINE> <DEDENT> total += len(resp) <NEW_LINE> yield resp <NEW_LINE> <DEDENT> <DEDENT> def send(self, requestId, payload): <NEW_LINE> <INDENT> log.debug("About to send %d bytes to Kafka, request %d" % (len(payload), requestId)) <NEW_LINE> sent = self._sock.sendall(payload) <NEW_LINE> if sent != None: <NEW_LINE> <INDENT> raise RuntimeError("Kafka went away") <NEW_LINE> <DEDENT> self.data = self._consume_response() <NEW_LINE> <DEDENT> def recv(self, requestId): <NEW_LINE> <INDENT> log.debug("Reading response %d from Kafka" % requestId) <NEW_LINE> return self.data <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self._sock.close()
|
A socket connection to a single Kafka broker
This class is _not_ thread safe. Each call to `send` must be followed
by a call to `recv` in order to get the correct response. Eventually,
we can do something in here to facilitate multiplexed requests/responses
since the Kafka API includes a correlation id.
|
6259903b71ff763f4b5e8998
|
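A usage sketch honouring the class's send/recv pairing contract; the broker address and the wire-format payload builder are placeholders, not part of the record.

conn = KafkaConnection('localhost', 9092)  # placeholder broker address
payload = build_produce_request()          # hypothetical encoder producing wire-format bytes
conn.send(requestId=1, payload=payload)    # also buffers the broker's response internally
data = conn.recv(requestId=1)              # returns the response read during send()
conn.close()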
class GetKIFTestFilterTest(TestCase): <NEW_LINE> <INDENT> def test_correct(self): <NEW_LINE> <INDENT> tests = [ 'KIF.test1', 'KIF.test2', ] <NEW_LINE> expected = 'NAME:test1|test2' <NEW_LINE> self.assertEqual(test_runner.get_kif_test_filter(tests), expected) <NEW_LINE> <DEDENT> def test_correct_inverted(self): <NEW_LINE> <INDENT> tests = [ 'KIF.test1', 'KIF.test2', ] <NEW_LINE> expected = '-NAME:test1|test2' <NEW_LINE> self.assertEqual( test_runner.get_kif_test_filter(tests, invert=True), expected)
|
Tests for test_runner.get_kif_test_filter.
|
6259903b73bcbd0ca4bcb486
|
class PostPage(BlogHandler): <NEW_LINE> <INDENT> def get(self, post_id): <NEW_LINE> <INDENT> key = ndb.Key('Post', int(post_id), parent=blog_key()) <NEW_LINE> post = key.get() <NEW_LINE> comments = Comment.gql("WHERE post_id=%s ORDER BY created DESC" % int(post_id)) <NEW_LINE> liked = None <NEW_LINE> if self.user: <NEW_LINE> <INDENT> liked = Like.gql("WHERE post_id=:1 AND author.username=:2", int(post_id), self.user.username).get() <NEW_LINE> <DEDENT> if not post: <NEW_LINE> <INDENT> self.error(404) <NEW_LINE> return <NEW_LINE> <DEDENT> self.render("post_view.html", post=post, comments=comments, liked=liked) <NEW_LINE> <DEDENT> def post(self, post_id): <NEW_LINE> <INDENT> key = ndb.Key('Post', int(post_id), parent=blog_key()) <NEW_LINE> post = key.get() <NEW_LINE> if self.request.get("like"): <NEW_LINE> <INDENT> if post and self.user: <NEW_LINE> <INDENT> post.likes += 1 <NEW_LINE> like = Like(post_id=int(post_id), author=self.user) <NEW_LINE> like.put() <NEW_LINE> post.put() <NEW_LINE> <DEDENT> self.redirect("/%s" % post_id) <NEW_LINE> <DEDENT> elif self.request.get("unlike"): <NEW_LINE> <INDENT> if post and self.user: <NEW_LINE> <INDENT> post.likes -= 1 <NEW_LINE> like = Like.gql("WHERE post_id=:1 AND author.username=:2", int(post_id), self.user.username).get() <NEW_LINE> key = like.key <NEW_LINE> key.delete() <NEW_LINE> post.put() <NEW_LINE> <DEDENT> self.redirect("/%s" % post_id) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.content = self.request.get("content") <NEW_LINE> if self.content: <NEW_LINE> <INDENT> comment = Comment(content=str(self.content), author=self.user, post_id=int(post_id)) <NEW_LINE> comment.put() <NEW_LINE> self.redirect("/%s" % post_id) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.render("post_view.html", post=post)
|
Renders the page for a single post, handles comments and likes on the post
|
6259903bbe383301e0254a14
|
class TestDirectoriesManager(unittest.TestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> cls.directory_manager = DirectoriesManager() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def tearDownClass(cls): <NEW_LINE> <INDENT> shutil.rmtree(cls.directory_manager.get_directory(DefaultDirectories.OUTPUT)) <NEW_LINE> del cls.directory_manager <NEW_LINE> <DEDENT> def test_singleton(self): <NEW_LINE> <INDENT> directory_manager_new = DirectoriesManager() <NEW_LINE> self.assertEqual(directory_manager_new, self.directory_manager) <NEW_LINE> del directory_manager_new <NEW_LINE> <DEDENT> def test_get_directory_exists(self, target_directory=None): <NEW_LINE> <INDENT> if target_directory is None: <NEW_LINE> <INDENT> self.directory_manager.setup_default_directories('') <NEW_LINE> target_directory = DefaultDirectories.OUTPUT <NEW_LINE> <DEDENT> self.assertTrue(os.path.isdir(self.directory_manager. get_directory(target_directory))) <NEW_LINE> <DEDENT> def test_get_directory_not_exists(self): <NEW_LINE> <INDENT> target_dir = 'not_exists' <NEW_LINE> with self.assertRaises(Exception) as context: <NEW_LINE> <INDENT> self.directory_manager.get_directory(target_dir) <NEW_LINE> <DEDENT> self.assertTrue('directory not found' in str(context.exception)) <NEW_LINE> <DEDENT> def test_default_directories_created(self): <NEW_LINE> <INDENT> self.directory_manager.setup_default_directories('') <NEW_LINE> self.test_get_directory_exists(DefaultDirectories.OUTPUT) <NEW_LINE> self.test_get_directory_exists(DefaultDirectories.LOGS) <NEW_LINE> self.test_get_directory_exists(DefaultDirectories.RESULTS) <NEW_LINE> self.test_get_directory_exists(DefaultDirectories.FIGURES) <NEW_LINE> self.test_get_directory_exists(DefaultDirectories.MONITORING_DATA) <NEW_LINE> <DEDENT> def test_make_directory(self): <NEW_LINE> <INDENT> temp_dir = self.directory_manager.make_directory('temp_dir', '') <NEW_LINE> self.assertTrue(os.path.isdir(temp_dir)) <NEW_LINE> shutil.rmtree(temp_dir) <NEW_LINE> <DEDENT> def test_make_directory_location_correctness(self): <NEW_LINE> <INDENT> temp_dir = self.directory_manager.make_directory('temp_dir_location_test', '') <NEW_LINE> temp_dir_path = Path(self.directory_manager.get_directory('temp_dir_location_test')) <NEW_LINE> self.assertTrue(Path.exists(temp_dir_path)) <NEW_LINE> shutil.rmtree(temp_dir)
|
Tests the behavior of DirectoriesManager class.
|
6259903b4e696a045264e720
|
@override_settings(**dict( TEST_SETTINGS, STATICFILES_STORAGE='django.contrib.staticfiles.storage.CachedStaticFilesStorage', DEBUG=False, )) <NEW_LINE> class TestCollectionCachedStorage(TestHashedFiles, BaseCollectionTestCase, BaseStaticFilesTestCase, TestCase): <NEW_LINE> <INDENT> def test_cache_invalidation(self): <NEW_LINE> <INDENT> name = "cached/styles.css" <NEW_LINE> hashed_name = "cached/styles.bb84a0240107.css" <NEW_LINE> cache_key = storage.staticfiles_storage.hash_key(name) <NEW_LINE> cached_name = storage.staticfiles_storage.hashed_files.get(cache_key) <NEW_LINE> self.assertEqual(self.hashed_file_path(name), cached_name) <NEW_LINE> storage.staticfiles_storage.hashed_files.clear() <NEW_LINE> cached_name = storage.staticfiles_storage.hashed_files.get(cache_key) <NEW_LINE> self.assertEqual(cached_name, None) <NEW_LINE> self.assertEqual(self.hashed_file_path(name), hashed_name) <NEW_LINE> cached_name = storage.staticfiles_storage.hashed_files.get(cache_key) <NEW_LINE> self.assertEqual(cached_name, hashed_name) <NEW_LINE> <DEDENT> def test_cache_key_memcache_validation(self): <NEW_LINE> <INDENT> name = "/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff/some crazy/" + "\x16" + "\xb4" <NEW_LINE> cache_key = storage.staticfiles_storage.hash_key(name) <NEW_LINE> cache_validator = BaseCache({}) <NEW_LINE> cache_validator.validate_key(cache_key) <NEW_LINE> self.assertEqual(cache_key, 'staticfiles:821ea71ef36f95b3922a77f7364670e7')
|
Tests for the cache-busting storage
|
6259903bd10714528d69ef8a
|
class DUIK_OT_remove_texanim_image( bpy.types.Operator ): <NEW_LINE> <INDENT> bl_idname = "texanim.remove_texanim_image" <NEW_LINE> bl_label = "Remove Image" <NEW_LINE> bl_options = {'REGISTER','UNDO'} <NEW_LINE> @classmethod <NEW_LINE> def poll(cls, context): <NEW_LINE> <INDENT> node = dublf.context.get_active_node(context) <NEW_LINE> if node is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if node.bl_idname == 'ShaderNodeTexImage': <NEW_LINE> <INDENT> return len(node.duik_texanim_images) > 0 <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> dublf.ui.redraw() <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> node = dublf.context.get_active_node(context) <NEW_LINE> tree = bpy.context.material.node_tree <NEW_LINE> current_index = node.duik_texanim_current_index <NEW_LINE> dublf.animation.remove_animated_index(tree, 'nodes[\"' + node.name + '\"].duik_texanim_current_index', current_index) <NEW_LINE> node.duik_texanim_images.remove(current_index) <NEW_LINE> return {'FINISHED'}
|
Removes the active Image
|
6259903b76d4e153a661db72
|
class Person(object): <NEW_LINE> <INDENT> def __init__(self, _id, is_vaccinated, infection=None): <NEW_LINE> <INDENT> self._id = _id <NEW_LINE> self.is_alive = True <NEW_LINE> self.is_vaccinated = is_vaccinated <NEW_LINE> self.infection = infection <NEW_LINE> <DEDENT> def did_survive_infection(self): <NEW_LINE> <INDENT> immunity_strength = random.random() <NEW_LINE> if immunity_strength < self.infection.mortality_rate: <NEW_LINE> <INDENT> self.is_alive = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.is_vaccinated = True <NEW_LINE> self.infection = None <NEW_LINE> <DEDENT> return self.is_alive
|
Person objects will populate the simulation.
|
6259903b91af0d3eaad3b032
|
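A small sketch of exercising Person; the Infection stand-in with a mortality_rate attribute is an assumption, since the record does not show the infection class.

import random

class FakeInfection(object):
    # Stand-in: only the mortality_rate attribute is read by
    # did_survive_infection().
    mortality_rate = 0.3

person = Person(_id=1, is_vaccinated=False, infection=FakeInfection())
if person.did_survive_infection():
    # Survivors are marked vaccinated and their infection is cleared.
    assert person.is_vaccinated and person.infection is None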
class SPSAAdam(UnrolledAdam): <NEW_LINE> <INDENT> def __init__(self, lr=0.01, delta=0.01, num_samples=128, num_iters=1, compare_to_analytic_grad=False): <NEW_LINE> <INDENT> super(SPSAAdam, self).__init__(lr=lr) <NEW_LINE> assert num_samples % 2 == 0, "number of samples must be even" <NEW_LINE> self._delta = delta <NEW_LINE> self._num_samples = num_samples // 2 <NEW_LINE> self._num_iters = num_iters <NEW_LINE> self._compare_to_analytic_grad = compare_to_analytic_grad <NEW_LINE> <DEDENT> def _get_delta(self, x, delta): <NEW_LINE> <INDENT> x_shape = x.get_shape().as_list() <NEW_LINE> delta_x = delta * tf.sign( tf.random_uniform( [self._num_samples] + x_shape[1:], minval=-1., maxval=1., dtype=tf_dtype)) <NEW_LINE> return delta_x <NEW_LINE> <DEDENT> def _compute_gradients(self, loss_fn, x, unused_optim_state): <NEW_LINE> <INDENT> assert len(x) == 1 and x[0].get_shape().as_list()[0] == 1 <NEW_LINE> x = x[0] <NEW_LINE> x_shape = x.get_shape().as_list() <NEW_LINE> def body(i, grad_array): <NEW_LINE> <INDENT> delta = self._delta <NEW_LINE> delta_x = self._get_delta(x, delta) <NEW_LINE> delta_x = tf.concat([delta_x, -delta_x], axis=0) <NEW_LINE> loss_vals = tf.reshape( loss_fn(x + delta_x), [2 * self._num_samples] + [1] * (len(x_shape) - 1)) <NEW_LINE> avg_grad = reduce_mean(loss_vals * delta_x, axis=0) / delta <NEW_LINE> avg_grad = tf.expand_dims(avg_grad, axis=0) <NEW_LINE> new_grad_array = grad_array.write(i, avg_grad) <NEW_LINE> return i + 1, new_grad_array <NEW_LINE> <DEDENT> def cond(i, _): <NEW_LINE> <INDENT> return i < self._num_iters <NEW_LINE> <DEDENT> _, all_grads = tf.while_loop( cond, body, loop_vars=[ 0, tf.TensorArray(size=self._num_iters, dtype=tf_dtype) ], back_prop=False, parallel_iterations=1) <NEW_LINE> avg_grad = reduce_sum(all_grads.stack(), axis=0) <NEW_LINE> return [avg_grad]
|
Optimizer for gradient-free attacks in https://arxiv.org/abs/1802.05666.
Gradient estimates are computed using Simultaneous Perturbation Stochastic
Approximation (SPSA), combined with the ADAM update rule.
|
6259903be76e3b2f99fd9c0a
|
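Stripped of TensorFlow, the gradient estimate built inside _compute_gradients reduces to the following numpy sketch; loss_fn, delta, and the sample count are placeholders.

import numpy as np

def spsa_gradient(loss_fn, x, delta=0.01, num_samples=64):
    # Antithetic random-sign perturbations: evaluate the loss at x + d and
    # x - d, then average loss * d / delta -- the same estimator the class
    # accumulates across its tf.while_loop iterations.
    d = delta * np.sign(np.random.uniform(-1., 1., (num_samples,) + x.shape))
    d = np.concatenate([d, -d], axis=0)
    losses = np.array([loss_fn(x + p) for p in d])
    losses = losses.reshape((-1,) + (1,) * x.ndim)
    return np.mean(losses * d, axis=0) / delta

# e.g. for f(z) = sum(z**2) the estimate approaches the true gradient 2*z
g = spsa_gradient(lambda z: float(np.sum(z ** 2)), np.ones(3))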
class Platform(object): <NEW_LINE> <INDENT> def __init__(self, operating_system, architecture): <NEW_LINE> <INDENT> self.operating_system = operating_system <NEW_LINE> self.architecture = architecture <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def Current(os_override=None, arch_override=None): <NEW_LINE> <INDENT> return Platform( os_override if os_override else OperatingSystem.Current(), arch_override if arch_override else Architecture.Current()) <NEW_LINE> <DEDENT> def UserAgentFragment(self): <NEW_LINE> <INDENT> if self.operating_system == OperatingSystem.LINUX: <NEW_LINE> <INDENT> return '({name} {version})'.format( name=self.operating_system.name, version=platform.release()) <NEW_LINE> <DEDENT> elif self.operating_system == OperatingSystem.WINDOWS: <NEW_LINE> <INDENT> return '({name} NT {version})'.format( name=self.operating_system.name, version=platform.version()) <NEW_LINE> <DEDENT> elif self.operating_system == OperatingSystem.MACOSX: <NEW_LINE> <INDENT> format_string = '(Macintosh; {name} Mac OS X {version})' <NEW_LINE> arch_string = (self.architecture.name if self.architecture == Architecture.ppc else 'Intel') <NEW_LINE> return format_string.format( name=arch_string, version=platform.release()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return '()' <NEW_LINE> <DEDENT> <DEDENT> def AsyncPopenArgs(self): <NEW_LINE> <INDENT> args = {} <NEW_LINE> if self.operating_system == OperatingSystem.WINDOWS: <NEW_LINE> <INDENT> args['close_fds'] = True <NEW_LINE> detached_process = 0x00000008 <NEW_LINE> args['creationflags'] = detached_process <NEW_LINE> <DEDENT> return args <NEW_LINE> <DEDENT> def IsSupported(self): <NEW_LINE> <INDENT> if (self.operating_system == OperatingSystem.CYGWIN and self.architecture == Architecture.x86_64): <NEW_LINE> <INDENT> sys.stderr.write('ERROR: Cygwin 64 bit is not supported by the Google ' 'Cloud SDK. Please use a 32 bit version of Cygwin.') <NEW_LINE> return False <NEW_LINE> <DEDENT> return True
|
Holds an operating system and architecture.
|
6259903b6e29344779b01850
|
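A short usage sketch; the printed fragment varies by host and is only illustrative.

plat = Platform.Current()
print(plat.UserAgentFragment())  # e.g. '(Linux 5.15.0-86-generic)' on Linux
kwargs = plat.AsyncPopenArgs()   # adds the detached-process creation flag on Windows only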
class TestSimplifyIndex(object): <NEW_LINE> <INDENT> def setup(self): <NEW_LINE> <INDENT> self.shape = (3, 4, 5) <NEW_LINE> self.data = np.arange(np.product(self.shape)).reshape(self.shape) <NEW_LINE> <DEDENT> def _test_with(self, indices): <NEW_LINE> <INDENT> expected = self.data[indices] <NEW_LINE> simplified = _simplify_index(indices, self.data.shape) <NEW_LINE> actual = self.data[simplified] <NEW_LINE> np.testing.assert_array_equal(actual, expected) <NEW_LINE> <DEDENT> def _test_index_error(self, indices): <NEW_LINE> <INDENT> with assert_raises(IndexError): <NEW_LINE> <INDENT> simplified = _simplify_index(indices, self.data.shape) <NEW_LINE> self.data[simplified] <NEW_LINE> <DEDENT> with assert_raises(IndexError): <NEW_LINE> <INDENT> self.data[indices] <NEW_LINE> <DEDENT> <DEDENT> def test_1d(self): <NEW_LINE> <INDENT> self._test_with(np.s_[np.array([False, True, False])]) <NEW_LINE> self._test_with(np.s_[[1]]) <NEW_LINE> <DEDENT> def test_contiguous(self): <NEW_LINE> <INDENT> self._test_with(np.s_[:, np.array([False, True, True, False]), :]) <NEW_LINE> self._test_with(np.s_[:, [1, 2], :]) <NEW_LINE> <DEDENT> def test_discontiguous_but_regular(self): <NEW_LINE> <INDENT> self._test_with(np.s_[:, [False, True, False, True], :]) <NEW_LINE> self._test_with(np.s_[:, [1, 3], :]) <NEW_LINE> <DEDENT> def test_discontiguous(self): <NEW_LINE> <INDENT> self._test_with(np.s_[:, [True, True, False, True], :]) <NEW_LINE> self._test_with(np.s_[:, [0, 1, 3], :]) <NEW_LINE> <DEDENT> def test_all_false(self): <NEW_LINE> <INDENT> self._test_with(np.s_[:, np.array([False, False, False, False]), :]) <NEW_LINE> <DEDENT> def test_all_true(self): <NEW_LINE> <INDENT> self._test_with(np.s_[:, np.array([True, True, True, True]), :]) <NEW_LINE> <DEDENT> def test_newaxis(self): <NEW_LINE> <INDENT> self._test_with(np.s_[np.newaxis, np.array([True, True, False])]) <NEW_LINE> <DEDENT> def test_ellipsis(self): <NEW_LINE> <INDENT> self._test_with(np.s_[..., np.array([True, False, True, False, True])]) <NEW_LINE> <DEDENT> def test_wrong_length(self): <NEW_LINE> <INDENT> self._test_index_error(np.s_[:, np.array([True, False]), :]) <NEW_LINE> <DEDENT> def test_too_many_axes(self): <NEW_LINE> <INDENT> self._test_index_error(np.s_[0, 0, 0, 0]) <NEW_LINE> <DEDENT> def test_bad_index_dtype(self): <NEW_LINE> <INDENT> self._test_index_error(np.s_[:, np.array([1.2, 3.4])])
|
Test the :func:`~katdal.lazy_indexer._simplify_index` function.
|
6259903b07d97122c4217e9c
|
class AssetCollection(object): <NEW_LINE> <INDENT> def __init__(self, parent=None, guid=None, *args, **kwargs): <NEW_LINE> <INDENT> super(AssetCollection, self).__init__(*args, **kwargs) <NEW_LINE> if not guid: <NEW_LINE> <INDENT> guid = str(uuid.uuid4()) <NEW_LINE> <DEDENT> collection = self.get_collection() <NEW_LINE> self.uri = '/' + str.join('/', [collection, guid]) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return json.dumps(self.__dict__) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return json.dumps(self.__dict__) <NEW_LINE> <DEDENT> def get_collection(self): <NEW_LINE> <INDENT> return type(self).__name__.lower() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return
|
User-defined domain objects are the customizable collections that represent
data in the Asset Service.
This is an experimental base class for a lightweight ORM that marshalls
and unmarshalls domain objects between Python and the REST
endpoints.
|
6259903b71ff763f4b5e899b
|
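A sketch of the intended subclassing pattern; the Sensor class name is illustrative.

class Sensor(AssetCollection):
    pass  # collection name is derived from the lowercased class name

s = Sensor()
# s.uri has the form '/sensor/<uuid4>', e.g. '/sensor/0f8f6c3a-...'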
class UserProfile(AbstractBaseUser, PermissionsMixin): <NEW_LINE> <INDENT> email = models.EmailField(max_length=255, unique=True) <NEW_LINE> name = models.CharField(max_length=255) <NEW_LINE> is_active = models.BooleanField(default=True) <NEW_LINE> is_staff = models.BooleanField(default=False) <NEW_LINE> objects = UserProfileManager() <NEW_LINE> USERNAME_FIELD = 'email' <NEW_LINE> REQUIRED_FIELDS = ['name'] <NEW_LINE> def get_full_name(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def get_short_name(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.email
|
Represents a "user profile" inside our system
|
6259903bb57a9660fecd2c79
|
class PART_ENTRY(v_types.VStruct): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(PART_ENTRY, self).__init__() <NEW_LINE> self.BootIndicator = v_types.uint8(enum=BOOTINDICATOR) <NEW_LINE> self.StartingHead = v_types.uint8() <NEW_LINE> self.StartingSectCylinder = v_types.uint16() <NEW_LINE> self.SystemID = v_types.uint8(enum=SYSTEMID) <NEW_LINE> self.EndingHead = v_types.uint8() <NEW_LINE> self.EndingSectCylinder = v_types.uint16() <NEW_LINE> self.RelativeSector = v_types.uint32() <NEW_LINE> self.TotalSectors = v_types.uint32()
|
Partition entry in the MBR.
|
6259903bb57a9660fecd2c7a
|
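For reference, the same 16-byte little-endian layout can be unpacked with the standard struct module; this is a sketch independent of the vstruct-based class above.

import struct

def parse_part_entry(raw):
    # 16-byte MBR partition entry: boot flag, CHS start, system id,
    # CHS end, starting LBA, sector count -- mirroring PART_ENTRY above.
    (boot, start_head, start_sect_cyl, system_id,
     end_head, end_sect_cyl, rel_sector, total_sectors) = struct.unpack('<BBHBBHII', raw)
    return {'BootIndicator': boot, 'SystemID': system_id,
            'RelativeSector': rel_sector, 'TotalSectors': total_sectors}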
class SphericalCoords(Distribution): <NEW_LINE> <INDENT> def __init__(self, m): <NEW_LINE> <INDENT> super(SphericalCoords, self).__init__() <NEW_LINE> self.m = m <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "%s(%r)" % (type(self).__name__, self.m) <NEW_LINE> <DEDENT> def sample(self, n, d=None, rng=np.random): <NEW_LINE> <INDENT> shape = self._sample_shape(n, d) <NEW_LINE> y = rng.uniform(size=shape) <NEW_LINE> return self.ppf(y) <NEW_LINE> <DEDENT> def pdf(self, x): <NEW_LINE> <INDENT> return (np.pi * np.sin(np.pi * x) ** (self.m-1) / beta(self.m / 2., .5)) <NEW_LINE> <DEDENT> def cdf(self, x): <NEW_LINE> <INDENT> y = .5 * betainc(self.m / 2., .5, np.sin(np.pi * x) ** 2) <NEW_LINE> return np.where(x < .5, y, 1 - y) <NEW_LINE> <DEDENT> def ppf(self, y): <NEW_LINE> <INDENT> y_reflect = np.where(y < .5, y, 1 - y) <NEW_LINE> z_sq = betaincinv(self.m / 2., .5, 2 * y_reflect) <NEW_LINE> x = np.arcsin(np.sqrt(z_sq)) / np.pi <NEW_LINE> return np.where(y < .5, x, 1 - x)
|
Spherical coordinates for inverse transform method.
This is used to map the hypercube onto the hypersphere and hyperball. [#]_
Parameters
----------
m : ``integer``
Positive index for spherical coordinate.
See Also
--------
:func:`.spherical_transform`
:class:`nengo.dists.SqrtBeta`
References
----------
.. [#] K.-T. Fang and Y. Wang, Number-Theoretic Methods in Statistics.
Chapman & Hall, 1994.
Examples
--------
>>> from nengolib.stats import SphericalCoords
>>> coords = SphericalCoords(3)
>>> import matplotlib.pyplot as plt
>>> x = np.linspace(0, 1, 1000)
>>> plt.figure(figsize=(8, 8))
>>> plt.subplot(411)
>>> plt.title(str(coords))
>>> plt.ylabel("Samples")
>>> plt.hist(coords.sample(1000), bins=50, normed=True)
>>> plt.subplot(412)
>>> plt.ylabel("PDF")
>>> plt.plot(x, coords.pdf(x))
>>> plt.subplot(413)
>>> plt.ylabel("CDF")
>>> plt.plot(x, coords.cdf(x))
>>> plt.subplot(414)
>>> plt.ylabel("PPF")
>>> plt.plot(x, coords.ppf(x))
>>> plt.xlabel("x")
>>> plt.show()
|
6259903b82261d6c527307c3
|
class MySingleOrderApi(LoginBaseApi): <NEW_LINE> <INDENT> url = "/info/mySingleOrder" <NEW_LINE> def build_custom_param(self, data): <NEW_LINE> <INDENT> return {'status':data['status'],'page': 1, 'length': 20}
|
Gets the user's single-purchase (self-bought) order records.
status
order status
optional
int
0 = awaiting draw
1 = no prize
2 = small prize
3 = big prize
defaults to all
|
6259903b23849d37ff8522b8
|
class CarliniWagnerL2(Attack): <NEW_LINE> <INDENT> def __init__(self, model, back='tf', sess=None): <NEW_LINE> <INDENT> super(CarliniWagnerL2, self).__init__(model, back, sess) <NEW_LINE> import tensorflow as tf <NEW_LINE> self.feedable_kwargs = {'y': tf.float32, 'y_target': tf.float32} <NEW_LINE> self.structural_kwargs = ['batch_size', 'confidence', 'targeted', 'learning_rate', 'binary_search_steps', 'max_iterations', 'abort_early', 'initial_const', 'clip_min', 'clip_max'] <NEW_LINE> if not isinstance(self.model, Model): <NEW_LINE> <INDENT> self.model = CallableModelWrapper(self.model, 'logits') <NEW_LINE> <DEDENT> <DEDENT> def generate(self, x, **kwargs): <NEW_LINE> <INDENT> import tensorflow as tf <NEW_LINE> from .attacks_tf import CarliniWagnerL2 as CWL2 <NEW_LINE> self.parse_params(**kwargs) <NEW_LINE> labels, nb_classes = self.get_or_guess_labels(x, kwargs) <NEW_LINE> attack = CWL2(self.sess, self.model, self.batch_size, self.confidence, 'y_target' in kwargs, self.learning_rate, self.binary_search_steps, self.max_iterations, self.abort_early, self.initial_const, self.clip_min, self.clip_max, nb_classes, x.get_shape().as_list()[1:]) <NEW_LINE> def cw_wrap(x_val, y_val): <NEW_LINE> <INDENT> return np.array(attack.attack(x_val, y_val), dtype=np.float32) <NEW_LINE> <DEDENT> wrap = tf.py_func(cw_wrap, [x, labels], tf.float32) <NEW_LINE> return wrap <NEW_LINE> <DEDENT> def parse_params(self, y=None, y_target=None, nb_classes=None, batch_size=1, confidence=0, learning_rate=5e-3, binary_search_steps=5, max_iterations=1000, abort_early=True, initial_const=1e-2, clip_min=0, clip_max=1): <NEW_LINE> <INDENT> if nb_classes is not None: <NEW_LINE> <INDENT> warnings.warn("The nb_classes argument is deprecated and will " "be removed on 2018-02-11") <NEW_LINE> <DEDENT> self.batch_size = batch_size <NEW_LINE> self.confidence = confidence <NEW_LINE> self.learning_rate = learning_rate <NEW_LINE> self.binary_search_steps = binary_search_steps <NEW_LINE> self.max_iterations = max_iterations <NEW_LINE> self.abort_early = abort_early <NEW_LINE> self.initial_const = initial_const <NEW_LINE> self.clip_min = clip_min <NEW_LINE> self.clip_max = clip_max
|
This attack was originally proposed by Carlini and Wagner. It is an
iterative attack that finds adversarial examples on many defenses that
are robust to other attacks.
Paper link: https://arxiv.org/abs/1608.04644
At a high level, this attack is an iterative attack using Adam and
a specially-chosen loss function to find adversarial examples with
lower distortion than other attacks. This comes at the cost of speed,
as this attack is often much slower than others.
|
6259903b30dc7b76659a0a31
|
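A usage sketch following the generate() signature above; the session, model wrapper, input placeholder, and numpy batch are assumed to exist in the caller's graph.

# sess: tf.Session, model: a cleverhans Model, x: input placeholder
attack = CarliniWagnerL2(model, sess=sess)
adv_x = attack.generate(x, batch_size=32, max_iterations=200,
                        binary_search_steps=5, clip_min=0., clip_max=1.)
adv_batch = sess.run(adv_x, feed_dict={x: clean_batch})  # clean_batch: numpy inputs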
class MongologTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.log = teamFourMongolog.Logger() <NEW_LINE> self.testRecord = self.log.prepRecord(testEventString, testPayload) <NEW_LINE> self.t = datetime.datetime.utcnow() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.log = None <NEW_LINE> self.testRecord = None <NEW_LINE> self.t = None <NEW_LINE> <DEDENT> def test_prepRecord(self): <NEW_LINE> <INDENT> self.assertEqual(len(self.testRecord), 3) <NEW_LINE> self.assertIs(type(self.testRecord), dict) <NEW_LINE> <DEDENT> def test_insertRecord(self): <NEW_LINE> <INDENT> self.assertIsNot(self.log.insertRecord(testEventString, testPayload), None) <NEW_LINE> <DEDENT> def test_getCurrentIterRecords(self): <NEW_LINE> <INDENT> self.log.insertRecord(testEventString, testPayload) <NEW_LINE> self.assertIsNot(self.log.getCurrentIterRecords(self.t), None) <NEW_LINE> self.assertIs(type(self.log.getCurrentIterRecords(self.t)), list) <NEW_LINE> <DEDENT> def test_parseTimes(self): <NEW_LINE> <INDENT> for i in range(0, 3): self.log.insertRecord(testEventString, testPayload) <NEW_LINE> self.assertIsNot(self.log.parseTimes(self.t), None) <NEW_LINE> self.assertIs(type(self.log.parseTimes(self.t)), list) <NEW_LINE> <DEDENT> def printTimes(self): <NEW_LINE> <INDENT> for i in range(0, 3): self.log.insertRecord(testEventString, testPayload) <NEW_LINE> self.assertIsNot(self.log.printTimes(self.t), None) <NEW_LINE> self.assertIs(type(self.log.printTimes(self.t)), list)
|
Unit tests for the teamFourMongolog module.
The teamFourMongolog module contains the logging and timer functionality.
|
6259903c30c21e258be99a0c
|
class NoneType(Type): <NEW_LINE> <INDENT> def optimize(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def merge(self, another): <NEW_LINE> <INDENT> if isinstance(another, NoneType): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> return another.merge(self) <NEW_LINE> <DEDENT> def to_string(self, shift=0, indent=0): <NEW_LINE> <INDENT> return "null"
|
Represents the null type; merging with a non-null type defers to that type's merge rule.
|
6259903cdc8b845886d547b5
|
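To make the merge rule concrete, here is a self-contained sketch with a stub Type hierarchy (the real base classes are not shown in the row):

class Type:
    pass

class IntType(Type):
    def merge(self, another):
        return self  # toy rule: a concrete type absorbs null
    def to_string(self, shift=0, indent=0):
        return "int"

class NoneType(Type):
    def optimize(self):
        return self
    def merge(self, another):
        # null merged with null stays null; otherwise defer to the other type
        if isinstance(another, NoneType):
            return self
        return another.merge(self)
    def to_string(self, shift=0, indent=0):
        return "null"

print(NoneType().merge(NoneType()).to_string())  # null
print(NoneType().merge(IntType()).to_string())   # int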
class _Holder(Process): <NEW_LINE> <INDENT> def __init__(self,name,sim=None): <NEW_LINE> <INDENT> Process.__init__(self,name=name,sim=sim) <NEW_LINE> <DEDENT> def trigger(self, delay): <NEW_LINE> <INDENT> yield hold, self, delay <NEW_LINE> if proc not in b[2].activeQ: <NEW_LINE> <INDENT> proc.sim.reactivate(proc)
|
Provides a timeout process; note that `proc` and `b` are free variables expected to be bound in the enclosing scope.
|
6259903cb830903b9686ed79
|
class AttributeDict(dict): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(AttributeDict, self).__init__(*args, **kwargs) <NEW_LINE> self.__dict__ = self
|
Dict which can handle access to keys using attributes.
Example:
>>> ad = AttributeDict({'a': 'b'})
>>> ad.a
'b'
|
6259903ce76e3b2f99fd9c0c
|
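The class above is self-contained, so a runnable demo needs nothing beyond the definition itself:

ad = AttributeDict({'a': 'b'})
print(ad.a)      # 'b' -- attribute access reads the key
ad.c = 1         # attribute assignment writes a key
print(ad['c'])   # 1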
class TestApisMaxTps(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testApisMaxTps(self): <NEW_LINE> <INDENT> pass
|
ApisMaxTps unit test stubs
|
6259903c71ff763f4b5e899d
|
class TestBlobIO(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> os.mkdir("temp") <NEW_LINE> <DEDENT> def test_init(self): <NEW_LINE> <INDENT> init_catalog("./data/blob_resource_glossary.json", "temp") <NEW_LINE> with open("./data/blob_resource_glossary.json", "r") as f: <NEW_LINE> <INDENT> glossary = json.load(f) <NEW_LINE> <DEDENT> catalog_folders = os.listdir("./temp/catalog") <NEW_LINE> task_folders = os.listdir("./temp/tasks") <NEW_LINE> n_catalog = len(catalog_folders) <NEW_LINE> n_tasks = len(task_folders) <NEW_LINE> n_expected = len(set([entry['resource'] for entry in glossary])) <NEW_LINE> assert n_catalog == n_expected <NEW_LINE> assert n_tasks == n_expected <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> shutil.rmtree("temp")
|
Test that write behavior is as expected when passed a glossary of entries corresponding solely to the files
contained in a single blob resource. In this case we expect that exactly one catalog folder, one task folder,
one datapackage, and one depositor are written to disk.
|
6259903c15baa72349463191
|
class MinimalPeakGroup(PeakGroupBase): <NEW_LINE> <INDENT> def __init__(self, unique_id, fdr_score, assay_rt, selected, cluster_id, peptide, intensity=None, dscore=None): <NEW_LINE> <INDENT> super(MinimalPeakGroup, self).__init__() <NEW_LINE> self.id_ = unique_id <NEW_LINE> self.fdr_score = fdr_score <NEW_LINE> self.normalized_retentiontime = assay_rt <NEW_LINE> self.cluster_id_ = cluster_id <NEW_LINE> self.peptide = peptide <NEW_LINE> self.intensity_ = intensity <NEW_LINE> self.dscore_ = dscore <NEW_LINE> <DEDENT> def print_out(self): <NEW_LINE> <INDENT> return self.peptide.run.get_id() + "/" + self.get_feature_id() + " score:" + str(self.get_fdr_score()) + " RT:" + str(self.get_normalized_retentiontime()) <NEW_LINE> <DEDENT> def set_fdr_score(self, fdr_score): <NEW_LINE> <INDENT> raise Exception("Cannot set in immutable object") <NEW_LINE> <DEDENT> def set_normalized_retentiontime(self, normalized_retentiontime): <NEW_LINE> <INDENT> raise Exception("Cannot set in immutable object") <NEW_LINE> <DEDENT> def set_feature_id(self, id_): <NEW_LINE> <INDENT> raise Exception("Cannot set in immutable object") <NEW_LINE> <DEDENT> def set_intensity(self, intensity): <NEW_LINE> <INDENT> raise Exception("Cannot set in immutable object") <NEW_LINE> <DEDENT> def get_dscore(self): <NEW_LINE> <INDENT> return self.dscore_ <NEW_LINE> <DEDENT> def select_this_peakgroup(self): <NEW_LINE> <INDENT> self.peptide.select_pg(self.get_feature_id()) <NEW_LINE> <DEDENT> def setClusterID(self, id_): <NEW_LINE> <INDENT> self.cluster_id_ = id_ <NEW_LINE> self.peptide.setClusterID(self.get_feature_id(), id_) <NEW_LINE> <DEDENT> def get_cluster_id(self): <NEW_LINE> <INDENT> return self.cluster_id_
|
A single peakgroup that is defined by a retention time in a chromatogram
of multiple transitions. Additionally it has an fdr_score and it has an
aligned RT (e.g. retention time in normalized space).
A peakgroup can be selected for quantification or not (this is stored as
having cluster_id == 1).
Note that for performance reasons, the peakgroups are created on the fly
and not stored as objects but rather as tuples in "Peptide".
Each peak group has a unique id, a score (usually an fdr score), a retention
time, and a back-reference to the precursor that generated the peakgroup.
It can also be assigned a cluster id, where cluster 1 is special: it marks
the peak group used for quantification.
|
6259903cb57a9660fecd2c7b
|
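A hedged usage sketch follows; it assumes PeakGroupBase (not shown in the row) exposes get_feature_id(), and uses a stub in place of the real Peptide class:

# StubPeptide stands in for the real Peptide class, which is not shown.
class StubPeptide:
    def __init__(self):
        self.selected = None
    def select_pg(self, feature_id):
        self.selected = feature_id

pep = StubPeptide()
pg = MinimalPeakGroup("pg_1", fdr_score=0.01, assay_rt=42.0,
                      selected=False, cluster_id=1, peptide=pep)
pg.select_this_peakgroup()   # delegates selection to the owning precursor
print(pep.selected)          # pg_1
try:
    pg.set_fdr_score(0.5)    # setters are disabled on the immutable object
except Exception as e:
    print(e)                 # Cannot set in immutable object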
class Database(ABC): <NEW_LINE> <INDENT> def __init__(self, library: Optional[papis.library.Library] = None): <NEW_LINE> <INDENT> self.lib = library or papis.config.get_lib() <NEW_LINE> assert(isinstance(self.lib, papis.library.Library)) <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def initialize(self) -> None: <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def get_backend_name(self) -> str: <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> def get_lib(self) -> str: <NEW_LINE> <INDENT> return self.lib.name <NEW_LINE> <DEDENT> def get_dirs(self) -> List[str]: <NEW_LINE> <INDENT> return self.lib.paths <NEW_LINE> <DEDENT> def match( self, document: papis.document.Document, query_string: str) -> bool: <NEW_LINE> <INDENT> raise NotImplementedError("Match not defined for this class") <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def clear(self) -> None: <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def add(self, document: papis.document.Document) -> None: <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def update(self, document: papis.document.Document) -> None: <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def delete(self, document: papis.document.Document) -> None: <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def query(self, query_string: str) -> List[papis.document.Document]: <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def query_dict( self, query: Dict[str, str]) -> List[papis.document.Document]: <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def get_all_documents(self) -> List[papis.document.Document]: <NEW_LINE> <INDENT> ... <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def get_all_query_string(self) -> str: <NEW_LINE> <INDENT> ...
|
Abstract base class for papis database backends.
|
6259903c1f5feb6acb163df4
|
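A hedged sketch of a concrete backend for the ABC above; real papis backends differ, and this in-memory stand-in assumes a configured papis environment for the Library lookup in __init__:

class InMemoryDatabase(Database):
    def __init__(self, library=None):
        super().__init__(library)
        self._docs = []          # documents held in memory only
    def initialize(self): pass
    def get_backend_name(self): return "in-memory"
    def clear(self): self._docs.clear()
    def add(self, document): self._docs.append(document)
    def update(self, document): pass
    def delete(self, document): self._docs.remove(document)
    def query(self, query_string): return list(self._docs)
    def query_dict(self, query): return list(self._docs)
    def get_all_documents(self): return list(self._docs)
    def get_all_query_string(self): return ""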
class PostDeleteView(AutoPermissionRequiredMixin, UpdateView): <NEW_LINE> <INDENT> model = Post <NEW_LINE> template_name = "blog/post_confirm_delete.html" <NEW_LINE> form_class = PostDeleteForm <NEW_LINE> success_url = reverse_lazy("blog:post_list") <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> if self.request.user.is_superuser: <NEW_LINE> <INDENT> self.removing_post = get_object_or_404(Post, slug=self.kwargs["slug"]) <NEW_LINE> if self.get_form().is_valid(): <NEW_LINE> <INDENT> self.removing_post.remove() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise PermissionDenied() <NEW_LINE> <DEDENT> return super().get_queryset() <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super().get_context_data(**kwargs) <NEW_LINE> context["posts"] = self.model.objects.get_without_removed().order_by("-pk") <NEW_LINE> context["title"] = "Delete Post" <NEW_LINE> context["side_title"] = "Post List" <NEW_LINE> return context
|
PostDeleteView
View to delete a Post.
Args:
    AutoPermissionRequiredMixin: checks that the user has permission to perform the action
    UpdateView: Django generic view providing the confirmation-form handling
Raises:
    PermissionDenied: if the requesting user is not a superuser
Returns:
    HttpResponse: a redirect to the post list on success
|
6259903cd10714528d69ef8c
|
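Hypothetical URL wiring for the view above; the app's real urls.py is not shown, so the route names here (other than post_list, which success_url requires) are illustrative:

from django.urls import path
from .views import PostDeleteView, PostListView  # PostListView assumed to exist

app_name = "blog"
urlpatterns = [
    path("", PostListView.as_view(), name="post_list"),  # target of success_url
    path("post/<slug:slug>/delete/", PostDeleteView.as_view(), name="post_delete"),
]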
class MalformedManifest(Exception): <NEW_LINE> <INDENT> def __init__(self, value=''): <NEW_LINE> <INDENT> Exception.__init__(self, value)
|
Raised when a manifest has an invalid format.
|
6259903c0fa83653e46f60dc
|
class set_root_password(my_cnf): <NEW_LINE> <INDENT> def __init__(self, password=None, **kwargs): <NEW_LINE> <INDENT> kwargs.update(password=password, switch_user='root') <NEW_LINE> super(set_root_password, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def do(self): <NEW_LINE> <INDENT> new_password = self.args['password'] <NEW_LINE> old_password = None <NEW_LINE> dst = os.path.expanduser('/root/.my.cnf') <NEW_LINE> if os.path.isfile(dst): <NEW_LINE> <INDENT> with codecs.open(dst, 'r', 'utf8') as fd: <NEW_LINE> <INDENT> for line in fd: <NEW_LINE> <INDENT> line = line.strip() <NEW_LINE> if line.startswith('password'): <NEW_LINE> <INDENT> old_password = line.split('=')[1].strip() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> if old_password != new_password: <NEW_LINE> <INDENT> cmd = ['mysqladmin', 'password', new_password] <NEW_LINE> if old_password: <NEW_LINE> <INDENT> cmd[1:1] = ['--password=' + old_password] <NEW_LINE> <DEDENT> self.sh(cmd) <NEW_LINE> <DEDENT> return super(set_root_password, self).do()
|
Set the MySQL root password and store it in /root/.my.cnf; the change is skipped when the file already records the same password.
|
6259903c23e79379d538d701
|
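The password-parsing step in do() above can be lifted into a standalone helper; this sketch mirrors the row's logic (including its simple split on '='):

import codecs
import os

def read_mycnf_password(path='/root/.my.cnf'):
    # Return the password stored in a .my.cnf file, or None if absent.
    if not os.path.isfile(path):
        return None
    with codecs.open(path, 'r', 'utf8') as fd:
        for line in fd:
            line = line.strip()
            if line.startswith('password'):
                return line.split('=')[1].strip()
    return None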
class Altitude(Value): <NEW_LINE> <INDENT> default_unit = 'm' <NEW_LINE> units = { 'm': Unit(1, 'm', 'meters', 0), 'ft': Unit(3.2808398950131, 'ft', 'feet', 0) }
|
Represents a height above a reference level; the default unit is meters, with feet also supported.
|
6259903c50485f2cf55dc183
|
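The Unit factors above imply a straightforward conversion; a self-contained sketch (the real Value/Unit base classes are not shown in the row):

M_TO_FT = 3.2808398950131   # feet per meter, as in the 'ft' Unit above

def meters_to_feet(m):
    return m * M_TO_FT

def feet_to_meters(ft):
    return ft / M_TO_FT

print(round(meters_to_feet(1000)))  # 3281, rounded to 0 decimals as the Unit specifies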
class Scheduler: <NEW_LINE> <INDENT> def __init__(self, app): <NEW_LINE> <INDENT> self._app = app <NEW_LINE> self._queue = TaskQueue() <NEW_LINE> self._runners = [ Runner(self._queue, self._app) for _ in range(NB_RUNNER) ] <NEW_LINE> self._running = False <NEW_LINE> <DEDENT> def add_task(self, task): <NEW_LINE> <INDENT> if self._running: <NEW_LINE> <INDENT> self._add_task_if_not_already_enqueue(task) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> current_app.logger.warning( "cannot add task to queue when scheduler is not running" ) <NEW_LINE> <DEDENT> <DEDENT> def run(self): <NEW_LINE> <INDENT> self._running = True <NEW_LINE> for runner in self._runners: <NEW_LINE> <INDENT> runner.start() <NEW_LINE> <DEDENT> <DEDENT> def join(self): <NEW_LINE> <INDENT> self._app.logger.debug("finishing the queue") <NEW_LINE> self._queue.join() <NEW_LINE> self._app.logger.debug("stopping all threads") <NEW_LINE> self._running = False <NEW_LINE> for runner in self._runners: <NEW_LINE> <INDENT> runner.stop() <NEW_LINE> <DEDENT> for runner in self._runners: <NEW_LINE> <INDENT> runner.join() <NEW_LINE> <DEDENT> self._app.logger.debug("finished stopping all threads") <NEW_LINE> <DEDENT> def contain_task(self, task): <NEW_LINE> <INDENT> return task in self._queue <NEW_LINE> <DEDENT> def _add_task_if_not_already_enqueue(self, task): <NEW_LINE> <INDENT> if self.contain_task(task): <NEW_LINE> <INDENT> current_app.logger.debug("'%s' already in queue", task.url) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> current_app.logger.debug("adding '%s' to queue", task.url) <NEW_LINE> self._queue.put_nowait(task) <NEW_LINE> current_app.logger.debug( "size of queue is %d", self._queue.qsize() )
|
'Scheduler' is probably a bad name for this class, but I do not know
what else to call it. It takes tasks and distributes them to runners.
Except it doesn't even do that, because the runners themselves take the
tasks from the queue.
In essence, this is just a container class to hold a queue and some
runners, and to provide some convenience functions to manage them both.
|
6259903cd99f1b3c44d068a8
|
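The queue-plus-runners pattern described above can be reproduced with the standard library alone; this sketch uses sentinel values for the stop protocol, since the real TaskQueue/Runner classes are not shown in the row:

import queue
import threading

NB_RUNNER = 4

def runner(q):
    while True:
        task = q.get()
        if task is None:       # sentinel: stop this runner
            q.task_done()
            break
        task()                 # a task is any callable here
        q.task_done()

q = queue.Queue()
threads = [threading.Thread(target=runner, args=(q,)) for _ in range(NB_RUNNER)]
for t in threads:
    t.start()
for i in range(10):
    q.put(lambda i=i: print("task", i))
q.join()                       # wait for the queue to drain
for _ in threads:
    q.put(None)                # stop all runners
for t in threads:
    t.join()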
@base.ReleaseTracks(base.ReleaseTrack.ALPHA) <NEW_LINE> class CreateAlpha(Create): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def Args(cls, parser): <NEW_LINE> <INDENT> flags.MakeCommitmentArg(False).AddArgument(parser, operation_type='create') <NEW_LINE> flags.AddCreateFlags(parser, enable_ssd_and_accelerator_support=True) <NEW_LINE> flags.AddReservationArgGroup(parser) <NEW_LINE> messages = apis.GetMessagesModule('compute', 'alpha') <NEW_LINE> flags.GetTypeMapperFlag(messages).choice_arg.AddToParser(parser) <NEW_LINE> <DEDENT> def _MakeCreateRequest( self, args, messages, project, region, commitment_ref, holder): <NEW_LINE> <INDENT> commitment_type_flag = flags.GetTypeMapperFlag(messages) <NEW_LINE> commitment_type = commitment_type_flag.GetEnumForChoice(args.type) <NEW_LINE> commitment = messages.Commitment( reservations=reservation_helper.MakeReservations( args, messages, holder), name=commitment_ref.Name(), plan=flags.TranslatePlanArg(messages, args.plan), resources=flags.TranslateResourcesArgGroup(messages, args), type=commitment_type) <NEW_LINE> return messages.ComputeRegionCommitmentsInsertRequest( commitment=commitment, project=project, region=commitment_ref.region, )
|
Create Google Compute Engine commitments.
|
6259903c8a349b6b43687446
|
class Result(SampleableEnum): <NEW_LINE> <INDENT> Zero = 0 <NEW_LINE> One = 1
|
Represents the `Result` Q# type.
|
6259903cbaa26c4b54d504aa
|
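A minimal sketch treating Result like a standard IntEnum; the real SampleableEnum base from the qsharp package is not shown in the row:

from enum import IntEnum

class Result(IntEnum):
    Zero = 0
    One = 1

print(Result.One == 1)   # True: results compare equal to their bit values
print(Result(0))         # Result.Zero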