code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class StackStrapCLI(CommandLoader): <NEW_LINE> <INDENT> commands_to_load = (Create, Template) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.parser = argparse.ArgumentParser( description='Making development with Vagrant + Salt more awesome' ) <NEW_LINE> self.subparsers = self.parser.add_subparsers( title='commands', description='', help='', dest='command' ) <NEW_LINE> self.parser.add_argument( '-V', '--version', action='version', version=__version__ ) <NEW_LINE> self.parser.add_argument( '-d', '--debug', action='store_true', dest='debug', help='copious amounts of output' ) <NEW_LINE> self.parser.add_argument( '-q', '--quiet', action='store_true', dest='quiet', help='show only warnings and errors' ) <NEW_LINE> self.load_commands() <NEW_LINE> <DEDENT> def main(self, args=sys.argv[1:]): <NEW_LINE> <INDENT> args = self.parser.parse_args(args) <NEW_LINE> log_level = logging.INFO <NEW_LINE> log_format = '%(message)s' <NEW_LINE> if args.quiet: <NEW_LINE> <INDENT> log_level = logging.WARN <NEW_LINE> <DEDENT> elif args.debug: <NEW_LINE> <INDENT> log_level = logging.DEBUG <NEW_LINE> log_format = '[%(asctime)s] %(name)s - %(levelname)s: %(message)s' <NEW_LINE> <DEDENT> logging.basicConfig(level=log_level, format=log_format) <NEW_LINE> self.log = logging.getLogger("main") <NEW_LINE> self.log.debug("StackStrap starting up") <NEW_LINE> self.log.debug("Command: %s" % args.command) <NEW_LINE> try: <NEW_LINE> <INDENT> self.commands[args.command].main(args) <NEW_LINE> <DEDENT> except CommandError as e: <NEW_LINE> <INDENT> self.log.error(str(e)) <NEW_LINE> sys.exit(1)
The main CLI interface for StackStrap
6259902056b00c62f0fb3742
class TimeCodesTreeprocessor(markdown.treeprocessors.Treeprocessor): <NEW_LINE> <INDENT> def run(self, doc): <NEW_LINE> <INDENT> fill_missing_ends(doc)
This Tree Processor adds explicit end times to timed sections where a subsequent sibling element has a start time.
62599020462c4b4f79dbc88e
class EncoderLayerDiff(nn.Module): <NEW_LINE> <INDENT> def __init__(self, size, attr_num, self_attn_h, self_attn_g, feed_forward_h, feed_forward_g, dropout): <NEW_LINE> <INDENT> super(EncoderLayerDiff, self).__init__() <NEW_LINE> self.attr_num = attr_num <NEW_LINE> self.self_attn_h = self_attn_h <NEW_LINE> self.self_attn_g = self_attn_g <NEW_LINE> self.feed_forward_h = clones(feed_forward_h, attr_num) <NEW_LINE> self.feed_forward_g = clones(feed_forward_g, attr_num) <NEW_LINE> self.sublayer = clones(SublayerConnection(size, dropout), 2) <NEW_LINE> self.sublayer_g = clones(SublayerConnection(size, dropout), attr_num) <NEW_LINE> self.sublayer_h = clones(SublayerConnection(size, dropout), attr_num) <NEW_LINE> self.norm = LayerNorm(size) <NEW_LINE> self.size = size <NEW_LINE> <DEDENT> def forward(self, x_h, x_g): <NEW_LINE> <INDENT> x_h_new = self.sublayer[0](x_h, lambda x: self.self_attn_h(x, x, x, False)) <NEW_LINE> x_g_new = self.sublayer[1](x_g, lambda x: self.self_attn_g(x, self.norm(x_h), self.norm(x_h), False)) <NEW_LINE> g_group = [] <NEW_LINE> h_group = [] <NEW_LINE> for attr_id in range(self.attr_num): <NEW_LINE> <INDENT> h = self.sublayer_h[attr_id](x_h_new[:, attr_id, :], self.feed_forward_h[attr_id]) <NEW_LINE> h_group.append(h) <NEW_LINE> g = self.sublayer_g[attr_id](x_g_new[:, attr_id, :], self.feed_forward_g[attr_id]) <NEW_LINE> g_group.append(g) <NEW_LINE> <DEDENT> return torch.stack(h_group, dim=1), torch.stack(g_group, dim=1)
Encoder is made up of self-attn and feed forward (defined below)
62599020a8ecb033258720a3
class _SQLConnected(Extension): <NEW_LINE> <INDENT> arguments = [ qm.fields.TextField( name = "db_name", title = "Database name", description = "The PostgreSQL database to connect to.", verbatim = "true", default_value = ""), qm.fields.TextField( name = "db_module", title = "Database module", description = "The DB 2.0 module to use.", verbatim = "true", default_value = "pgdb"), qm.fields.PythonField( name = "connection"), ] <NEW_LINE> def __init__(self, arguments = None, **args): <NEW_LINE> <INDENT> if arguments: args.update(arguments) <NEW_LINE> super(_SQLConnected, self).__init__(**args) <NEW_LINE> if not self.connection: <NEW_LINE> <INDENT> self.connection = Connection(self.db_module, database=self.db_name)
Mixin class for classes that need a database connection.
62599020d18da76e235b788f
class LoginCheck(object): <NEW_LINE> <INDENT> def process_request(self, request): <NEW_LINE> <INDENT> if re.search(r'^/admin/', request.path) and not re.search(r'^/admin/account', request.path): <NEW_LINE> <INDENT> if 'admin' not in request.session: <NEW_LINE> <INDENT> return HttpResponseRedirect("/admin/account") <NEW_LINE> <DEDENT> <DEDENT> if re.search(r'^/admin/account', request.path): <NEW_LINE> <INDENT> if 'admin' in request.session: <NEW_LINE> <INDENT> return HttpResponseRedirect("/admin/")
检查用户是否登录
6259902066673b3332c31271
@cors_preflight('GET,OPTIONS') <NEW_LINE> @API.route('', methods=['GET', 'OPTIONS']) <NEW_LINE> class Documents(Resource): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> @TRACER.trace() <NEW_LINE> @cors.crossdomain(origin='*') <NEW_LINE> def get(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> doc = DocumentService.fetch_latest_document(DocumentType.AFFIDAVIT.value) <NEW_LINE> if doc is None: <NEW_LINE> <INDENT> return {'message': 'The requested document could not be found.'}, http_status.HTTP_404_NOT_FOUND <NEW_LINE> <DEDENT> if doc.as_dict().get('content_type', None) == ContentType.PDF.value: <NEW_LINE> <INDENT> return send_from_directory('static', filename=doc.as_dict()['content'], as_attachment=True) <NEW_LINE> <DEDENT> <DEDENT> except BusinessException as exception: <NEW_LINE> <INDENT> response, status = {'code': exception.code, 'message': exception.message}, exception.status_code <NEW_LINE> <DEDENT> return response, status
Resource for managing the affidavit. Separate resource is created since affidavit is accessible without authentication.
6259902021a7993f00c66e03
class Summation(Process): <NEW_LINE> <INDENT> order = 2 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def set_options(self, parser): <NEW_LINE> <INDENT> parser.add_option("--sum", action="store_true", default="False", dest="sum", help="Sum of all selected nodes") <NEW_LINE> parser.add_option("--sum-all", type="string" , default="", dest="sum_all", help="Sum of all stats") <NEW_LINE> <DEDENT> def do_sum(self, node, value = 0.0): <NEW_LINE> <INDENT> for key,val in node.items(): <NEW_LINE> <INDENT> if type(val) == dict: <NEW_LINE> <INDENT> value = self.do_sum(val, value) <NEW_LINE> <DEDENT> elif type(val) == list: <NEW_LINE> <INDENT> if len(val) == 0: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if type(val[0]) == str: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if value == 0.0: <NEW_LINE> <INDENT> value = [] <NEW_LINE> <DEDENT> value = map(sum, zip(val, value)) <NEW_LINE> <DEDENT> elif type(val) == int or type(val) == float: <NEW_LINE> <INDENT> value += val <NEW_LINE> <DEDENT> <DEDENT> return value <NEW_LINE> <DEDENT> def sum(self, stats): <NEW_LINE> <INDENT> summed = [] <NEW_LINE> for stat in stats: <NEW_LINE> <INDENT> sum_stat = {} <NEW_LINE> sum_val = self.do_sum(stat) <NEW_LINE> key = stat.keys()[0] <NEW_LINE> sum_stat[key] = sum_val <NEW_LINE> summed.append(sum_stat) <NEW_LINE> <DEDENT> return summed <NEW_LINE> <DEDENT> def do_sum_merge(self, node, merge_node): <NEW_LINE> <INDENT> for key,val in node.items(): <NEW_LINE> <INDENT> if type(val) == dict: <NEW_LINE> <INDENT> self.do_sum_merge(val, merge_node[key]) <NEW_LINE> <DEDENT> elif type(val) == list: <NEW_LINE> <INDENT> if len(val) == 0: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if type(val[0]) == str: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> merge_node[key] = map(sum, zip(val, merge_node[key])) <NEW_LINE> <DEDENT> elif type(val) == int or type(val) == float: <NEW_LINE> <INDENT> merge_node[key] += val <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def sum_all(self, 
stats, name): <NEW_LINE> <INDENT> summed = { name : {}} <NEW_LINE> for stat in stats: <NEW_LINE> <INDENT> if len(summed[name]) == 0: <NEW_LINE> <INDENT> summed[name] = stat[stat.keys()[0]] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.do_sum_merge(stat[stat.keys()[0]], summed[name]) <NEW_LINE> <DEDENT> <DEDENT> return [summed] <NEW_LINE> <DEDENT> def process(self, stats, options): <NEW_LINE> <INDENT> if options.sum_all != "": <NEW_LINE> <INDENT> stats = self.sum_all(stats, options.sum_all) <NEW_LINE> <DEDENT> if options.sum == True: <NEW_LINE> <INDENT> stats = self.sum(stats) <NEW_LINE> <DEDENT> return stats
Sum all the nodes of filtered stats
6259902063f4b57ef00864b5
class InstrumentDriver(SingleConnectionInstrumentDriver): <NEW_LINE> <INDENT> def get_resource_params(self): <NEW_LINE> <INDENT> return DriverParameter.list() <NEW_LINE> <DEDENT> def _build_protocol(self): <NEW_LINE> <INDENT> self._protocol = Protocol(Prompt, NEWLINE, self._driver_event)
InstrumentDriver subclass. Subclasses SingleConnectionInstrumentDriver with connection state machine.
62599020d18da76e235b7890
class WorkflowEnabledMeta(type): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def _add_workflow(mcs, field_name, state_field, attrs): <NEW_LINE> <INDENT> attrs[field_name] = StateProperty(state_field.workflow, field_name) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _find_workflows(mcs, attrs): <NEW_LINE> <INDENT> workflows = {} <NEW_LINE> for attribute, value in attrs.items(): <NEW_LINE> <INDENT> if isinstance(value, Workflow): <NEW_LINE> <INDENT> workflows[attribute] = StateField(value) <NEW_LINE> <DEDENT> <DEDENT> return workflows <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _add_transitions(mcs, field_name, workflow, attrs, implems=None): <NEW_LINE> <INDENT> new_implems = ImplementationList(field_name, workflow) <NEW_LINE> if implems: <NEW_LINE> <INDENT> new_implems.load_parent_implems(implems) <NEW_LINE> <DEDENT> new_implems.transform(attrs) <NEW_LINE> return new_implems <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _register_hooks(mcs, cls, implems): <NEW_LINE> <INDENT> for implem_list in implems.values(): <NEW_LINE> <INDENT> implem_list.register_hooks(cls) <NEW_LINE> <DEDENT> <DEDENT> def __new__(mcs, name, bases, attrs): <NEW_LINE> <INDENT> workflows = {} <NEW_LINE> implems = {} <NEW_LINE> for base in reversed(bases): <NEW_LINE> <INDENT> if hasattr(base, '_workflows'): <NEW_LINE> <INDENT> workflows.update(base._workflows) <NEW_LINE> implems.update(base._xworkflows_implems) <NEW_LINE> <DEDENT> <DEDENT> workflows.update(mcs._find_workflows(attrs)) <NEW_LINE> for field, state_field in workflows.items(): <NEW_LINE> <INDENT> mcs._add_workflow(field, state_field, attrs) <NEW_LINE> implems[field] = mcs._add_transitions( field, state_field.workflow, attrs, implems.get(field)) <NEW_LINE> <DEDENT> attrs['_workflows'] = workflows <NEW_LINE> attrs['_xworkflows_implems'] = implems <NEW_LINE> cls = super(WorkflowEnabledMeta, mcs).__new__(mcs, name, bases, attrs) <NEW_LINE> mcs._register_hooks(cls, implems) <NEW_LINE> return cls
Base metaclass for all Workflow Enabled objects. Defines: - one class attribute for each of the attached workflows, - a '_workflows' attribute, a dict mapping each field_name to the related Workflow, - a '_xworkflows_implems' attribute, a dict mapping each field_name to a dict of related ImplementationProperty. - one class attribute for each transition for each attached workflow
62599020a8ecb033258720a5
class GalleryExtension(command.CommandExtension): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def defaultConfig(): <NEW_LINE> <INDENT> config = command.CommandExtension.defaultConfig() <NEW_LINE> return config <NEW_LINE> <DEDENT> def extend(self, reader, renderer): <NEW_LINE> <INDENT> self.requires(core, command, media) <NEW_LINE> self.addCommand(reader, CardComponent()) <NEW_LINE> self.addCommand(reader, GalleryComponent()) <NEW_LINE> renderer.add('Card', RenderCard()) <NEW_LINE> renderer.add('CardImage', RenderCardImage()) <NEW_LINE> renderer.add('CardContent', RenderCardContent()) <NEW_LINE> renderer.add('CardReveal', RenderCardReveal()) <NEW_LINE> renderer.add('CardTitle', RenderCardTitle()) <NEW_LINE> renderer.add('Gallery', RenderGallery()) <NEW_LINE> if isinstance(renderer, LatexRenderer): <NEW_LINE> <INDENT> renderer.addPackage('tcolorbox') <NEW_LINE> renderer.addPackage('xparse') <NEW_LINE> renderer.addPreamble('\\definecolor{card-frame}{RGB}{0,88,151}') <NEW_LINE> renderer.addPreamble(CARD_LATEX)
Adds commands needed to create image galleries.
6259902021bff66bcd723aea
class GitHub(RequestClient): <NEW_LINE> <INDENT> BASE = "https://api.github.com" <NEW_LINE> REPO = BASE + "/repos/kyb3r/modmail" <NEW_LINE> HEAD = REPO + "/git/refs/heads/master" <NEW_LINE> MERGE_URL = BASE + "/repos/{username}/modmail/merges" <NEW_LINE> FORK_URL = REPO + "/forks" <NEW_LINE> STAR_URL = BASE + "/user/starred/kyb3r/modmail" <NEW_LINE> def __init__(self, bot, access_token: str = "", username: str = "", **kwargs): <NEW_LINE> <INDENT> super().__init__(bot) <NEW_LINE> self.access_token = access_token <NEW_LINE> self.username = username <NEW_LINE> self.avatar_url: str = kwargs.pop("avatar_url", "") <NEW_LINE> self.url: str = kwargs.pop("url", "") <NEW_LINE> if self.access_token: <NEW_LINE> <INDENT> self.headers = {"Authorization": "token " + str(access_token)} <NEW_LINE> <DEDENT> <DEDENT> async def update_repository(self, sha: str = None) -> Optional[dict]: <NEW_LINE> <INDENT> if not self.username: <NEW_LINE> <INDENT> raise commands.CommandInvokeError("Username not found.") <NEW_LINE> <DEDENT> if sha is None: <NEW_LINE> <INDENT> resp: dict = await self.request(self.HEAD) <NEW_LINE> sha = resp["object"]["sha"] <NEW_LINE> <DEDENT> payload = {"base": "master", "head": sha, "commit_message": "Updating bot"} <NEW_LINE> merge_url = self.MERGE_URL.format(username=self.username) <NEW_LINE> resp = await self.request(merge_url, method="POST", payload=payload) <NEW_LINE> if isinstance(resp, dict): <NEW_LINE> <INDENT> return resp <NEW_LINE> <DEDENT> <DEDENT> async def fork_repository(self) -> None: <NEW_LINE> <INDENT> await self.request(self.FORK_URL, method="POST") <NEW_LINE> <DEDENT> async def has_starred(self) -> bool: <NEW_LINE> <INDENT> resp = await self.request(self.STAR_URL, return_response=True) <NEW_LINE> return resp.status == 204 <NEW_LINE> <DEDENT> async def star_repository(self) -> None: <NEW_LINE> <INDENT> await self.request(self.STAR_URL, method="PUT", headers={"Content-Length": "0"}) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> async def login(cls, bot) 
-> "GitHub": <NEW_LINE> <INDENT> self = cls(bot, bot.config.get("github_access_token")) <NEW_LINE> resp: dict = await self.request("https://api.github.com/user") <NEW_LINE> self.username: str = resp["login"] <NEW_LINE> self.avatar_url: str = resp["avatar_url"] <NEW_LINE> self.url: str = resp["html_url"] <NEW_LINE> logger.info(info(f"GitHub logged in to: {self.username}")) <NEW_LINE> return self
The client for interacting with GitHub API. Parameters ---------- bot : Bot The Modmail bot. access_token : str, optional GitHub's access token. username : str, optional GitHub username. avatar_url : str, optional URL to the avatar in GitHub. url : str, optional URL to the GitHub profile. Attributes ---------- bot : Bot The Modmail bot. access_token : str GitHub's access token. username : str GitHub username. avatar_url : str URL to the avatar in GitHub. url : str URL to the GitHub profile. Class Attributes ---------------- BASE : str GitHub API base URL. REPO : str Modmail repo URL for GitHub API. HEAD : str Modmail HEAD URL for GitHub API. MERGE_URL : str URL for merging upstream to master. FORK_URL : str URL to fork Modmail. STAR_URL : str URL to star Modmail.
625990205166f23b2e24425b
class NoamLR(lr_scheduler._LRScheduler): <NEW_LINE> <INDENT> def __init__(self, optimizer, warmup_steps=4000): <NEW_LINE> <INDENT> self.warmup_steps = warmup_steps <NEW_LINE> super(NoamLR, self).__init__(optimizer) <NEW_LINE> <DEDENT> def scale(self, step): <NEW_LINE> <INDENT> return self.warmup_steps ** 0.5 * min(step ** (-0.5), step * self.warmup_steps ** (-1.5)) <NEW_LINE> <DEDENT> def get_lr(self): <NEW_LINE> <INDENT> last_epoch = max(1, self.last_epoch) <NEW_LINE> scale = self.scale(last_epoch) <NEW_LINE> return [base_lr * scale for base_lr in self.base_lrs]
Noam Learning rate schedule. Increases the learning rate linearly for the first `warmup_steps` training steps, then decreases it proportional to the inverse square root of the step number. ^ / \ / ` / ` / ` / ` / ` / ` / ` / ` / ` Parameters ---------- optimizer : torch.optim.Optimizer Optimiser instance to modify the learning rate of. warmup_steps : int The number of steps to linearly increase the learning rate. Notes ----- If step <= warmup_steps, scale = step / warmup_steps If step > warmup_steps, scale = (warmup_steps ^ 0.5) / (step ^ 0.5)
62599020507cdc57c63a5c2c
class WorkplaceForm(BaseForm): <NEW_LINE> <INDENT> schema = { 'workplaces': { 'required': True, 'empty': False, 'type': 'list', 'schema': { 'required': True, 'empty': False, 'type': 'dict', 'schema': { 'id': { 'type': 'integer', 'required': False }, 'position': { 'type': 'string', 'empty': False, 'required': True }, 'resume_id': { 'type': 'integer', 'empty': False, 'required': True, 'validator': resume_exist }, 'company': { 'type': 'string', 'empty': False, 'required': True }, 'description': { 'type': 'string', 'empty': False, 'required': True }, 'start_date': { 'type': 'string', 'regex': r'^\d{4}-\d{2}-\d{2}$', 'empty': False, 'required': True }, 'end_date': { 'type': 'string', 'regex': r'^\d{4}-\d{2}-\d{2}$', 'required': True } } } } } <NEW_LINE> def submit(self): <NEW_LINE> <INDENT> if not self.is_valid(): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> with transaction.atomic(): <NEW_LINE> <INDENT> response_list = [] <NEW_LINE> workplaces = self.params.get('workplaces') <NEW_LINE> for wp in workplaces: <NEW_LINE> <INDENT> company_name = wp.pop('company') <NEW_LINE> company, created = Company.objects.get_or_create( name=company_name) <NEW_LINE> if wp.get('id'): <NEW_LINE> <INDENT> workplace = Workplace.objects.get(id=wp.get('id')) <NEW_LINE> for key, value in wp.items(): <NEW_LINE> <INDENT> setattr(workplace, key, value) <NEW_LINE> <DEDENT> workplace.company = company <NEW_LINE> workplace.save() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> workplace = Workplace.objects.create(**wp, company=company) <NEW_LINE> <DEDENT> response_list.append(workplace) <NEW_LINE> <DEDENT> self.objects = response_list <NEW_LINE> return True
Workplace form class.
62599020ac7a0e7691f73372
class ProviderUiManager: <NEW_LINE> <INDENT> def __init__(self, app): <NEW_LINE> <INDENT> self._app = app <NEW_LINE> self._items = {} <NEW_LINE> self.model = ProvidersModel(self._app.library, self._app) <NEW_LINE> <DEDENT> def create_item(self, name, text, symbol='♬ ', desc='', colorful_svg=None): <NEW_LINE> <INDENT> provider = self._app.library.get(name) <NEW_LINE> return ProviderUiItem(name, text, symbol, desc, colorful_svg=colorful_svg, provider=provider) <NEW_LINE> <DEDENT> def get_item(self, name): <NEW_LINE> <INDENT> return self._items.get(name) <NEW_LINE> <DEDENT> def add_item(self, uiitem): <NEW_LINE> <INDENT> name = uiitem.name <NEW_LINE> self.model.assoc(name, uiitem) <NEW_LINE> self._items[name] = uiitem <NEW_LINE> return True <NEW_LINE> <DEDENT> def remove_item(self, uiitem): <NEW_LINE> <INDENT> if isinstance(uiitem, ProviderUiItem): <NEW_LINE> <INDENT> name = uiitem.name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> name = uiitem <NEW_LINE> <DEDENT> if name in self._items: <NEW_LINE> <INDENT> self.model.remove(name) <NEW_LINE> self._items.remove(name) <NEW_LINE> return True <NEW_LINE> <DEDENT> return False
(alpha)
6259902021bff66bcd723aec
class FileIterator(object): <NEW_LINE> <INDENT> chunk_size = 4096 <NEW_LINE> def __init__(self, filename, start, stop): <NEW_LINE> <INDENT> self.filename = filename <NEW_LINE> self.fileobj = open(self.filename, 'rb') <NEW_LINE> if start: <NEW_LINE> <INDENT> self.fileobj.seek(start) <NEW_LINE> <DEDENT> if stop is not None: <NEW_LINE> <INDENT> self.length = stop - start <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.length = None <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> if self.length is not None and self.length <= 0: <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> chunk = self.fileobj.read(self.chunk_size) <NEW_LINE> if not chunk: <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> if self.length is not None: <NEW_LINE> <INDENT> self.length -= len(chunk) <NEW_LINE> if self.length < 0: <NEW_LINE> <INDENT> chunk = chunk[:self.length] <NEW_LINE> <DEDENT> <DEDENT> return chunk
Iterate over a file. FileIterator provides a simple file iterator, optionally allowing the user to specify start and end ranges for the file.
625990201d351010ab8f499f
class DataError(Exception): <NEW_LINE> <INDENT> def __str__(self): <NEW_LINE> <INDENT> return 'Data error!' <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return str(self)
Base class for all data errors.
625990205e10d32532ce404a
class TimeGPS(TimeFromEpoch): <NEW_LINE> <INDENT> name = 'gps' <NEW_LINE> unit = 1.0 / erfa.DAYSEC <NEW_LINE> epoch_val = '1980-01-06 00:00:19' <NEW_LINE> epoch_val2 = None <NEW_LINE> epoch_scale = 'tai' <NEW_LINE> epoch_format = 'iso'
GPS time: seconds from 1980-01-06 00:00:00 UTC For example, 630720013.0 is midnight on January 1, 2000. Notes ===== This implementation is strictly a representation of the number of seconds (including leap seconds) since midnight UTC on 1980-01-06. GPS can also be considered as a time scale which is ahead of TAI by a fixed offset (to within about 100 nanoseconds). For details, see https://www.usno.navy.mil/USNO/time/gps/usno-gps-time-transfer
625990206e29344779b014da
class Model: <NEW_LINE> <INDENT> def __init__(self, sess, model_path): <NEW_LINE> <INDENT> self.model_path = model_path <NEW_LINE> self.sess = sess <NEW_LINE> self.predictor = None <NEW_LINE> self._load_model() <NEW_LINE> <DEDENT> def _load_model(self): <NEW_LINE> <INDENT> tf.saved_model.loader.load(self.sess, [tf.saved_model.tag_constants.SERVING], self.model_path) <NEW_LINE> self.predictor = tf.contrib.predictor.from_saved_model(self.model_path) <NEW_LINE> <DEDENT> def predict(self, input_values_dict): <NEW_LINE> <INDENT> output = self.predictor(input_values_dict) <NEW_LINE> output_value = self._extract_predict_value(output) <NEW_LINE> return output_value <NEW_LINE> <DEDENT> def _extract_predict_value(self,output): <NEW_LINE> <INDENT> return output['prob']
load tensorflow pb model
625990201d351010ab8f49a0
class HasReferenceCountBase(Rule): <NEW_LINE> <INDENT> labels = [ _('Reference count must be:'), _('Reference count:')] <NEW_LINE> name = 'Objects with a reference count of <count>' <NEW_LINE> description = "Matches objects with a certain reference count" <NEW_LINE> category = _('General filters') <NEW_LINE> def prepare(self, db): <NEW_LINE> <INDENT> if self.list[0] == 'lesser than': <NEW_LINE> <INDENT> self.count_type = 0 <NEW_LINE> <DEDENT> elif self.list[0] == 'greater than': <NEW_LINE> <INDENT> self.count_type = 2 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.count_type = 1 <NEW_LINE> <DEDENT> self.userSelectedCount = int(self.list[1]) <NEW_LINE> <DEDENT> def apply(self, db, obj): <NEW_LINE> <INDENT> handle = obj.get_handle() <NEW_LINE> count = 0 <NEW_LINE> for item in db.find_backlink_handles(handle): <NEW_LINE> <INDENT> count += 1 <NEW_LINE> <DEDENT> if self.count_type == 0: <NEW_LINE> <INDENT> return count < self.userSelectedCount <NEW_LINE> <DEDENT> elif self.count_type == 2: <NEW_LINE> <INDENT> return count > self.userSelectedCount <NEW_LINE> <DEDENT> return count == self.userSelectedCount
Objects with a reference count of <count>.
6259902056b00c62f0fb374a
@dataclass <NEW_LINE> class Topics: <NEW_LINE> <INDENT> items: List[Topic] <NEW_LINE> def node(self): <NEW_LINE> <INDENT> return create_node('topics', None, self.items, {}) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def object(node): <NEW_LINE> <INDENT> return [Topic.object(n) for n in node]
Topics layer class
625990203eb6a72ae038b4f0
class BaseAdministrationTest(arrow.test.BaseTestCase): <NEW_LINE> <INDENT> credentials = ['superadmin'] <NEW_LINE> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> super(BaseAdministrationTest, cls).setUpClass() <NEW_LINE> cls.admin_client = cls.os.admin_client <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def tearDownClass(cls): <NEW_LINE> <INDENT> cls.admin_client.remove_server(CONF.fss.ip) <NEW_LINE> super(BaseAdministrationTest, cls).tearDownClass() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def add_server(cls, server_ip=None, server_user=None, server_passwd=None): <NEW_LINE> <INDENT> cls.admin_client.add_server(server_ip,server_user,server_passwd) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def remove_server(cls, fss_server=None): <NEW_LINE> <INDENT> cls.admin_client.remove_server(fss_server) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def create_customer(cls, customer_name, domain, admin_pass, retype_pass): <NEW_LINE> <INDENT> cls.admin_client.create_customer(customer_name, domain, admin_pass, retype_pass) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def remove_customer(cls, customer_name): <NEW_LINE> <INDENT> cls.admin_client.delete_customer(customer_name)
Base test case class for all Physical Resource GUI tests.
62599020a8ecb033258720ab
class Blobs(Game): <NEW_LINE> <INDENT> def __init__(self, board = None): <NEW_LINE> <INDENT> self.initial = GameState(to_move='R', utility=0, board=BlobsBoard(), moves=['L','R','U','D']) <NEW_LINE> <DEDENT> def actions(self, state): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def result(self, state, move): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def utility(self, state, player): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def terminal_test(self, state): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def display(self, state): <NEW_LINE> <INDENT> raise NotImplementedError
Play Blobs on a 6 x 6 board, with Max (first player) playing the red Blobs with marker 'R'. A state has the player to move, a cached utility, a list of moves in the form of the four directions (left 'L', right 'R', up 'U', and down 'D'), and a board, in the form of a BlobsBoard object. Marker is 'R' for the Red Player and 'G' for the Green Player. An empty position appears as '.' in the display and a 'o' represents an out of the board position.
62599020be8e80087fbbff02
class URLToken(FunctionToken): <NEW_LINE> <INDENT> def __init__(self, value): <NEW_LINE> <INDENT> super(URLToken, self).__init__(value)
A token representing a URL function Parameters ---------- value : str The value of the url function
62599020d164cc6175821e02
class IdxINDEXField(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'idxINDEX_field' <NEW_LINE> id_idxINDEX = db.Column(db.MediumInteger(9, unsigned=True), db.ForeignKey(IdxINDEX.id), primary_key=True) <NEW_LINE> id_field = db.Column(db.MediumInteger(9, unsigned=True), db.ForeignKey(Field.id), primary_key=True) <NEW_LINE> regexp_punctuation = db.Column(db.String(255), nullable=False, server_default='[.,:;?!"]') <NEW_LINE> regexp_alphanumeric_separators = db.Column(db.String(255), nullable=False) <NEW_LINE> idxINDEX = db.relationship(IdxINDEX, backref='fields', lazy='joined', innerjoin=True) <NEW_LINE> field = db.relationship(Field, backref='idxINDEXes', lazy='joined', innerjoin=True) <NEW_LINE> @classmethod <NEW_LINE> def get_field_tokenizers(cls): <NEW_LINE> <INDENT> return db.session.query(Field.name, IdxINDEX.tokenizer).all()
Represent a IdxINDEXField record.
6259902056b00c62f0fb374c
class DateRangeDialog(BasicDialog): <NEW_LINE> <INDENT> title = _(u'Select a date range') <NEW_LINE> size = (-1, -1) <NEW_LINE> def __init__(self, title=None, header_text=None): <NEW_LINE> <INDENT> title = title or self.title <NEW_LINE> header_text = '<b>%s</b>' % header_text if header_text else '' <NEW_LINE> BasicDialog.__init__(self, title=title, header_text=header_text) <NEW_LINE> self._setup_widgets() <NEW_LINE> <DEDENT> def confirm(self): <NEW_LINE> <INDENT> BasicDialog.confirm(self) <NEW_LINE> state = self.date_filter.get_state() <NEW_LINE> if isinstance(state, DateQueryState): <NEW_LINE> <INDENT> start, end = state.date, state.date <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> start, end = state.start, state.end <NEW_LINE> <DEDENT> self.retval = date_range(start=start, end=end) <NEW_LINE> <DEDENT> def _setup_widgets(self): <NEW_LINE> <INDENT> self.date_filter = DateSearchFilter(_(u'Date:')) <NEW_LINE> self.date_filter.clear_options() <NEW_LINE> self.date_filter.add_custom_options() <NEW_LINE> for option in [Today, Yesterday, LastWeek, LastMonth]: <NEW_LINE> <INDENT> self.date_filter.add_option(option) <NEW_LINE> <DEDENT> self.date_filter.select(position=0) <NEW_LINE> self.vbox.pack_start(self.date_filter, False, False) <NEW_LINE> self.date_filter.show_all()
A simple dialog for selecting a date range When confirmed, a :class:`date_range` object will be returned containing the information about the date range selected
625990206fece00bbaccc846
class InviteCodeList(db.Model): <NEW_LINE> <INDENT> id = db.Column(db.Integer, primary_key=True, autoincrement=True) <NEW_LINE> inviteCode = db.Column(db.String(50), index=True, nullable=False, unique=True) <NEW_LINE> codestatus = db.Column(db.Boolean, nullable=False) <NEW_LINE> def __init__(self, inviteCode, codestatus = True): <NEW_LINE> <INDENT> self.inviteCode = inviteCode <NEW_LINE> self.codestatus = codestatus
邀请码表
62599020d18da76e235b7894
class TestProductController(BaseTestCase): <NEW_LINE> <INDENT> def test_delete_product(self): <NEW_LINE> <INDENT> response = self.client.open( '/omogollo2/ServerAPI/1.0.0/product/{productId}'.format(product_id=56), method='DELETE') <NEW_LINE> self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) <NEW_LINE> <DEDENT> def test_get_product(self): <NEW_LINE> <INDENT> response = self.client.open( '/omogollo2/ServerAPI/1.0.0/product/{productId}'.format(product_id=56), method='GET') <NEW_LINE> self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) <NEW_LINE> <DEDENT> def test_put_product(self): <NEW_LINE> <INDENT> body = Product() <NEW_LINE> response = self.client.open( '/omogollo2/ServerAPI/1.0.0/product/{productId}'.format(product_id=56), method='PUT', data=json.dumps(body), content_type='application/json') <NEW_LINE> self.assert200(response, 'Response body is : ' + response.data.decode('utf-8'))
ProductController integration test stubs
625990201d351010ab8f49a4
class Meta: <NEW_LINE> <INDENT> model = Area <NEW_LINE> fields = ('id', 'name', 'area_type', 'area_type_display', 'northern_extent', 'mpoly')
Class opts.
62599020c432627299fa3e7f
class Player: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.name = "nameless" <NEW_LINE> self.position = 1 <NEW_LINE> <DEDENT> def setName(self, newval): <NEW_LINE> <INDENT> self.name = newval <NEW_LINE> <DEDENT> def toString(self): <NEW_LINE> <INDENT> info = self.name + "is on square number " + str(self.position) <NEW_LINE> return info
A class to represent a player in a board game
6259902066673b3332c3127b
class DailySummary(Link): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> Link.__init__(self, kwargs.pop('name', 'DailySummary')) <NEW_LINE> self._process_kwargs(kwargs, read_key=None, store_key=None, feature_cols=[], new_date_col='date', datetime_col=None, partitionby_cols=[]) <NEW_LINE> self.check_extra_kwargs(kwargs) <NEW_LINE> <DEDENT> def initialize(self): <NEW_LINE> <INDENT> return StatusCode.Success <NEW_LINE> <DEDENT> def execute(self): <NEW_LINE> <INDENT> ds = process_manager.service(DataStore) <NEW_LINE> self.logger.debug('Now executing link: {link}.', link=self.name) <NEW_LINE> def col_name(feat, func): <NEW_LINE> <INDENT> return '_'.join([feat, func.__name__, '0d']) <NEW_LINE> <DEDENT> df = ds[self.read_key] <NEW_LINE> default_agg_funcs = [f.min, f.mean, f.max, f.stddev, f.count, f.sum] <NEW_LINE> if type(self.feature_cols) is list: <NEW_LINE> <INDENT> agg_funcs = default_agg_funcs <NEW_LINE> agged_cols = [func(df[feat]).alias(col_name(feat, func)) for func in agg_funcs for feat in self.feature_cols] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> agged_cols = [] <NEW_LINE> for feat, agg_funcs in self.feature_cols.items(): <NEW_LINE> <INDENT> if not agg_funcs: <NEW_LINE> <INDENT> agg_funcs = default_agg_funcs <NEW_LINE> <DEDENT> agged_cols += [func(df[feat]).alias(col_name(feat, func)) for func in agg_funcs] <NEW_LINE> <DEDENT> <DEDENT> gb_cols = self.partitionby_cols + [self.new_date_col] <NEW_LINE> df_agged = df.withColumn(self.new_date_col, f.to_date(self.datetime_col)) .groupBy(gb_cols) .agg(*agged_cols) <NEW_LINE> ds[self.store_key] = df_agged <NEW_LINE> return StatusCode.Success <NEW_LINE> <DEDENT> def finalize(self): <NEW_LINE> <INDENT> return StatusCode.Success
Creates daily summary information from a timeseries dataframe. Each feature given from the input df will by default correspond to 6 columns in the output: min, mean, max, stddev, count, and sum. The columns are named like 'feature_stddev_0d' (0d since we look 0 days back into the past). The new dataframe will also contain the column `new_date_col` with the date, and all the identifying columns given in `partitionby_cols`.
62599020d164cc6175821e05
class TestUserGetInfo(MediaFireApiTestCaseWithSessionToken): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(TestUserGetInfo, self).setUp() <NEW_LINE> self.url = self.build_url('user/get_info') <NEW_LINE> <DEDENT> @responses.activate <NEW_LINE> def test_response(self): <NEW_LINE> <INDENT> body = self.load_response('test_user_info_response.json') <NEW_LINE> responses.add(responses.POST, self.url, body=body, status=200, content_type="application/json") <NEW_LINE> response = self.api.user_get_info() <NEW_LINE> self.assertTrue('user_info' in response) <NEW_LINE> self.assertEqual(response['user_info']['first_name'], 'John')
Tests for user/get_info
62599020796e427e5384f60e
class SessionReceiveCountResponse: <NEW_LINE> <INDENT> def __init__(self, name: str, result: bool, message: str, count: int): <NEW_LINE> <INDENT> self.type = "session_receive_count_response" <NEW_LINE> self.name = name <NEW_LINE> self.result = result <NEW_LINE> self.message = message <NEW_LINE> self.count = count <NEW_LINE> return <NEW_LINE> <DEDENT> def to_json(self): <NEW_LINE> <INDENT> return json.dumps({"type": self.type, "name": self.name, "result": self.result, "message": self.message, "count": self.count}) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_dict(d): <NEW_LINE> <INDENT> return SessionReceiveCountResponse(d.get("name"), d.get("result"), d.get("message"), d.get("count"))
Return count of server's received messages.
62599020a8ecb033258720af
class Manager: <NEW_LINE> <INDENT> def __init__(self, content, fileOut): <NEW_LINE> <INDENT> self.content = content <NEW_LINE> self.fileOut = open(fileOut, "w") <NEW_LINE> self.parse() <NEW_LINE> self.fileOut.write(self.markup) <NEW_LINE> self.fileOut.close() <NEW_LINE> <DEDENT> def parse(self): <NEW_LINE> <INDENT> self.lines = [Line(line) for line in self.content] <NEW_LINE> preambleStart = 0 <NEW_LINE> nl = len(self.lines) <NEW_LINE> id = zip(range(nl), self.lines) <NEW_LINE> level1 = [i for i, line in id if line.level == 0] <NEW_LINE> preambleEnd = level1[1] <NEW_LINE> preamble = self.lines[0:preambleEnd] <NEW_LINE> self.level1 = level1 <NEW_LINE> preambleMarkup = [] <NEW_LINE> for line in preamble: <NEW_LINE> <INDENT> if line.content.count("@"): <NEW_LINE> <INDENT> tmp = line.content.split("@")[1] <NEW_LINE> tmp = tmp.split() <NEW_LINE> env = tmp[0] <NEW_LINE> content = " ".join(tmp[1:]) <NEW_LINE> mu = "\\%s{%s}" % (env, content) <NEW_LINE> preambleMarkup.append(mu) <NEW_LINE> <DEDENT> <DEDENT> self.preamble = preambleMarkup <NEW_LINE> self.preambleLines = preamble <NEW_LINE> self.documentLines = self.lines[preambleEnd:]
Abstract class for LaTeX document classes
625990201d351010ab8f49a6
class ProductionConfig(Config): <NEW_LINE> <INDENT> DATABASE = '' <NEW_LINE> DEBUG = False <NEW_LINE> TESTING = False <NEW_LINE> ENVIRONMENT = 'production' <NEW_LINE> HOST = '' <NEW_LINE> USER = '' <NEW_LINE> PASSWORD = ''
Configurations for Production.
62599020507cdc57c63a5c36
class NotASpecDataFile(Exception): <NEW_LINE> <INDENT> pass
content of file is not SPEC data (first line must start with ``#F``)
62599020d164cc6175821e09
class Perceptron(object): <NEW_LINE> <INDENT> def __init__(self, eta=0.01, n_iter=10): <NEW_LINE> <INDENT> self.eta = eta <NEW_LINE> self.n_iter = n_iter <NEW_LINE> <DEDENT> def fit(self, X, y): <NEW_LINE> <INDENT> self.w_ = np.zeros(1 + X.shape[1]) <NEW_LINE> self.errors_ = [] <NEW_LINE> for _ in range(self.n_iter): <NEW_LINE> <INDENT> errors = 0 <NEW_LINE> for xi, target in zip(X, y): <NEW_LINE> <INDENT> update = self.eta * (target - self.predict(xi)) <NEW_LINE> self.w_[1:] += update * xi <NEW_LINE> self.w_[0] += update <NEW_LINE> errors += int(update != 0.0) <NEW_LINE> <DEDENT> self.errors_.append(errors) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def net_input(self, X): <NEW_LINE> <INDENT> return np.dot(X, self.w_[1:]) + self.w_[0] <NEW_LINE> <DEDENT> def predict(self, X): <NEW_LINE> <INDENT> return np.where(self.net_input(X) >= 0.0, 1, -1)
Perceptron classifier. Parameters ------------ eta : float Learning rate (between 0.0 and 1.0) n_iter : int Passes over the training dataset Attributes ----------- w_ : 1d-array Weights after fitting errors_ : list Number of misclassifications in every epoch
62599020287bf620b6272a7f
class ParseWith(ParseLilyPond): <NEW_LINE> <INDENT> items = ( CloseBracket, ContextName, GrobName, ContextProperty, EqualSign, DotPath, ) + toplevel_base_items
Parses the expression after ``\with {``, leaving at ``}``
62599020507cdc57c63a5c38
class MultiLine(Unicode): <NEW_LINE> <INDENT> __type_name__ = None <NEW_LINE> class Attributes(Unicode.Attributes): <NEW_LINE> <INDENT> dim = None <NEW_LINE> <DEDENT> def __new__(cls, dim=None, **kwargs): <NEW_LINE> <INDENT> assert dim in (None,2,3) <NEW_LINE> if dim is not None: <NEW_LINE> <INDENT> kwargs['dim'] = dim <NEW_LINE> kwargs['pattern'] = _get_multilinestring_pattern(dim) <NEW_LINE> kwargs['type_name'] = 'multiLine%dd' % dim <NEW_LINE> <DEDENT> retval = SimpleModel.__new__(cls, **kwargs) <NEW_LINE> retval.__namespace__ = 'http://spyne.io/schema' <NEW_LINE> return retval
A Multipolygon type whose native format is a WKT string. You can use :func:`shapely.wkt.loads` to get a proper multipolygon type.
625990203eb6a72ae038b4f8
class OrgTeacherView(View): <NEW_LINE> <INDENT> def get(self, request, org_id): <NEW_LINE> <INDENT> current_page = 'teacher' <NEW_LINE> course_org = CourseOrg.objects.get(id=int(org_id)) <NEW_LINE> all_teachers = course_org.teacher_set.all() <NEW_LINE> has_fav = False <NEW_LINE> if request.user.is_authenticated(): <NEW_LINE> <INDENT> if UserFavorite.objects.filter(user=request.user, fav_id=course_org.id, fav_type=2): <NEW_LINE> <INDENT> has_fav = True <NEW_LINE> <DEDENT> <DEDENT> return render(request, 'org-detail-teachers.html', { 'all_teachers': all_teachers, 'course_org': course_org, 'current_page': current_page, 'has_fav': has_fav })
机构教师
62599020ac7a0e7691f7337e
class AllocateShareBandwidthResponseSchema(schema.ResponseSchema): <NEW_LINE> <INDENT> fields = { "ShareBandwidthId": fields.Str( required=False, load_from="ShareBandwidthId" ), }
AllocateShareBandwidth - 开通共享带宽
62599020796e427e5384f612
class DuplicateZoneError(Exception): <NEW_LINE> <INDENT> pass
Duplicate zone error. Thrown whenever an attempt to create a zone fails because a zone with the same domain already exists.
62599020a8ecb033258720b3
class CreateUser(graphene.Mutation): <NEW_LINE> <INDENT> class Arguments: <NEW_LINE> <INDENT> email = graphene.String(required=True) <NEW_LINE> location = graphene.String(required=False) <NEW_LINE> name = graphene.String(required=True) <NEW_LINE> picture = graphene.String() <NEW_LINE> <DEDENT> user = graphene.Field(User) <NEW_LINE> def mutate(self, info, **kwargs): <NEW_LINE> <INDENT> user = UserModel(**kwargs) <NEW_LINE> if not verify_email(user.email): <NEW_LINE> <INDENT> raise GraphQLError("This email is not allowed") <NEW_LINE> <DEDENT> payload = { 'model': UserModel, 'field': 'email', 'value': kwargs['email'] } <NEW_LINE> with SaveContextManager(user, 'User email', payload): <NEW_LINE> <INDENT> notification_settings = NotificationModel(user_id=user.id) <NEW_LINE> notification_settings.save() <NEW_LINE> return CreateUser(user=user)
Mutation to create a user
62599020462c4b4f79dbc89e
class Table(object): <NEW_LINE> <INDENT> def __init__(self, seats, sb_amount, bb_amount, buy_in, ante=0): <NEW_LINE> <INDENT> self.seats = {} <NEW_LINE> self._build_seats(seats) <NEW_LINE> self.sb_amount = sb_amount <NEW_LINE> self.bb_amount = bb_amount <NEW_LINE> self.ante = ante <NEW_LINE> self.buy_in = buy_in <NEW_LINE> self.deck = [] <NEW_LINE> self.pots = [] <NEW_LINE> self.last_order = [] <NEW_LINE> self.player_order = [] <NEW_LINE> self.community_cards = [] <NEW_LINE> self.bet_increment = bb_amount <NEW_LINE> self.current_bet = 0 <NEW_LINE> <DEDENT> def _build_seats(self, seats): <NEW_LINE> <INDENT> if seats not in range(2, 10): <NEW_LINE> <INDENT> raise ValueError("Tables must have between 2 and 9 players.") <NEW_LINE> <DEDENT> for i in range(1, seats + 1): <NEW_LINE> <INDENT> self.seats[i] = None <NEW_LINE> <DEDENT> <DEDENT> def join(self, key, player, stack): <NEW_LINE> <INDENT> if player in self.seats.values(): <NEW_LINE> <INDENT> raise ValueError("You can't join the same game twice") <NEW_LINE> <DEDENT> min_buy = self.buy_in[0] <NEW_LINE> max_buy = self.buy_in[1] + 1 <NEW_LINE> if stack not in range(min_buy, max_buy): <NEW_LINE> <INDENT> raise ValueError('Buy in must be between {} and {}.'.format(min_buy, self.buy_in[1])) <NEW_LINE> <DEDENT> if self.seats[key] == None: <NEW_LINE> <INDENT> self.seats[key] = player <NEW_LINE> self.seats[key].stack = stack <NEW_LINE> self.seats[key].table = self <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Don't be rude, this seat is taken.") <NEW_LINE> <DEDENT> <DEDENT> def quit(self, player): <NEW_LINE> <INDENT> player.table = None <NEW_LINE> player.stack = 0 <NEW_LINE> for k, v in self.seats.items(): <NEW_LINE> <INDENT> if v == player: <NEW_LINE> <INDENT> self.seats[k] = None <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def change_seat(self, player, ind): <NEW_LINE> <INDENT> if ind >= len(self.seats): <NEW_LINE> <INDENT> raise Exception("This seat doesn't exist!!") <NEW_LINE> <DEDENT> if self.seats[ind] is 
not None: <NEW_LINE> <INDENT> raise Exception("This seat is occupied!!") <NEW_LINE> <DEDENT> self.quit(player) <NEW_LINE> self.seats[ind] = player
Table holds the intermediate data needed for the app to drive the game ATTRIBUTES: @property {dict} seats A dict of Player obj representing seat order @property {int} sb_amount The small blind amount @property {int} bb_amount The big blind amount @property {int} ante The ante amount @property {list} buy_in A list of two values, min/max buy in @property {list} deck A list of strings representing cards @property {list} pots A list of pot objects @property {list} last_order A list of the last betting order @property {list} player_order A list of the current betting order sb first @property {list} community_cards A list containing card names @property {int} bet_increment The minimum amount the bet can be raised @property {int} current_bet Amount to call METHODS: @method _build_seats Creates the seats dict from seats num @method join Adds player object to any empty seat in seats dict @method quit Removes player from the seats dict @method change_seat Changes a players seat appropriately
625990201d351010ab8f49aa
class TrueFalseQuestion(Persistable): <NEW_LINE> <INDENT> def __init__(self, idevice, question="", isCorrect=False, feedback="", hint=""): <NEW_LINE> <INDENT> self.idevice = idevice <NEW_LINE> self.questionTextArea = TextAreaField(x_(u'Question:'), self.idevice.questionInstruc, question) <NEW_LINE> self.questionTextArea.idevice = idevice <NEW_LINE> self.isCorrect = isCorrect <NEW_LINE> self.feedbackTextArea = TextAreaField(x_(u'Feedback'), self.idevice.feedbackInstruc, feedback) <NEW_LINE> self.feedbackTextArea.idevice = idevice <NEW_LINE> self.hintTextArea = TextAreaField(x_(u'Hint'), self.idevice.hintInstruc, hint) <NEW_LINE> self.hintTextArea.idevice = idevice <NEW_LINE> <DEDENT> def getResourcesField(self, this_resource): <NEW_LINE> <INDENT> if hasattr(self, 'questionTextArea') and hasattr(self.questionTextArea, 'images'): <NEW_LINE> <INDENT> for this_image in self.questionTextArea.images: <NEW_LINE> <INDENT> if hasattr(this_image, '_imageResource') and this_resource == this_image._imageResource: <NEW_LINE> <INDENT> return self.questionTextArea <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if hasattr(self, 'feedbackTextArea') and hasattr(self.feedbackTextArea, 'images'): <NEW_LINE> <INDENT> for this_image in self.feedbackTextArea.images: <NEW_LINE> <INDENT> if hasattr(this_image, '_imageResource') and this_resource == this_image._imageResource: <NEW_LINE> <INDENT> return self.feedbackTextArea <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if hasattr(self, 'hintTextArea') and hasattr(self.hintTextArea, 'images'): <NEW_LINE> <INDENT> for this_image in self.hintTextArea.images: <NEW_LINE> <INDENT> if hasattr(this_image, '_imageResource') and this_resource == this_image._imageResource: <NEW_LINE> <INDENT> return self.hintTextArea <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def getRichTextFields(self): <NEW_LINE> <INDENT> fields_list = [] <NEW_LINE> if hasattr(self, 'questionTextArea'): <NEW_LINE> <INDENT> fields_list.append(self.questionTextArea) 
<NEW_LINE> <DEDENT> if hasattr(self, 'feedbackTextArea'): <NEW_LINE> <INDENT> fields_list.append(self.feedbackTextArea) <NEW_LINE> <DEDENT> if hasattr(self, 'hintTextArea'): <NEW_LINE> <INDENT> fields_list.append(self.hintTextArea) <NEW_LINE> <DEDENT> return fields_list <NEW_LINE> <DEDENT> def upgrade_setIdevice(self, idevice): <NEW_LINE> <INDENT> self.idevice = idevice <NEW_LINE> self.questionTextArea = TextAreaField(x_(u'Question:'), self.idevice.questionInstruc, self.question) <NEW_LINE> self.questionTextArea.idevice = self.idevice <NEW_LINE> self.feedbackTextArea = TextAreaField(x_(u'Feedback'), self.idevice.feedbackInstruc, self.feedback) <NEW_LINE> self.feedbackTextArea.idevice = self.idevice <NEW_LINE> self.hintTextArea = TextAreaField(x_(u'Hint'), self.idevice.hintInstruc, self.hint) <NEW_LINE> self.hintTextArea.idevice = self.idevice
A TrueFalse iDevice is built up of questions. Each question can be rendered as an XHTML element
62599020bf627c535bcb2349
class Returns(CustomFactor): <NEW_LINE> <INDENT> inputs = [USEquityPricing.close] <NEW_LINE> def compute(self, today, assets, out, close): <NEW_LINE> <INDENT> out[:] = (close[-1] - close[0]) / close[0]
Calculates the percent change in close price over the given window_length. **Default Inputs**: [USEquityPricing.close]
6259902021bff66bcd723af8
class Ogr2OgrExecOutput(ExecOutput): <NEW_LINE> <INDENT> @Config(ptype=str, default=None, required=True) <NEW_LINE> def dest_data_source(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @Config(ptype=str, default=None, required=False) <NEW_LINE> def dest_format(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @Config(ptype=str, default=None, required=False) <NEW_LINE> def lco(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @Config(ptype=str, default=None, required=False) <NEW_LINE> def spatial_extent(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @Config(ptype=str, default=None, required=False) <NEW_LINE> def gfs_template(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @Config(ptype=str, default=None, required=False) <NEW_LINE> def options(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @Config(ptype=bool, default=False, required=False) <NEW_LINE> def cleanup_input(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __init__(self, configdict, section): <NEW_LINE> <INDENT> ExecOutput.__init__(self, configdict, section, consumes=FORMAT.string) <NEW_LINE> self.ogr2ogr_cmd = 'ogr2ogr -f ' + self.dest_format + ' ' + self.dest_data_source <NEW_LINE> if self.spatial_extent: <NEW_LINE> <INDENT> self.ogr2ogr_cmd += ' -spat ' + self.spatial_extent <NEW_LINE> <DEDENT> if self.options: <NEW_LINE> <INDENT> self.ogr2ogr_cmd += ' ' + self.options <NEW_LINE> <DEDENT> self.first_run = True <NEW_LINE> <DEDENT> def write(self, packet): <NEW_LINE> <INDENT> if packet.data is None: <NEW_LINE> <INDENT> return packet <NEW_LINE> <DEDENT> ogr2ogr_cmd = self.ogr2ogr_cmd <NEW_LINE> if self.lco and self.first_run is True: <NEW_LINE> <INDENT> ogr2ogr_cmd += ' ' + self.lco <NEW_LINE> self.first_run = False <NEW_LINE> <DEDENT> if type(packet.data) is list: <NEW_LINE> <INDENT> for item in packet.data: <NEW_LINE> <INDENT> self.execute(ogr2ogr_cmd, item) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.execute(ogr2ogr_cmd, packet.data) <NEW_LINE> <DEDENT> return 
packet <NEW_LINE> <DEDENT> def execute(self, ogr2ogr_cmd, file_path): <NEW_LINE> <INDENT> file_ext = os.path.splitext(file_path) <NEW_LINE> gfs_path = file_ext[0] + '.gfs' <NEW_LINE> if self.gfs_template: <NEW_LINE> <INDENT> shutil.copy(self.gfs_template, gfs_path) <NEW_LINE> <DEDENT> self.execute_cmd(ogr2ogr_cmd + ' ' + file_path) <NEW_LINE> if self.cleanup_input: <NEW_LINE> <INDENT> os.remove(file_path) <NEW_LINE> if gfs_path and os.path.exists(gfs_path): <NEW_LINE> <INDENT> os.remove(gfs_path)
Executes an Ogr2Ogr command. Input is a file name to be processed. Output by calling Ogr2Ogr command. consumes=FORMAT.string
625990208c3a8732951f73ed
class Dish(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'dish' <NEW_LINE> id = db.Column(db.Integer, primary_key=True, autoincrement=True) <NEW_LINE> name = db.Column(db.String, nullable=False, unique=True) <NEW_LINE> description = db.Column(db.String, nullable=False, unique=True) <NEW_LINE> portion_count = db.Column(db.Integer, CheckConstraint('portion_count>0'), nullable=False) <NEW_LINE> type_of_dish = db.Column(db.Enum(TypesOfDish), nullable=False) <NEW_LINE> recipes = db.relationship('Recipe', backref='dish') <NEW_LINE> ingredients = db.relationship('Ingredient', secondary=DISH_AND_INGREDIENT, backref=db.backref('dishes', lazy='dynamic')) <NEW_LINE> def to_dict(self) -> dict: <NEW_LINE> <INDENT> dish_data = { 'id': self.id, 'name': self.name, 'description': self.description, 'portion_count': self.portion_count, 'type_of_dish': self.type_of_dish } <NEW_LINE> return dish_data <NEW_LINE> <DEDENT> def from_dict(self, data: dict): <NEW_LINE> <INDENT> for field in ('name', 'description', 'portion_count', 'type_of_dish'): <NEW_LINE> <INDENT> setattr(self, field, data.get(field)) <NEW_LINE> <DEDENT> <DEDENT> def __init__(self, description=None, name=None, portion_count=None, type_of_dish=None): <NEW_LINE> <INDENT> self.description = description <NEW_LINE> self.name = name <NEW_LINE> self.portion_count = portion_count <NEW_LINE> self.type_of_dish = type_of_dish <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f"Dish({self.id}, {self.name}, {self.description}, {self.portion_count}, {self.type_of_dish})"
Табличка блюда
62599020ac7a0e7691f73380
class Singleton: <NEW_LINE> <INDENT> def __init__(self, decorated): <NEW_LINE> <INDENT> self._decorated = decorated <NEW_LINE> <DEDENT> def Instance(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._instance <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> self._instance = self._decorated() <NEW_LINE> return self._instance <NEW_LINE> <DEDENT> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> raise TypeError('Singletons must be accessed through `Instance()`.') <NEW_LINE> <DEDENT> def __instancecheck__(self, inst): <NEW_LINE> <INDENT> return isinstance(inst, self._decorated)
A non-thread-safe helper class to ease implementing singletons. This should be used as a decorator -- not a metaclass -- to the class that should be a singleton. The decorated class can define one `__init__` function that takes only the `self` argument. Other than that, there are no restrictions that apply to the decorated class. To get the singleton instance, use the `Instance` method. Trying to use `__call__` will result in a `TypeError` being raised. Limitations: The decorated class cannot be inherited from. Source: http://stackoverflow.com/questions/31875/is-there-a-simple-elegant-way-to-define-singletons-in-python
625990206e29344779b014e6
class WebSocketClientDakara(WebSocketClient): <NEW_LINE> <INDENT> def set_default_callbacks(self): <NEW_LINE> <INDENT> self.set_callback("idle", lambda: None) <NEW_LINE> self.set_callback("playlist_entry", lambda playlist_entry: None) <NEW_LINE> self.set_callback("command", lambda command: None) <NEW_LINE> self.set_callback("connection_lost", lambda: None) <NEW_LINE> <DEDENT> def on_connected(self): <NEW_LINE> <INDENT> self.send_ready() <NEW_LINE> <DEDENT> def on_connection_lost(self): <NEW_LINE> <INDENT> self.callbacks["connection_lost"]() <NEW_LINE> <DEDENT> def receive_idle(self, content): <NEW_LINE> <INDENT> logger.debug("Received idle order") <NEW_LINE> self.callbacks["idle"]() <NEW_LINE> <DEDENT> def receive_playlist_entry(self, content): <NEW_LINE> <INDENT> logger.debug("Received new playlist entry %i order", content["id"]) <NEW_LINE> self.callbacks["playlist_entry"](content) <NEW_LINE> <DEDENT> def receive_command(self, content): <NEW_LINE> <INDENT> command = content["command"] <NEW_LINE> logger.debug("Received command %s order", command) <NEW_LINE> self.callbacks["command"](command) <NEW_LINE> <DEDENT> def send_ready(self): <NEW_LINE> <INDENT> logger.debug("Telling the server that the player is ready") <NEW_LINE> self.send("ready")
WebSocket client connected to the Dakara server. Example of use: >>> config = { ... "address": "www.example.com", ... "port": 8080, ... "login": "player", ... "password": "pass" ... } >>> http_client = HTTPClientDakara( ... config, ... enpoint_prefix="api/", ... ) >>> http_client.authenticate() >>> header = http_client.get_token_header() >>> from thread import Event >>> from queue import Queue >>> stop = Event() >>> errors = Queue() >>> ws_client = WebSocketClientDakara( ... stop, ... errors, ... config, ... enpoint="ws/playlist", ... header=header ... ) >>> ws_client.run() Args: stop (Event): Event to stop the program. errors (Queue): Queue of errors. config (dict): Configuration for the server, the same as HTTPClientDakara. endpoint (str): Enpoint of the WebSocket connection, added to the URL. header (dict): Header containing the authentication token.
62599020796e427e5384f614
class Cloud(object): <NEW_LINE> <INDENT> def __init__(self, name, endpoints=None, suffixes=None, is_active=False): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.endpoints = endpoints or CloudEndpoints() <NEW_LINE> self.suffixes = suffixes or CloudSuffixes() <NEW_LINE> self.is_active = is_active
Represents an Azure Cloud instance
62599020462c4b4f79dbc8a0
class DeferredDelete(extensions.V3APIExtensionBase): <NEW_LINE> <INDENT> name = "DeferredDelete" <NEW_LINE> alias = "os-deferred-delete" <NEW_LINE> namespace = ("http://docs.openstack.org/compute/ext/" "deferred-delete/api/v3") <NEW_LINE> version = 1 <NEW_LINE> def get_controller_extensions(self): <NEW_LINE> <INDENT> controller = DeferredDeleteController() <NEW_LINE> extension = extensions.ControllerExtension(self, 'servers', controller) <NEW_LINE> return [extension] <NEW_LINE> <DEDENT> def get_resources(self): <NEW_LINE> <INDENT> return []
Instance deferred delete.
625990201d351010ab8f49ac
class Super(Term): <NEW_LINE> <INDENT> def __init__(self, latex, index): <NEW_LINE> <INDENT> self.__latex = latex <NEW_LINE> self.__index = index <NEW_LINE> <DEDENT> def latex(self): <NEW_LINE> <INDENT> return self.__latex + "^{{ {0} }}".format(self.__index) <NEW_LINE> <DEDENT> def copy(self): <NEW_LINE> <INDENT> return Super(self.__latex, self.__index)
Super is a term with a four-vector superscript.
62599020c432627299fa3e87
class ProjectsFeedAdapter(BaseFeedAdapter): <NEW_LINE> <INDENT> implements(IFeedData) <NEW_LINE> adapts(IAddProject) <NEW_LINE> @property <NEW_LINE> def items(self, n_items=10): <NEW_LINE> <INDENT> if hasattr(self,'_items'): <NEW_LINE> <INDENT> return self._items <NEW_LINE> <DEDENT> cat = getToolByName(self.context, 'portal_catalog') <NEW_LINE> for brain in cat(portal_type='OpenProject', sort_on='created', sort_order='descending', sort_limit=n_items): <NEW_LINE> <INDENT> title = brain.Title <NEW_LINE> description = brain.Description <NEW_LINE> link = brain.getURL() <NEW_LINE> author = brain.lastModifiedAuthor <NEW_LINE> pubDate = brain.created <NEW_LINE> self.add_item(title=title, description=description, link=link, author=author, pubDate=pubDate) <NEW_LINE> <DEDENT> return self._items
feed for new projects
625990211d351010ab8f49ad
class ScatterPlotNode(BaseNode): <NEW_LINE> <INDENT> figure_number = 0 <NEW_LINE> def __init__(self, plot_ms, channels = None, **kwargs): <NEW_LINE> <INDENT> super(ScatterPlotNode, self).__init__(**kwargs) <NEW_LINE> self.set_permanent_attributes( plot_ms = plot_ms, channels = channels, number_of_channels = None, colors = set(["r", "b"]), class_colors = dict()) <NEW_LINE> self.figure_number = ScatterPlotNode.figure_number <NEW_LINE> ScatterPlotNode.figure_number += 1 <NEW_LINE> pylab.ion() <NEW_LINE> figure = pylab.figure(self.figure_number, figsize=(21, 11)) <NEW_LINE> figure.subplots_adjust(left=0.01, bottom=0.01, right=0.99, top= 0.99, wspace=0.2, hspace=0.2) <NEW_LINE> pylab.draw() <NEW_LINE> <DEDENT> def is_trainable(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def is_supervised(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def _train(self, data, label): <NEW_LINE> <INDENT> if self.number_of_channels == None: <NEW_LINE> <INDENT> if self.channels == None: <NEW_LINE> <INDENT> self.channels = data.channel_names[:7] <NEW_LINE> <DEDENT> elif len(self.channels) > 7: <NEW_LINE> <INDENT> self.channels = self.channels[:7] <NEW_LINE> <DEDENT> self.number_of_channels = len(self.channels) <NEW_LINE> self.plot_index = data.shape[0] * self.plot_ms / (data.end_time - data.start_time) <NEW_LINE> <DEDENT> if label not in self.class_colors.keys(): <NEW_LINE> <INDENT> self.class_colors[label] = self.colors.pop() <NEW_LINE> <DEDENT> pylab.ioff() <NEW_LINE> pylab.figure(self.figure_number) <NEW_LINE> for index1, channel_name1 in enumerate(self.channels): <NEW_LINE> <INDENT> for index2, channel_name2 in enumerate(self.channels): <NEW_LINE> <INDENT> channel_index1 = data.channel_names.index(channel_name1) <NEW_LINE> channel_index2 = data.channel_names.index(channel_name2) <NEW_LINE> pylab.subplot(self.number_of_channels, self.number_of_channels, index1 * self.number_of_channels + index2 + 1) <NEW_LINE> pylab.text(0.1, 0.9, "%s vs. 
%s" % (channel_name1, channel_name2), horizontalalignment='center', verticalalignment='center', transform = pylab.gca().transAxes) <NEW_LINE> if index1 == self.number_of_channels and index2 == self.number_of_channels: <NEW_LINE> <INDENT> pylab.ion() <NEW_LINE> <DEDENT> pylab.plot([data[self.plot_index, channel_index1]], [data[self.plot_index, channel_index2]], self.class_colors[label] + "o") <NEW_LINE> <DEDENT> <DEDENT> pylab.draw() <NEW_LINE> <DEDENT> def _stop_training(self, debug=False): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _execute(self, data): <NEW_LINE> <INDENT> return data
Creates a scatter plot of the given channels for the given point in time This node creates scatter_plot of the values of all vs. all specified channels for the given point in time (plot_ms). Parameters * *plot_ms* : The point of time, for which the scatter plots are drawn. For instance, if plot_ms = 200, all the values of the selected channels are collected that were measured 200ms after the window start and the scatter plots for these values are drawn * *channels* : If channels is not None, only scatter plots for these specified channels are plotted. If channels is not specified, scatter plots for the first 7 available channels are drawn. .. note:: The maximal number of channels has to be less than 8 since more than a 7*7 matrix of plots is hard to get plotted into one window. .. image:: ../../graphics/scatter_plot.png :width: 1024
62599021287bf620b6272a84
class NegativeMeanDistance: <NEW_LINE> <INDENT> def __init__(self, nneighbours=None, metric="euclidean"): <NEW_LINE> <INDENT> self.metric = metric <NEW_LINE> self.nneighbours = nneighbours <NEW_LINE> <DEDENT> def fit(self, X): <NEW_LINE> <INDENT> self.X = X <NEW_LINE> if self.nneighbours is not None: <NEW_LINE> <INDENT> if self.nneighbours < 1 or self.nneighbours > len(self.X): <NEW_LINE> <INDENT> raise ValueError("Invalid value for nneighbours") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def score(self, X, y=None): <NEW_LINE> <INDENT> return np.sum(self.score_samples(X)) <NEW_LINE> <DEDENT> def score_samples(self, X): <NEW_LINE> <INDENT> dists = scipy.spatial.distance.cdist(X, self.X, metric=self.metric) <NEW_LINE> if self.nneighbours: <NEW_LINE> <INDENT> dists.sort() <NEW_LINE> return -np.mean(dists[:,:self.nneighbours], axis=1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return -np.mean(dists, axis=1)
A helper class which behaves like KDE, but models "density" as negative mean distance. Distance behaves slightly differently to a kernel: a so-called linear kernel is only linear within the bandwidth, but goes non-linearly to zero outside. We use negative distance to preserve the sense, ie lower numbers are more anomalous, because a kernel is a similarity while a distance is a dissimilarity. We also allow user to set nneighbours, so we take NMD of these nearest neighbours only. This can help avoid an exaggerated effect of outliers. TODO: the threshold calculated with nneighbours in use may be wrong, because every point has a 0 as the first distance (distance to itself).
62599021925a0f43d25e8edc
class OutOfRange(PlaidMLError): <NEW_LINE> <INDENT> pass
A call parameter is out of the range accepted by the implementation.
625990213eb6a72ae038b4fe
class BaseModel(object): <NEW_LINE> <INDENT> def __init__(self, database): <NEW_LINE> <INDENT> self.__context = app.app_context() <NEW_LINE> self.__collection = None <NEW_LINE> self.structure = None <NEW_LINE> self.database = database <NEW_LINE> <DEDENT> @property <NEW_LINE> def collection(self): <NEW_LINE> <INDENT> if not self.__collection: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> return self.__collection <NEW_LINE> <DEDENT> @collection.setter <NEW_LINE> def collection(self, value): <NEW_LINE> <INDENT> with self.__context: <NEW_LINE> <INDENT> self.__collection = self.database.db[value]
Base model for mongodb collections. This is the structure to continue other models.
62599021462c4b4f79dbc8a4
class List(base_classes.ZonalLister): <NEW_LINE> <INDENT> @property <NEW_LINE> def service(self): <NEW_LINE> <INDENT> return self.context['compute'].disks <NEW_LINE> <DEDENT> @property <NEW_LINE> def resource_type(self): <NEW_LINE> <INDENT> return 'disks'
List Google Compute Engine persistent disks.
62599021c432627299fa3e8b
class Action(UnrecognizedHappening): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> assert 'agent' in dir(self)
A direct action taken by a player. Attributes ---------- agent : :class:`~aionationstates.Nation` The player who took the action.
62599021d164cc6175821e11
class MygeneSourceHandler(MetadataSourceHandler): <NEW_LINE> <INDENT> def extras(self, _meta): <NEW_LINE> <INDENT> _meta['taxonomy'] = {} <NEW_LINE> _meta['genome_assembly'] = {} <NEW_LINE> for s, d in self.biothings.config.TAXONOMY.items(): <NEW_LINE> <INDENT> if 'tax_id' in d: <NEW_LINE> <INDENT> _meta['taxonomy'][s] = int(d['tax_id']) <NEW_LINE> <DEDENT> if 'assembly' in d: <NEW_LINE> <INDENT> _meta['genome_assembly'][s] = d['assembly'] <NEW_LINE> <DEDENT> <DEDENT> return _meta
GET /metadata GET /v3/metadata { "biothing_type": "gene", "build_date": "2020-03-29T04:00:00.012426", "build_version": "20200329", "genome_assembly": { "human": "hg38", "mouse": "mm10", ... }, "src": { ... }, // 28 items "stats": { "total": 36232158, "total_genes": 36232158, "total_entrez_genes": 27119488, "total_ensembl_genes": 38915576, "total_ensembl_genes_mapped_to_entrez": 5954466, "total_ensembl_only_genes": 9112670, "total_species": 28605 }, "taxonomy": { "human": 9606, "mouse": 10090, ... } }
62599021925a0f43d25e8ee0
class Line(object): <NEW_LINE> <INDENT> def __init__(self, p1, p2): <NEW_LINE> <INDENT> if p1 == p2: <NEW_LINE> <INDENT> raise ValueError("Line needs two distinct points") <NEW_LINE> <DEDENT> self.p1 = p1 <NEW_LINE> self.p2 = p2 <NEW_LINE> self.vector = p2 - p1 <NEW_LINE> self.length = self.p1.distance(self.p2) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f"Line({self.p1}, {self.p2})" <NEW_LINE> <DEDENT> def intersect_line(self, other): <NEW_LINE> <INDENT> a1 = self.p1.y - self.p2.y <NEW_LINE> b1 = self.p2.x - self.p1.x <NEW_LINE> c1 = self.p1.x * self.p2.y - self.p2.x * self.p1.y <NEW_LINE> a2 = other.p1.y - other.p2.y <NEW_LINE> b2 = other.p2.x - other.p1.x <NEW_LINE> c2 = other.p1.x * other.p2.y - other.p2.x * other.p1.y <NEW_LINE> den = a1 * b2 - a2 * b1 <NEW_LINE> if abs(den) == 0.0: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> y = (c1 * a2 - c2 * a1) / den <NEW_LINE> x = (b1 * c2 - c1 * b2) / den <NEW_LINE> return Point(x, y) <NEW_LINE> <DEDENT> def inclusive_intersect_line(self, other): <NEW_LINE> <INDENT> p = self.intersect_line(other) <NEW_LINE> if p is None: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> if self.point_on_line_segment(p) and other.point_on_line_segment(p): <NEW_LINE> <INDENT> return [p] <NEW_LINE> <DEDENT> return [] <NEW_LINE> <DEDENT> def point_on_line_segment(self, point): <NEW_LINE> <INDENT> point_vector = point - self.p1 <NEW_LINE> ip = point_vector.x * self.vector.x + point_vector.y * self.vector.y <NEW_LINE> comp = ip / self.length <NEW_LINE> if 0 <= comp <= self.length: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def distance_point(self, point): <NEW_LINE> <INDENT> numerator = math.fabs( (self.p2.x - self.p1.x) * (self.p1.y - point.y) - (self.p1.x - point.x) * (self.p2.y - self.p1.y) ) <NEW_LINE> denominator = math.sqrt( (self.p2.x - self.p1.x) ** 2 + (self.p2.y - self.p1.y) ** 2 ) <NEW_LINE> return numerator / denominator <NEW_LINE> <DEDENT> @property 
<NEW_LINE> def angle(self): <NEW_LINE> <INDENT> d = self.p2 - self.p1 <NEW_LINE> return math.degrees(math.atan2(d.y, d.x)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def path(self): <NEW_LINE> <INDENT> return f"{self.p1.coordinates}--{self.p2.coordinates}" <NEW_LINE> <DEDENT> @property <NEW_LINE> def reverse_path(self): <NEW_LINE> <INDENT> return f"{self.p2.coordinates}--{self.p1.coordinates}" <NEW_LINE> <DEDENT> def reverse(self): <NEW_LINE> <INDENT> return Line(self.p2, self.p1)
A straight line in 2D between two points. Args: p1 (skymap.geometry.Point): the first point p2 (skymap.geometry.Point): the second point
625990215166f23b2e24426f
@ddt.ddt <NEW_LINE> class VolumeTypePolicyTest(base.BasePolicyTest): <NEW_LINE> <INDENT> authorized_readers = [ 'legacy_admin', 'legacy_owner', 'system_admin', 'project_admin', 'project_member', 'project_reader', 'project_foo', 'system_member', 'system_reader', 'system_foo', 'other_project_member', 'other_project_reader', ] <NEW_LINE> unauthorized_readers = [] <NEW_LINE> unauthorized_exceptions = [] <NEW_LINE> def setUp(self, enforce_scope=False, enforce_new_defaults=False, *args, **kwargs): <NEW_LINE> <INDENT> super().setUp(enforce_scope, enforce_new_defaults, *args, **kwargs) <NEW_LINE> self.controller = types.VolumeTypesController() <NEW_LINE> self.api_path = '/v3/%s/types' % (self.project_id) <NEW_LINE> self.api_version = mv.BASE_VERSION <NEW_LINE> <DEDENT> @ddt.data(*base.all_users) <NEW_LINE> def test_type_get_all_policy(self, user_id): <NEW_LINE> <INDENT> rule_name = type_policy.GET_ALL_POLICY <NEW_LINE> url = self.api_path <NEW_LINE> req = fake_api.HTTPRequest.blank(url, version=self.api_version) <NEW_LINE> self.common_policy_check(user_id, self.authorized_readers, self.unauthorized_readers, self.unauthorized_exceptions, rule_name, self.controller.index, req) <NEW_LINE> <DEDENT> @ddt.data(*base.all_users) <NEW_LINE> def test_type_get_policy(self, user_id): <NEW_LINE> <INDENT> vol_type = test_utils.create_volume_type(self.project_admin_context, testcase_instance=self, name='fake_vol_type') <NEW_LINE> rule_name = type_policy.GET_POLICY <NEW_LINE> url = '%s/%s' % (self.api_path, vol_type.id) <NEW_LINE> req = fake_api.HTTPRequest.blank(url, version=self.api_version) <NEW_LINE> self.common_policy_check(user_id, self.authorized_readers, self.unauthorized_readers, self.unauthorized_exceptions, rule_name, self.controller.show, req, id=vol_type.id) <NEW_LINE> <DEDENT> @ddt.data(*base.all_users) <NEW_LINE> def test_extra_spec_policy(self, user_id): <NEW_LINE> <INDENT> vol_type = test_utils.create_volume_type( self.project_admin_context, testcase_instance=self, 
name='fake_vol_type', extra_specs={'multiattach': '<is> True'}) <NEW_LINE> rule_name = type_policy.EXTRA_SPEC_POLICY <NEW_LINE> url = '%s/%s' % (self.api_path, vol_type.id) <NEW_LINE> req = fake_api.HTTPRequest.blank(url, version=self.api_version) <NEW_LINE> self.policy.set_rules({type_policy.GET_POLICY: ""}, overwrite=False) <NEW_LINE> authorized_readers = [user_id] <NEW_LINE> unauthorized_readers = [] <NEW_LINE> response = self.common_policy_check(user_id, authorized_readers, unauthorized_readers, self.unauthorized_exceptions, rule_name, self.controller.show, req, id=vol_type.id) <NEW_LINE> context = self.create_context(user_id) <NEW_LINE> if 'reader' in context.roles or 'admin' in context.roles: <NEW_LINE> <INDENT> self.assertIn('extra_specs', response['volume_type']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.assertNotIn('extra_specs', response['volume_type'])
Verify default policy settings for the types API
62599021d18da76e235b789b
class Link(Link_ToGFA2, GFA1_ToGFA2, Link_References, Equivalence, Complement, Canonical, Other, GFA1_AlignmentType, OrientedSegments, GFA1_References, AlignmentType, FromTo, Edge): <NEW_LINE> <INDENT> RECORD_TYPE = "L" <NEW_LINE> POSFIELDS = ["from_segment", "from_orient", "to_segment", "to_orient", "overlap"] <NEW_LINE> PREDEFINED_TAGS = ["MQ", "NM", "RC", "FC", "KC", "ID"] <NEW_LINE> DATATYPE = { "from_segment" : "segment_name_gfa1", "from_orient" : "orientation", "to_segment" : "segment_name_gfa1", "to_orient" : "orientation", "overlap" : "alignment_gfa1", "MQ" : "i", "NM" : "i", "RC" : "i", "FC" : "i", "KC" : "i", "ID" : "Z", } <NEW_LINE> NAME_FIELD = "ID" <NEW_LINE> REFERENCE_FIELDS = ["from_segment", "to_segment"] <NEW_LINE> BACKREFERENCE_RELATED_FIELDS = ["to_orient", "from_orient", "overlap"] <NEW_LINE> DEPENDENT_LINES = ["paths"]
A link line (L) of a GFA1 file. Note: from_segment and to_segment are used instead of from/to as from is not a valid method name in Python and "to" alone potentially clashes with the tag namespace.
625990219b70327d1c57fc1d
class TestTokeniser(unittest.TestCase): <NEW_LINE> <INDENT> def testComplexExpression(self): <NEW_LINE> <INDENT> t = list(tree_parse.tokeniser("(+ (- 2 3) (a))")) <NEW_LINE> x = ['(','+','(','-','2','3',')','(','a',')',')'] <NEW_LINE> self.assertEqual(x, t) <NEW_LINE> <DEDENT> def testSimpleExpression(self): <NEW_LINE> <INDENT> t = list(tree_parse.tokeniser("(- 2 3)")) <NEW_LINE> x = ['(','-','2','3',')'] <NEW_LINE> self.assertEqual(x, t) <NEW_LINE> <DEDENT> def testIntLiteral(self): <NEW_LINE> <INDENT> t = list(tree_parse.tokeniser("2")) <NEW_LINE> x = ['2'] <NEW_LINE> self.assertEqual(x, t)
Test the functioning of the tokeniser
62599021796e427e5384f61a
class NewProjectAction(ProjectAction): <NEW_LINE> <INDENT> uol = 'envisage.ui.single_project.ui_service.UiService' <NEW_LINE> method_name = 'create' <NEW_LINE> description = 'Create a project' <NEW_LINE> image = ImageResource('new_project') <NEW_LINE> name = 'New...' <NEW_LINE> tooltip = 'Create a project'
An action that creates a new project.
62599021d164cc6175821e12
class FederalDeputyTermDTO: <NEW_LINE> <INDENT> def __init__(self, id, personId, state, initialDate, finalDate=None): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.personId = personId <NEW_LINE> self.state = state <NEW_LINE> self.initialDate = initialDate <NEW_LINE> self.finalDate = finalDate
Class used to store and transfer the data of a term of Federal Deputy
62599021d164cc6175821e13
class GetTags(Choreography): <NEW_LINE> <INDENT> def __init__(self, temboo_session): <NEW_LINE> <INDENT> Choreography.__init__(self, temboo_session, '/Library/LastFm/Album/GetTags') <NEW_LINE> <DEDENT> def new_input_set(self): <NEW_LINE> <INDENT> return GetTagsInputSet() <NEW_LINE> <DEDENT> def _make_result_set(self, result, path): <NEW_LINE> <INDENT> return GetTagsResultSet(result, path) <NEW_LINE> <DEDENT> def _make_execution(self, session, exec_id, path): <NEW_LINE> <INDENT> return GetTagsChoreographyExecution(session, exec_id, path)
Create a new instance of the GetTags Choreography. A TembooSession object, containing a valid set of Temboo credentials, must be supplied.
625990215166f23b2e244271
class AddComment_form(FlaskForm): <NEW_LINE> <INDENT> body = TextAreaField(label='Текст комментария', validators=[ Length(min=10, max=5000, message='Комментарий не должен быть меньше 10 и больше 5000 символов')])
Форма добавления комментария.
62599021d18da76e235b789c
class TestCraftingShovel(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.i = Workbench() <NEW_LINE> <DEDENT> def test_check_crafting(self): <NEW_LINE> <INDENT> self.i.crafting[0] = Slot(bravo.blocks.blocks["cobblestone"].slot, 0, 1) <NEW_LINE> self.i.crafting[3] = Slot(bravo.blocks.items["stick"].slot, 0, 1) <NEW_LINE> self.i.crafting[6] = Slot(bravo.blocks.items["stick"].slot, 0, 1) <NEW_LINE> self.i.update_crafted() <NEW_LINE> self.assertTrue(self.i.recipe) <NEW_LINE> self.assertEqual(self.i.crafted[0], (bravo.blocks.items["stone-shovel"].slot, 0, 1)) <NEW_LINE> <DEDENT> def test_check_crafting_multiple(self): <NEW_LINE> <INDENT> self.i.crafting[0] = Slot(bravo.blocks.blocks["cobblestone"].slot, 0, 2) <NEW_LINE> self.i.crafting[3] = Slot(bravo.blocks.items["stick"].slot, 0, 2) <NEW_LINE> self.i.crafting[6] = Slot(bravo.blocks.items["stick"].slot, 0, 2) <NEW_LINE> self.i.update_crafted() <NEW_LINE> self.assertEqual(self.i.crafted[0], (bravo.blocks.items["stone-shovel"].slot, 0, 1)) <NEW_LINE> <DEDENT> def test_check_crafting_offset(self): <NEW_LINE> <INDENT> self.i.crafting[1] = Slot(bravo.blocks.blocks["cobblestone"].slot, 0, 2) <NEW_LINE> self.i.crafting[4] = Slot(bravo.blocks.items["stick"].slot, 0, 2) <NEW_LINE> self.i.crafting[7] = Slot(bravo.blocks.items["stick"].slot, 0, 2) <NEW_LINE> self.i.update_crafted() <NEW_LINE> self.assertTrue(self.i.recipe)
Test basic crafting functionality. Assumes that the basic shovel recipe is present and enabled. This recipe was chosen because shovels broke at one point and we couldn't figure out why.
625990216e29344779b014ef
class ConstantIndexed(Indexed, Constant): <NEW_LINE> <INDENT> def __new__(cls, label, indices, **kwargs): <NEW_LINE> <INDENT> base = IndexedBase(label) <NEW_LINE> if isinstance(indices, list): <NEW_LINE> <INDENT> for i in indices: <NEW_LINE> <INDENT> if not isinstance(i, Idx): <NEW_LINE> <INDENT> raise ValueError("The indices of the Constant Indexed Object should be of type Idx", i) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if not isinstance(indices, Idx): <NEW_LINE> <INDENT> raise ValueError("The indices of the Constant Indexed Object should be of type Idx", i) <NEW_LINE> <DEDENT> indices = flatten([indices]) <NEW_LINE> <DEDENT> ret = Indexed.__new__(cls, base, *indices) <NEW_LINE> ret.is_constant = True <NEW_LINE> ret.inline_array = True <NEW_LINE> ret.is_input = True <NEW_LINE> ret._datatype = SimulationDataType() <NEW_LINE> ret._value = ["Input" for i in range(ret.shape[0])] <NEW_LINE> return ret <NEW_LINE> <DEDENT> @property <NEW_LINE> def datatype(self): <NEW_LINE> <INDENT> return self._datatype <NEW_LINE> <DEDENT> @datatype.setter <NEW_LINE> def datatype(self, dtype): <NEW_LINE> <INDENT> self._datatype = dtype <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> return self._value <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return str(self.base) <NEW_LINE> <DEDENT> @value.setter <NEW_LINE> def value(self, numerical_values, dtype=None): <NEW_LINE> <INDENT> self.is_input = False <NEW_LINE> if len(numerical_values) != len(self.value): <NEW_LINE> <INDENT> raise ValueError("Values for ConstantIndexed should be of length of the constants.") <NEW_LINE> <DEDENT> self._value = numerical_values <NEW_LINE> if dtype: <NEW_LINE> <INDENT> self.datatype = dtype <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.datatype = SimulationDataType() <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> @property <NEW_LINE> def value_access_c(self): <NEW_LINE> <INDENT> return ["%s[%d]" % (self.base, i) for i 
in range(0, len(self.value))] <NEW_LINE> <DEDENT> @property <NEW_LINE> def location(cls): <NEW_LINE> <INDENT> return list(cls.args[1:])
An indexed object represented by an array of constants. :param str label: Name of the ConstantIndexed. :param list indices: Indices of the ConstantIndexed. (See: Sympy Indexed class).
6259902156b00c62f0fb375e
@interface <NEW_LINE> class RubricAPI: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def get( id: Optional[UUID] = None, user: Optional[UUID] = None, object: Optional[UUID] = None, timestamp: Optional[Timestamp] = None, skip: Optional[int] = None, limit: Optional[int] = None, ) -> HTTPResponse[List[RubricModel]]: <NEW_LINE> <INDENT> raise NotImplemented <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def post( body: RubricModel ) -> HTTPResponse[None]: <NEW_LINE> <INDENT> raise NotImplemented
swagger: '2.0' info: title: FAIRshakeRubric version: 1.0.0 description: A generic FAIRshake Rubric REST API for storing questions to be answered. contact: email: daniel.clarke@mssm.edu license: name: Apache 2.0 url: http://www.apache.org/licenses/LICENSE-2.0.html schemes: - https securityDefinitions: ApiKeyAuth: type: apiKey in: header name: X-API-Key paths: /: get: {RubricAPI__get} post: {RubricAPI__post} definitions: Criterion: {CriterionModel} Rubric: {RubricModel}
62599021507cdc57c63a5c44
class DjangoAppNamePrompt(StringTemplatePrompt): <NEW_LINE> <INDENT> PARAMETER = 'django_app_name' <NEW_LINE> MESSAGE = '{} Enter a Django app name or leave blank to use' <NEW_LINE> DEFAULT_VALUE = 'home' <NEW_LINE> def _validate(self, s: str): <NEW_LINE> <INDENT> if not s.isidentifier(): <NEW_LINE> <INDENT> raise ValueError(('Invalid Django project name "{}": ' 'must be a valid Python identifier').format(s))
Allow the user to enter a Django project name.
6259902121a7993f00c66e1c
class MyClass(C, B): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> if direct[0]: <NEW_LINE> <INDENT> C.__init__(self) <NEW_LINE> B.__init__(self) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> super(MyClass, self).__init__() <NEW_LINE> <DEDENT> <DEDENT> def do_something(self): <NEW_LINE> <INDENT> print("Doing something!") <NEW_LINE> print()
A class that you write.
62599021d164cc6175821e16
class BipedNeckPuppet(object): <NEW_LINE> <INDENT> def __init__(self, side = None, name = [None, None, None], position = [[0, 20, 0], [0, 21, 0], [0, 22, 0]]): <NEW_LINE> <INDENT> self.__create(side = side, name = name, position = position) <NEW_LINE> <DEDENT> def reload_modules(self): <NEW_LINE> <INDENT> reload(jointChain) <NEW_LINE> <DEDENT> def create_joint_setup(self, side = None, name = [None, None, None], position = [[0, 20, 0], [0, 21, 0], [0, 22, 0]]): <NEW_LINE> <INDENT> bindJoints = jointChain.Chain( side = side, name = name, position = position, mirror = False, radius = 0.25) <NEW_LINE> fkJoints = jointChain.FkChain(side = side, name = name, position = position, mirror = False, radius = 0.5) <NEW_LINE> ikJoints = jointChain.IkChain(side = side, name = name, position = position, mirror = False, ikSolver = 'ikRPsolver', radius = 0.75) <NEW_LINE> <DEDENT> def create_control_setup(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def create_hook_setup(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def cleanup(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __create(self, side = None, name = [None, None, None], position = [[0, 20, 0], [0, 21, 0], [0, 22, 0]]): <NEW_LINE> <INDENT> self.reload_modules() <NEW_LINE> self.create_joint_setup(side = side, name = name, position = position) <NEW_LINE> self.create_control_setup() <NEW_LINE> self.create_hook_setup() <NEW_LINE> self.cleanup()
this is the biped neck puppet class, which will be used to create a real biped neck
62599021507cdc57c63a5c46
class StatementExtension(object): <NEW_LINE> <INDENT> def __init__(self, original_iterators=None, expr=None): <NEW_LINE> <INDENT> self.original_iterators = original_iterators <NEW_LINE> self.expr = expr <NEW_LINE> <DEDENT> def get_number_original_iterators(self): <NEW_LINE> <INDENT> if self.original_iterators is not None: <NEW_LINE> <INDENT> return len(self.original_iterators) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> <DEDENT> def get_original_iterators(self): <NEW_LINE> <INDENT> return self.original_iterators <NEW_LINE> <DEDENT> def get_expr(self): <NEW_LINE> <INDENT> return self.expr <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def read_os(content, index): <NEW_LINE> <INDENT> while content[index].startswith('#') or content[index] == '\n': <NEW_LINE> <INDENT> index = index + 1 <NEW_LINE> <DEDENT> while index < len(content) and ( content[index].startswith('<') or content[index].startswith('#') or content[index] == '\n'): <NEW_LINE> <INDENT> index = index + 1 <NEW_LINE> <DEDENT> index = index + 1 <NEW_LINE> while index < len(content) and (content[index].startswith('#') or content[index] == '\n'): <NEW_LINE> <INDENT> index = index + 1 <NEW_LINE> <DEDENT> iters = content[index].split() <NEW_LINE> index = index + 1 <NEW_LINE> while index < len(content) and (content[index].startswith('#') or content[index] == '\n'): <NEW_LINE> <INDENT> index = index + 1 <NEW_LINE> <DEDENT> expr = content[index].strip() <NEW_LINE> index = index + 1 <NEW_LINE> while index < len(content) and ( content[index].startswith('</') or content[index].startswith('#') or content[index] == '\n'): <NEW_LINE> <INDENT> index = index + 1 <NEW_LINE> <DEDENT> se = StatementExtension(iters, expr) <NEW_LINE> return se, index <NEW_LINE> <DEDENT> def write_os(self, f): <NEW_LINE> <INDENT> print("<body>", file=f) <NEW_LINE> print("# Number of original iterators", file=f) <NEW_LINE> if self.original_iterators is not None: <NEW_LINE> <INDENT> 
print(str(len(self.original_iterators)), file=f) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("0", file=f) <NEW_LINE> <DEDENT> print("# List of original iterators", file=f) <NEW_LINE> line = "" <NEW_LINE> if self.original_iterators is not None: <NEW_LINE> <INDENT> for elem in self.original_iterators: <NEW_LINE> <INDENT> line = line + str(elem) + " " <NEW_LINE> <DEDENT> <DEDENT> print(line, file=f) <NEW_LINE> print("# Statement body expression", file=f) <NEW_LINE> print(self.expr, file=f) <NEW_LINE> print("</body>", file=f)
Represents an Extension within a Statement Attributes: - original_iterators : List of original iterators - expr : Statement body expression
62599021d164cc6175821e17
@tf_export("RandomShuffleQueue") <NEW_LINE> class RandomShuffleQueue(QueueBase): <NEW_LINE> <INDENT> def __init__(self, capacity, min_after_dequeue, dtypes, shapes=None, names=None, seed=None, shared_name=None, name="random_shuffle_queue"): <NEW_LINE> <INDENT> dtypes = _as_type_list(dtypes) <NEW_LINE> shapes = _as_shape_list(shapes, dtypes) <NEW_LINE> names = _as_name_list(names, dtypes) <NEW_LINE> seed1, seed2 = random_seed.get_seed(seed) <NEW_LINE> if seed1 is None and seed2 is None: <NEW_LINE> <INDENT> seed1, seed2 = 0, 0 <NEW_LINE> <DEDENT> elif seed is None and shared_name is not None: <NEW_LINE> <INDENT> string = (str(seed1) + shared_name).encode("utf-8") <NEW_LINE> seed2 = int(hashlib.md5(string).hexdigest()[:8], 16) & 0x7FFFFFFF <NEW_LINE> <DEDENT> queue_ref = gen_data_flow_ops.random_shuffle_queue_v2( component_types=dtypes, shapes=shapes, capacity=capacity, min_after_dequeue=min_after_dequeue, seed=seed1, seed2=seed2, shared_name=shared_name, name=name) <NEW_LINE> super(RandomShuffleQueue, self).__init__(dtypes, shapes, names, queue_ref)
A queue implementation that dequeues elements in a random order. See @{tf.QueueBase} for a description of the methods on this class. @compatibility(eager) Queues are not compatible with eager execution. Instead, please use `tf.data` to get data into your model. @end_compatibility
62599021925a0f43d25e8ee6
class ShortHeader(exception.FormError): <NEW_LINE> <INDENT> pass
The DNS packet passed to from_wire() is too short.
62599021d18da76e235b789e
class TrainAI: <NEW_LINE> <INDENT> def __init__(self, state_shape, replay_size=10000, ai=None, verbose=False ): <NEW_LINE> <INDENT> self.state_shape = state_shape <NEW_LINE> self.verbose = verbose <NEW_LINE> if ai is None: <NEW_LINE> <INDENT> self.ai = QAI( state_shape=self.state_shape, output_dim=5, verbose=self.verbose ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.ai = ai <NEW_LINE> <DEDENT> self.replay_size = replay_size <NEW_LINE> self.dataset = deque() <NEW_LINE> <DEDENT> def get_selfplay_data(self, n_round): <NEW_LINE> <INDENT> if self.verbose: <NEW_LINE> <INDENT> starttime = time.time() <NEW_LINE> count = 0 <NEW_LINE> <DEDENT> for i in range(n_round): <NEW_LINE> <INDENT> if self.verbose: <NEW_LINE> <INDENT> print("Start self-playing to obtain data...(round {0})".format(i+1)) <NEW_LINE> <DEDENT> engine = SelfplayEngine( ai=self.ai, verbose=self.verbose ) <NEW_LINE> data = engine.start() <NEW_LINE> states, actions, rewards, states_next, terminals = data <NEW_LINE> for j in range(len(terminals)): <NEW_LINE> <INDENT> self.dataset.append((states[j], actions[j], rewards[j], states_next[j], terminals[j])) <NEW_LINE> if len(self.dataset) > self.replay_size: <NEW_LINE> <INDENT> self.dataset.popleft() <NEW_LINE> <DEDENT> <DEDENT> count += len(terminals) <NEW_LINE> <DEDENT> if self.verbose: <NEW_LINE> <INDENT> endtime = time.time() <NEW_LINE> print("End of self-play: Run Time {0:.2f}s, Set Size: {1}".format(endtime-starttime, count)) <NEW_LINE> <DEDENT> <DEDENT> def update_ai(self, minibatch_size): <NEW_LINE> <INDENT> if self.verbose: <NEW_LINE> <INDENT> print("Updating neural network of AI model ...") <NEW_LINE> <DEDENT> minibatch = random.sample(self.dataset, minibatch_size) <NEW_LINE> loss = self.ai.train_on_batch(minibatch) <NEW_LINE> if self.verbose: <NEW_LINE> <INDENT> print("End of updation with loss: {0:.4f}".format(loss)) <NEW_LINE> <DEDENT> return loss <NEW_LINE> <DEDENT> def start(self, filename): <NEW_LINE> <INDENT> n_round = 10 <NEW_LINE> n_epochs = 
100000 <NEW_LINE> replay_size = 10000 <NEW_LINE> minibatch_size = 32 <NEW_LINE> verbose_interval = 1 <NEW_LINE> save_interval = 10 <NEW_LINE> for i in range(n_epochs): <NEW_LINE> <INDENT> if self.verbose: <NEW_LINE> <INDENT> if (i+1)%verbose_interval == 0: <NEW_LINE> <INDENT> print("Train Batch: {0}".format(i+1)) <NEW_LINE> <DEDENT> <DEDENT> self.get_selfplay_data(n_round) <NEW_LINE> loss = self.update_ai(minibatch_size) <NEW_LINE> if self.verbose: <NEW_LINE> <INDENT> print("Saving model...",end="") <NEW_LINE> <DEDENT> if (i+1)%save_interval == 0: <NEW_LINE> <INDENT> self.ai.save_nnet(filename) <NEW_LINE> <DEDENT> if self.verbose: <NEW_LINE> <INDENT> print("OK!")
Train AI model process
62599021796e427e5384f621
class OutputFiles(Enum): <NEW_LINE> <INDENT> CONFIGS = "configs" <NEW_LINE> LOGS = "logs" <NEW_LINE> RESULTS = "results" <NEW_LINE> TEMPORARY = "temporary"
Output file types. Args: CONFIGS: Configuration files. LOGS: Log files. RESULTS: Result files. TEMPORARY: Temporary files. Attributes: CONFIGS: Configuration files. LOGS: Log files. RESULTS: Result files. TEMPORARY: Temporary files.
62599021d164cc6175821e19
class PoissonDiagnostic(CountDiagnostic): <NEW_LINE> <INDENT> def _init__(self, results): <NEW_LINE> <INDENT> self.results = results <NEW_LINE> <DEDENT> def test_dispersion(self): <NEW_LINE> <INDENT> res = dispersion_poisson(self.results) <NEW_LINE> return res <NEW_LINE> <DEDENT> def test_poisson_zeroinflation(self, method="prob", exog_infl=None): <NEW_LINE> <INDENT> if method == "prob": <NEW_LINE> <INDENT> if exog_infl is not None: <NEW_LINE> <INDENT> warnings.warn('exog_infl is only used if method = "broek"') <NEW_LINE> <DEDENT> res = test_poisson_zeros(self.results) <NEW_LINE> <DEDENT> elif method == "broek": <NEW_LINE> <INDENT> if exog_infl is None: <NEW_LINE> <INDENT> res = test_poisson_zeroinflation_broek(self.results) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> exog_infl = np.asarray(exog_infl) <NEW_LINE> if exog_infl.ndim == 1: <NEW_LINE> <INDENT> exog_infl = exog_infl[:, None] <NEW_LINE> <DEDENT> res = test_poisson_zeroinflation_jh(self.results, exog_infl=exog_infl) <NEW_LINE> <DEDENT> <DEDENT> return res <NEW_LINE> <DEDENT> def _chisquare_binned(self, sort_var=None, bins=10, k_max=None, df=None, sort_method="quicksort", frac_upp=0.1, alpha_nc=0.05): <NEW_LINE> <INDENT> if sort_var is None: <NEW_LINE> <INDENT> sort_var = self.results.predict(which="lin") <NEW_LINE> <DEDENT> endog = self.results.model.endog <NEW_LINE> expected = self.results.predict(which="prob") <NEW_LINE> counts = (endog[:, None] == np.arange(expected.shape[1])).astype(int) <NEW_LINE> if k_max is None: <NEW_LINE> <INDENT> nobs = len(endog) <NEW_LINE> icumcounts_sum = nobs - counts.sum(0).cumsum(0) <NEW_LINE> k_max = np.argmax(icumcounts_sum < nobs * frac_upp) - 1 <NEW_LINE> <DEDENT> expected = expected[:, :k_max] <NEW_LINE> counts = counts[:, :k_max] <NEW_LINE> expected[:, -1] += 1 - expected.sum(1) <NEW_LINE> counts[:, -1] += 1 - counts.sum(1) <NEW_LINE> res = test_chisquare_binning(counts, expected, sort_var=sort_var, bins=bins, df=df, ordered=True, sort_method=sort_method, 
alpha_nc=alpha_nc) <NEW_LINE> return res
Diagnostic and specification tests and plots for Poisson model status: experimental Parameters ---------- results : PoissonResults instance
625990215166f23b2e244277
class SqlScriptContent(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'query': {'required': True}, 'current_connection': {'required': True}, } <NEW_LINE> _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'query': {'key': 'query', 'type': 'str'}, 'current_connection': {'key': 'currentConnection', 'type': 'SqlConnection'}, 'metadata': {'key': 'metadata', 'type': 'SqlScriptMetadata'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(SqlScriptContent, self).__init__(**kwargs) <NEW_LINE> self.additional_properties = kwargs.get('additional_properties', None) <NEW_LINE> self.query = kwargs['query'] <NEW_LINE> self.current_connection = kwargs['current_connection'] <NEW_LINE> self.metadata = kwargs.get('metadata', None)
The content of the SQL script. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] :param query: Required. SQL query to execute. :type query: str :param current_connection: Required. The connection used to execute the SQL script. :type current_connection: ~azure.synapse.artifacts.models.SqlConnection :param metadata: The metadata of the SQL script. :type metadata: ~azure.synapse.artifacts.models.SqlScriptMetadata
62599021d18da76e235b789f
class Qualifier: <NEW_LINE> <INDENT> def __init__(self, key="", value=""): <NEW_LINE> <INDENT> self.key = key <NEW_LINE> self.value = value <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Qualifier(key=%r, value=%r)" % (self.key, self.value) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> output = " " * Record.GB_FEATURE_INDENT <NEW_LINE> space_wrap = 1 <NEW_LINE> for no_space_key in Bio.GenBank._BaseGenBankConsumer.remove_space_keys: <NEW_LINE> <INDENT> if no_space_key in self.key: <NEW_LINE> <INDENT> space_wrap = 0 <NEW_LINE> <DEDENT> <DEDENT> return output + _wrapped_genbank( self.key + self.value, Record.GB_FEATURE_INDENT, space_wrap )
Hold information about a qualifier in a GenBank feature. Attributes: - key - The key name of the qualifier (ie. /organism=) - value - The value of the qualifier ("Dictyostelium discoideum").
625990213eb6a72ae038b508
class Widget(CountableWidget): <NEW_LINE> <INDENT> widget_type = 'checkbox' <NEW_LINE> widget_label = _('Checkboxes') <NEW_LINE> groups = ( DefaultSchemata, LayoutSchemata, CountableSchemata, DisplaySchemata ) <NEW_LINE> index = ViewPageTemplateFile('widget.pt') <NEW_LINE> @property <NEW_LINE> def css_class(self): <NEW_LINE> <INDENT> css_type = self.widget_type <NEW_LINE> css_title = normalizer.normalize(self.data.title) <NEW_LINE> return ('faceted-checkboxes-widget ' 'faceted-{0}-widget section-{1}{2}').format(css_type, css_title, self.custom_css) <NEW_LINE> <DEDENT> @property <NEW_LINE> def default(self): <NEW_LINE> <INDENT> default = super(Widget, self).default <NEW_LINE> if not default: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> if isinstance(default, (str, six.text_type)): <NEW_LINE> <INDENT> default = [default, ] <NEW_LINE> <DEDENT> if six.PY2: <NEW_LINE> <INDENT> default = [x.encode('utf-8') for x in default] <NEW_LINE> <DEDENT> return default <NEW_LINE> <DEDENT> def selected(self, key): <NEW_LINE> <INDENT> default = self.default <NEW_LINE> if not default: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> for item in default: <NEW_LINE> <INDENT> if key.lower() == item.lower(): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> @property <NEW_LINE> def operator_visible(self): <NEW_LINE> <INDENT> return self.data.get('operator_visible', False) <NEW_LINE> <DEDENT> @property <NEW_LINE> def operator(self): <NEW_LINE> <INDENT> return self.data.get('operator', 'and') <NEW_LINE> <DEDENT> def query(self, form): <NEW_LINE> <INDENT> query = {} <NEW_LINE> index = self.data.get('index', '') <NEW_LINE> if six.PY2: <NEW_LINE> <INDENT> index = index.encode('utf-8', 'replace') <NEW_LINE> <DEDENT> if not self.operator_visible: <NEW_LINE> <INDENT> operator = self.operator <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> operator = form.get(self.data.getId() + '-operator', self.operator) <NEW_LINE> <DEDENT> if six.PY2: 
<NEW_LINE> <INDENT> operator = operator.encode('utf-8', 'replace') <NEW_LINE> <DEDENT> if not index: <NEW_LINE> <INDENT> return query <NEW_LINE> <DEDENT> if self.hidden: <NEW_LINE> <INDENT> value = self.default <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> value = form.get(self.data.getId(), '') <NEW_LINE> <DEDENT> if not value: <NEW_LINE> <INDENT> return query <NEW_LINE> <DEDENT> catalog = getToolByName(self.context, 'portal_catalog') <NEW_LINE> catalog_index = catalog.Indexes.get(index) <NEW_LINE> operator_supported = True <NEW_LINE> if catalog_index: <NEW_LINE> <INDENT> if catalog_index.meta_type == 'BooleanIndex': <NEW_LINE> <INDENT> if value == 'False': <NEW_LINE> <INDENT> value = False <NEW_LINE> <DEDENT> elif value == 'True': <NEW_LINE> <INDENT> value = True <NEW_LINE> <DEDENT> <DEDENT> operator_supported = 'operator' in getattr( catalog_index, 'query_options', []) <NEW_LINE> <DEDENT> query[index] = {'query': value} <NEW_LINE> if operator_supported: <NEW_LINE> <INDENT> query[index]['operator'] = operator <NEW_LINE> <DEDENT> return query
Widget
6259902163f4b57ef00864c4
class AssetsAnnotationTagsService(base_api.BaseApiService): <NEW_LINE> <INDENT> _NAME = u'assets_annotationTags' <NEW_LINE> def __init__(self, client): <NEW_LINE> <INDENT> super(CategorymanagerV1alpha2.AssetsAnnotationTagsService, self).__init__(client) <NEW_LINE> self._upload_configs = { } <NEW_LINE> <DEDENT> def List(self, request, global_params=None): <NEW_LINE> <INDENT> config = self.GetMethodConfig('List') <NEW_LINE> return self._RunMethod( config, request, global_params=global_params) <NEW_LINE> <DEDENT> List.method_config = lambda: base_api.ApiMethodInfo( flat_path=u'v1alpha2/assets/{assetsId}/annotationTags', http_method=u'GET', method_id=u'categorymanager.assets.annotationTags.list', ordered_params=[u'name'], path_params=[u'name'], query_params=[u'pageSize', u'pageToken', u'subAsset'], relative_path=u'v1alpha2/{+name}/annotationTags', request_field='', request_type_name=u'CategorymanagerAssetsAnnotationTagsListRequest', response_type_name=u'ListAnnotationTagsResponse', supports_download=False, )
Service class for the assets_annotationTags resource.
62599021be8e80087fbbff1a
class TriggerElem: <NEW_LINE> <INDENT> def __init__(self, parent, elem, schedule): <NEW_LINE> <INDENT> self.parent = parent <NEW_LINE> self.elem = elem <NEW_LINE> self.schedule = schedule <NEW_LINE> <DEDENT> def eval(self): <NEW_LINE> <INDENT> import interface as i <NEW_LINE> LayerStateType = i.control.scan.LayerStateType <NEW_LINE> ScheduleState = i.control.scan.ScheduleState <NEW_LINE> TriggerType = i.control.scan.TriggerType <NEW_LINE> logger.debug("TriggerElem eval {} {}", self.elem, self.schedule) <NEW_LINE> if self.elem['type'] == 'ScanCode': <NEW_LINE> <INDENT> i.control.cmd('addScanCode')(self.elem['uid'], TriggerType.Switch1) <NEW_LINE> <DEDENT> elif self.elem['type'] == 'IndCode': <NEW_LINE> <INDENT> i.control.cmd('addScanCode')(self.elem['uid'], TriggerType.LED1) <NEW_LINE> <DEDENT> elif self.elem['type'] in ['Layer', 'LayerShift', 'LayerLatch', 'LayerLock']: <NEW_LINE> <INDENT> layer_state = LayerStateType.Shift <NEW_LINE> if self.elem['type'] == 'LayerLatch': <NEW_LINE> <INDENT> layer_state = LayerStateType.Latch <NEW_LINE> <DEDENT> elif self.elem['type'] == 'LayerLock': <NEW_LINE> <INDENT> layer_state = LayerStateType.Lock <NEW_LINE> <DEDENT> i.control.cmd('applyLayer')(ScheduleState.P, self.elem['uid'], layer_state) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.warning("Unknown TriggerElem {}", self.elem) <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def cleanup(self): <NEW_LINE> <INDENT> import interface as i <NEW_LINE> LayerStateType = i.control.scan.LayerStateType <NEW_LINE> ScheduleState = i.control.scan.ScheduleState <NEW_LINE> TriggerType = i.control.scan.TriggerType <NEW_LINE> logger.debug("TriggerElem cleanup {} {}", self.elem, self.schedule) <NEW_LINE> if self.elem['type'] == 'ScanCode': <NEW_LINE> <INDENT> i.control.cmd('removeScanCode')(self.elem['uid'], TriggerType.Switch1) <NEW_LINE> <DEDENT> elif self.elem['type'] == 'IndCode': <NEW_LINE> <INDENT> i.control.cmd('removeScanCode')(self.elem['uid'], TriggerType.LED1) 
<NEW_LINE> <DEDENT> elif self.elem['type'] in ['Layer', 'LayerShift', 'LayerLatch', 'LayerLock']: <NEW_LINE> <INDENT> state = ScheduleState.R <NEW_LINE> layer_state = LayerStateType.Shift <NEW_LINE> if self.elem['type'] == 'LayerLatch': <NEW_LINE> <INDENT> state = ScheduleState.P <NEW_LINE> layer_state = LayerStateType.Latch <NEW_LINE> <DEDENT> elif self.elem['type'] == 'LayerLock': <NEW_LINE> <INDENT> state = ScheduleState.P <NEW_LINE> layer_state = LayerStateType.Lock <NEW_LINE> <DEDENT> i.control.cmd('applyLayer')(state, self.elem['uid'], layer_state) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.warning("Unknown TriggerElem {}", self.elem) <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> output = "{} {}".format( self.elem, self.schedule, ) <NEW_LINE> return output
Handles individual trigger elements and how to interface with libkiibohd
62599021ac7a0e7691f7338e
class Observation(object): <NEW_LINE> <INDENT> def __init__(self, value, weight=1.0): <NEW_LINE> <INDENT> self.value = float(value) <NEW_LINE> self.weight = float(weight) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return 37 * hash(self.value) + hash(self.weight) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__class__ == other.__class__ and self.value == other.value and self.weight == other.weight <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self.__eq__(other) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> msg = str(self.value) <NEW_LINE> if self.weight != 1.0: <NEW_LINE> <INDENT> msg += '(' + str(self.weight) + ')' <NEW_LINE> <DEDENT> return msg <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.__repr__()
An observation has a weight and a value, representing a data element in a partition. A partition is a collection of observations
625990215e10d32532ce4057
class PrintableEnum(Enum): <NEW_LINE> <INDENT> def __str__(self): <NEW_LINE> <INDENT> return self.value <NEW_LINE> <DEDENT> def untype(self): <NEW_LINE> <INDENT> return self.value
Allows for easier formatting when substituting for parameters.
62599021796e427e5384f623
class ComplementaryDomain( Domain): <NEW_LINE> <INDENT> def __init__(self, complemented_domain ): <NEW_LINE> <INDENT> self._object_type = 'domain' <NEW_LINE> self._complement = complemented_domain <NEW_LINE> <DEDENT> @property <NEW_LINE> def id(self): <NEW_LINE> <INDENT> return self._complement.id <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> if self._complement.name.endswith("*") or self._complement.name.endswith("'"): <NEW_LINE> <INDENT> return self._complement.name.rstrip("*'") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self._complement.name + "*" <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def is_composite(self): <NEW_LINE> <INDENT> return self._complement.is_composite <NEW_LINE> <DEDENT> @property <NEW_LINE> def length(self): <NEW_LINE> <INDENT> return self._complement.length <NEW_LINE> <DEDENT> @property <NEW_LINE> def _sequence(self): <NEW_LINE> <INDENT> return self._complement.sequence.complement <NEW_LINE> <DEDENT> @property <NEW_LINE> def sequence(self): <NEW_LINE> <INDENT> return self._sequence <NEW_LINE> <DEDENT> @sequence.setter <NEW_LINE> def sequence(self, new_seq): <NEW_LINE> <INDENT> self._complement.sequence = new_seq.complement <NEW_LINE> <DEDENT> def restrict_sequence(self, constraints): <NEW_LINE> <INDENT> self._complement.add_sequence(constraints.complement) <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_complement(self): <NEW_LINE> <INDENT> return not self._complement.is_complement <NEW_LINE> <DEDENT> @property <NEW_LINE> def complement(self): <NEW_LINE> <INDENT> return self._complement <NEW_LINE> <DEDENT> def equivalent_to(self, other): <NEW_LINE> <INDENT> return self._complement.complementary_to(other) <NEW_LINE> <DEDENT> def complementary_to(self, other): <NEW_LINE> <INDENT> return self._complement.equivalent_to(other) <NEW_LINE> <DEDENT> @property <NEW_LINE> def subdomains(self): <NEW_LINE> <INDENT> return [d.complement for d in reversed(self._complement.subdomains)] <NEW_LINE> <DEDENT> 
@subdomains.setter <NEW_LINE> def subdomains(self, domains): <NEW_LINE> <INDENT> self._complement.subdomains = [d.complement for d in reversed(domains)] <NEW_LINE> <DEDENT> def base_domains(self): <NEW_LINE> <INDENT> return [d.complement for d in reversed(self._complement.base_domains())] <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self._complement.__eq__(other.complement) <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self.__eq__(other) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return -self._complement.__hash__() <NEW_LINE> <DEDENT> def __str__( self ): <NEW_LINE> <INDENT> if self.is_composite: <NEW_LINE> <INDENT> info = "[" + ",".join([d.name for d in self.subdomains]) + "]" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> info = self.sequence <NEW_LINE> <DEDENT> return "ComplementaryDomain {0}: {1}".format(self.name, info) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self)
Represents a complemented domain. Note that this is always defined in terms of an original domain and does not have the same data members, instead providing an interface to the complementary members.
62599021a8ecb033258720c3
class StorageAccountCheckNameAvailabilityParameters(Model): <NEW_LINE> <INDENT> _validation = { 'name': {'required': True}, } <NEW_LINE> _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, } <NEW_LINE> def __init__(self, name, type="Microsoft.Storage/storageAccounts"): <NEW_LINE> <INDENT> super(StorageAccountCheckNameAvailabilityParameters, self).__init__() <NEW_LINE> self.name = name <NEW_LINE> self.type = type
The parameters used to check the availabity of the storage account name. :param name: :type name: str :param type: Default value: "Microsoft.Storage/storageAccounts" . :type type: str
625990218c3a8732951f73fe
class Visualize: <NEW_LINE> <INDENT> def __init__(self,mid,output_path): <NEW_LINE> <INDENT> conn = pymongo.MongoClient('13.209.73.233', 27017) <NEW_LINE> db = conn.get_database('test') <NEW_LINE> self.collection = db.get_collection('meets') <NEW_LINE> self.oid = ObjectId(mid) <NEW_LINE> result = self.collection.find({"_id": self.oid}, {"_id": False, "word2vec": True}) <NEW_LINE> model = Word2Vec.load(result[0]['word2vec']) <NEW_LINE> meta_file = "w2x_metadata.tsv" <NEW_LINE> placeholder = np.zeros((len(model.wv.index2word), model.vector_size)) <NEW_LINE> if not os.path.isdir(output_path): <NEW_LINE> <INDENT> os.mkdir(output_path) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> filelist = [f for f in os.listdir(output_path)] <NEW_LINE> for f in filelist: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.remove(os.path.join(output_path, f)) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print("엑세스 거부 :",e) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> with open(os.path.join(output_path,meta_file), 'wb') as file_metadata: <NEW_LINE> <INDENT> for i, word in enumerate(model.wv.index2word): <NEW_LINE> <INDENT> placeholder[i] = model[word] <NEW_LINE> if word == '': <NEW_LINE> <INDENT> print("Emply Line, should replecaed by any thing else, or will cause a bug of tensorboard") <NEW_LINE> file_metadata.write("{0}".format('<Empty Line>').encode('utf-8') + b'\n') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> file_metadata.write("{0}".format(word).encode('utf-8') + b'\n') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> sess = tf.InteractiveSession() <NEW_LINE> embedding = tf.Variable(placeholder, trainable = False, name = 'w2x_metadata') <NEW_LINE> tf.global_variables_initializer().run() <NEW_LINE> saver = tf.train.Saver() <NEW_LINE> writer = tf.summary.FileWriter(output_path, sess.graph) <NEW_LINE> config = projector.ProjectorConfig() <NEW_LINE> embed = config.embeddings.add() <NEW_LINE> embed.tensor_name = 'w2x_metadata' <NEW_LINE> embed.metadata_path = meta_file <NEW_LINE> 
projector.visualize_embeddings(writer, config) <NEW_LINE> saver.save(sess, os.path.join(output_path,'w2x_metadata.ckpt')) <NEW_LINE> print('Run `tensorboard --logdir={0}` to run visualize result on tensorboard'.format(output_path)) <NEW_LINE> self.run_str = "tensorboard --logdir={0}".format(output_path) <NEW_LINE> <DEDENT> def run_tensorboard(self): <NEW_LINE> <INDENT> os.system(self.run_str)
- mode_path : stored model path - output_path : observation path - vector_size : Vector size applied during learning
6259902156b00c62f0fb3766
class Researcher(models.Model): <NEW_LINE> <INDENT> person = models.OneToOneField(Person, on_delete=models.CASCADE) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return '%s - %s' % (self.person.__str__(), self.person.user.__str__())
Researcher class represents a model/relationship for the database. It contains all the persons who are researchers, it is a one to one relationship with the Person's relationship.
625990215e10d32532ce4059
class UserProfilePage(object): <NEW_LINE> <INDENT> def get_profile_page_url(self, user): <NEW_LINE> <INDENT> groups = [g.name for g in user.groups.all()] <NEW_LINE> page_url = reverse('home') <NEW_LINE> if 'employee' in groups: <NEW_LINE> <INDENT> page_url = reverse('home') <NEW_LINE> <DEDENT> elif 'employee_manager' in groups: <NEW_LINE> <INDENT> page_url = reverse('home') <NEW_LINE> <DEDENT> elif 'manager' in groups: <NEW_LINE> <INDENT> page_url = reverse('home') <NEW_LINE> <DEDENT> elif 'admin' in groups: <NEW_LINE> <INDENT> page_url = reverse('home') <NEW_LINE> <DEDENT> return page_url
@summary: To get user profile page url by their group
625990219b70327d1c57fc29
class EmbeddingNN(nn.Module): <NEW_LINE> <INDENT> def __init__(self, voc_size, emb_size=300, init_with=None): <NEW_LINE> <INDENT> super(EmbeddingNN, self).__init__() <NEW_LINE> padding_idx = 0 <NEW_LINE> self.voc_size = voc_size <NEW_LINE> self.emb_size = emb_size <NEW_LINE> self.iembeddings = nn.Embedding(self.voc_size, self.emb_size) <NEW_LINE> self.oembeddings = nn.Embedding(self.voc_size, self.emb_size) <NEW_LINE> if init_with is not None: <NEW_LINE> <INDENT> assert init_with.shape == (voc_size, emb_size) <NEW_LINE> self.iembeddings.weight = nn.Parameter(FloatTensor(init_with)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.iembeddings.weight = nn.Parameter(FloatTensor(voc_size, emb_size).uniform_(-1, 1)) <NEW_LINE> <DEDENT> self.oembeddings.weight = nn.Parameter(FloatTensor(voc_size, emb_size).uniform_(-1, 1)) <NEW_LINE> self.iembeddings.weight.requires_grad = True <NEW_LINE> self.oembeddings.weight.requires_grad = True <NEW_LINE> <DEDENT> def forward(self, data): <NEW_LINE> <INDENT> return self.forward_i(data) <NEW_LINE> <DEDENT> def forward_i(self, data): <NEW_LINE> <INDENT> idxs = Variable(LongTensor(data)) <NEW_LINE> idxs = idxs.cuda() if self.iembeddings.weight.is_cuda else idxs <NEW_LINE> return self.iembeddings(idxs) <NEW_LINE> <DEDENT> def forward_o(self, data): <NEW_LINE> <INDENT> idxs = Variable(LongTensor(data)) <NEW_LINE> idxs = idxs.cuda() if self.oembeddings.weight.is_cuda else idxs <NEW_LINE> return self.oembeddings(idxs) <NEW_LINE> <DEDENT> def get_emb_dim(self): <NEW_LINE> <INDENT> return self.emb_size
single hidden layer embedding model
62599021ac7a0e7691f73394
class Tracker: <NEW_LINE> <INDENT> def __init__(self, return_images=True, lookup_tail_size=80, labels=None): <NEW_LINE> <INDENT> self.return_images = return_images <NEW_LINE> self.frame_index = 0 <NEW_LINE> self.labels = labels <NEW_LINE> self.detection_history = [] <NEW_LINE> self.last_detected = {} <NEW_LINE> self.tracklet_count = 0 <NEW_LINE> self.lookup_tail_size = lookup_tail_size <NEW_LINE> <DEDENT> def new_label(self): <NEW_LINE> <INDENT> self.tracklet_count += 1 <NEW_LINE> return self.tracklet_count - 1 <NEW_LINE> <DEDENT> def init_tracklet(self, frame): <NEW_LINE> <INDENT> detections = extract_detections(frame) <NEW_LINE> for i in range(len(detections)): <NEW_LINE> <INDENT> detections[i][0] = self.new_label() <NEW_LINE> <DEDENT> return np.array(detections).reshape((-1, 5)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def prev_detections(self): <NEW_LINE> <INDENT> detections = [] <NEW_LINE> ids = set() <NEW_LINE> for frame in self.detection_history[::-1][:self.lookup_tail_size]: <NEW_LINE> <INDENT> for det in frame: <NEW_LINE> <INDENT> if det[0] not in ids: <NEW_LINE> <INDENT> detections.append(det) <NEW_LINE> ids.add(det[0]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return detection_cast(detections) <NEW_LINE> <DEDENT> def bind_tracklet(self, detections): <NEW_LINE> <INDENT> detections = detections.copy() <NEW_LINE> if detections.size == 0: <NEW_LINE> <INDENT> return detection_cast(detections) <NEW_LINE> <DEDENT> prev_detections = self.prev_detections <NEW_LINE> iou = [] <NEW_LINE> for i, det in enumerate(detections): <NEW_LINE> <INDENT> for prev_det in prev_detections: <NEW_LINE> <INDENT> iou.append([iou_score(det[1:], prev_det[1:]), prev_det[0], i]) <NEW_LINE> <DEDENT> <DEDENT> iou.sort() <NEW_LINE> iou.reverse() <NEW_LINE> used_prev_labels = set() <NEW_LINE> detections[:,0] = -1 <NEW_LINE> for score, prev_label, label in iou: <NEW_LINE> <INDENT> if prev_label not in used_prev_labels and detections[label,0] == -1: <NEW_LINE> <INDENT> detections[label, 0] = 
prev_label <NEW_LINE> used_prev_labels.add(prev_label) <NEW_LINE> <DEDENT> <DEDENT> for i, det in enumerate(detections): <NEW_LINE> <INDENT> if det[0] == -1: <NEW_LINE> <INDENT> detections[i,0] = self.new_label() <NEW_LINE> <DEDENT> <DEDENT> return detection_cast(np.array(detections)) <NEW_LINE> <DEDENT> def save_detections(self, detections): <NEW_LINE> <INDENT> for label in detections[:, 0]: <NEW_LINE> <INDENT> self.last_detected[label] = self.frame_index <NEW_LINE> <DEDENT> <DEDENT> def update_frame(self, frame): <NEW_LINE> <INDENT> if not self.frame_index: <NEW_LINE> <INDENT> detections = self.init_tracklet(frame) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> detections = extract_detections(frame, labels=self.labels) <NEW_LINE> detections = self.bind_tracklet(detections) <NEW_LINE> <DEDENT> self.save_detections(detections) <NEW_LINE> self.detection_history.append(detections) <NEW_LINE> self.frame_index += 1 <NEW_LINE> if self.return_images: <NEW_LINE> <INDENT> return draw_detections(frame, detections) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return detections
Generate detections and build tracklets.
625990211d351010ab8f49c0
class HeadTail(object): <NEW_LINE> <INDENT> def __init__(self, file, max_capture=510): <NEW_LINE> <INDENT> self.file = file <NEW_LINE> self.max_capture = max_capture <NEW_LINE> self.capture_head = '' <NEW_LINE> self.capture_head_len = 0 <NEW_LINE> self.capture_tail = '' <NEW_LINE> <DEDENT> def write(self, data): <NEW_LINE> <INDENT> if self.file: <NEW_LINE> <INDENT> self.file.write(data) <NEW_LINE> <DEDENT> capture_head_left = self.max_capture - self.capture_head_len <NEW_LINE> if capture_head_left > 0: <NEW_LINE> <INDENT> data_len = len(data) <NEW_LINE> if data_len <= capture_head_left: <NEW_LINE> <INDENT> self.capture_head += data <NEW_LINE> self.capture_head_len += data_len <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.capture_head += data[:capture_head_left] <NEW_LINE> self.capture_head_len = self.max_capture <NEW_LINE> self.capture_tail += data[capture_head_left:] <NEW_LINE> self.capture_tail = self.capture_tail[-self.max_capture:] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.capture_tail += data <NEW_LINE> self.capture_tail = self.capture_tail[-self.max_capture:] <NEW_LINE> <DEDENT> <DEDENT> def flush(self): <NEW_LINE> <INDENT> if self.file: <NEW_LINE> <INDENT> self.file.flush() <NEW_LINE> <DEDENT> <DEDENT> def close(self): <NEW_LINE> <INDENT> if self.file: <NEW_LINE> <INDENT> self.file.close() <NEW_LINE> <DEDENT> <DEDENT> def head(self): <NEW_LINE> <INDENT> return self.capture_head <NEW_LINE> <DEDENT> def tail(self): <NEW_LINE> <INDENT> return self.capture_tail <NEW_LINE> <DEDENT> def headtail(self): <NEW_LINE> <INDENT> return self.capture_head + '\n..\n' + self.capture_tail
Capture first part of file write and discard remainder
62599021c432627299fa3e9b
class GetTeamList(REST): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(GetTeamList, self).__init__('getteamlist.do', 3.0, **kwargs) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get(self, **args): <NEW_LINE> <INDENT> return self().GET(args, format='text')
class: veracode.API.admin.GetTeamList params: dynamic, see veracode.SDK.admin.GetTeamList for more info returns: XML data from veracode API
62599021bf627c535bcb235f
class LoggingProjectsSinksCreateRequest(_messages.Message): <NEW_LINE> <INDENT> logSink = _messages.MessageField('LogSink', 1) <NEW_LINE> projectsId = _messages.StringField(2, required=True)
A LoggingProjectsSinksCreateRequest object. Fields: logSink: A LogSink resource to be passed as the request body. projectsId: Part of `projectName`. The resource name of the project to which the sink is bound.
62599021796e427e5384f629
class LocalStBlockListManager(BlockListManager): <NEW_LINE> <INDENT> def __init__(self, root_dm: "LocalStorageManager"): <NEW_LINE> <INDENT> self._root_dm = root_dm <NEW_LINE> <DEDENT> def contains(self, _id: str) -> bool: <NEW_LINE> <INDENT> bl = self._root_dm.get_data()["notebooks"].get(_id) <NEW_LINE> if bl is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> exp = bl["expiration"] <NEW_LINE> now = int(datetime.now().timestamp()) <NEW_LINE> if now > exp: <NEW_LINE> <INDENT> self._delete(_id) <NEW_LINE> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def put(self, _id: str, exp: int): <NEW_LINE> <INDENT> data = self._root_dm.get_data() <NEW_LINE> data["blocklist"][_id] = {"expiration": exp} <NEW_LINE> self._root_dm.save_data(data) <NEW_LINE> <DEDENT> def _delete(self, _id: str): <NEW_LINE> <INDENT> k = "blocklist" <NEW_LINE> data = self._root_dm.get_data() <NEW_LINE> if _id in data[k]: <NEW_LINE> <INDENT> data[k].pop(_id) <NEW_LINE> self._root_dm.save_data(data)
Local Storage block list manager.
625990219b70327d1c57fc2b
class GzipHandler(urllib2.BaseHandler): <NEW_LINE> <INDENT> def http_request(self, request): <NEW_LINE> <INDENT> request.add_header("Accept-Encoding", "gzip, deflate") <NEW_LINE> return request <NEW_LINE> <DEDENT> https_request = http_request <NEW_LINE> def http_response(self, request, response): <NEW_LINE> <INDENT> new_response = response <NEW_LINE> if response.headers.get("Content-Encoding") == "gzip": <NEW_LINE> <INDENT> gzipped = gzip.GzipFile( fileobj=StringIO(response.read()), mode="r") <NEW_LINE> new_response = urllib2.addinfourl( gzipped, response.headers, response.url, response.code) <NEW_LINE> new_response.msg = response.msg <NEW_LINE> <DEDENT> return new_response <NEW_LINE> <DEDENT> https_response = http_response
A handler that enhances urllib2's capabilities with transparent gzipped data handling support.
625990213eb6a72ae038b510
class AlbumRow(Row): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._builder = Gtk.Builder() <NEW_LINE> self._builder.add_from_resource('/org/gnome/Lollypop/AlbumRow.ui') <NEW_LINE> self._builder.connect_signals(self) <NEW_LINE> self._cover = self._builder.get_object('cover') <NEW_LINE> self._header = self._builder.get_object('header') <NEW_LINE> self._artist = self._builder.get_object('artist') <NEW_LINE> self._album = self._builder.get_object('album') <NEW_LINE> Row.__init__(self) <NEW_LINE> <DEDENT> def set_object_id(self, object_id): <NEW_LINE> <INDENT> Row.set_object_id(self, object_id) <NEW_LINE> self._object = Album(self._object_id) <NEW_LINE> <DEDENT> def show_header(self, show): <NEW_LINE> <INDENT> if show: <NEW_LINE> <INDENT> self._header.show() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._header.hide() <NEW_LINE> <DEDENT> <DEDENT> def set_cover(self, surface, tooltip): <NEW_LINE> <INDENT> self._cover.set_from_surface(surface) <NEW_LINE> self._cover.set_tooltip_text(tooltip) <NEW_LINE> <DEDENT> def set_album_and_artist(self, album_id): <NEW_LINE> <INDENT> artist = Lp.albums.get_artist_name(album_id) <NEW_LINE> album = Lp.albums.get_name(album_id) <NEW_LINE> self._artist.set_text(artist) <NEW_LINE> self._album.set_text(album)
A track row with album cover
625990218c3a8732951f7404