content stringlengths 0 1.55M |
|---|
<import_stmt>prodigy<import_from_stmt>prodigy.models.ner EntityRecognizer<import_from_stmt>prodigy.components.preprocess add_tokens<import_from_stmt>prodigy.components.db connect<import_from_stmt>prodigy.util split_string<import_stmt>spacy<import_from_stmt>typing List Optional<line_sep># Recipe decorator with argument annotations: (description, argument type,
# shortcut, type / converter function called on value before it's passed to
# the function). Descriptions are also shown when typing --help.
@prodigy.recipe("ner.silver-to-gold" silver_dataset=("Dataset with binary annotations" "positional" <none> str) gold_dataset=("Name of dataset to save new annotations" "positional" <none> str) spacy_model=("The base model" "positional" <none> str) label=("One or more comma-separated labels" "option" "l" split_string) )<def_stmt>ner_silver_to_gold silver_dataset:str gold_dataset:str spacy_model:str label:Optional[List[str]]=<none> <block_start>"""
Take an existing "silver" dataset with binary accept/reject annotations,
merge the annotations to find the best possible analysis given the
constraints defined in the annotations, and manually edit it to create
a perfect and complete "gold" dataset.
"""<line_sep># Connect to the database using the settings from prodigy.json, check
# that the silver dataset exists and load it
DB=connect()<if_stmt>silver_dataset<not><in>DB<block_start><raise>ValueError("Can't find dataset '{}'.".format(silver_dataset))<block_end>silver_data=DB.get_dataset(silver_dataset)<line_sep># Load the spaCy model
nlp=spacy.load(spacy_model)<if_stmt>label<is><none># Get the labels from the model by looking at the available moves, e.g.
# B-PERSON, I-PERSON, L-PERSON, U-PERSON
<block_start>ner=nlp.get_pipe("ner")<line_sep>label=sorted(ner.labels)<block_end># Initialize Prodigy's entity recognizer model, which uses beam search to
# find all possible analyses and outputs (score, example) tuples
model=EntityRecognizer(nlp label=label)<line_sep># Merge all annotations and find the best possible analyses
stream=model.make_best(silver_data)<line_sep># Tokenize the incoming examples and add a "tokens" property to each
# example. Also handles pre-defined selected spans. Tokenization allows
# faster highlighting, because the selection can "snap" to token boundaries.
stream=add_tokens(nlp stream)<line_sep><return>{"view_id":"ner_manual" # Annotation interface to use
"dataset":gold_dataset # Name of dataset to save annotations
"stream":stream # Incoming stream of examples
"config":{# Additional config settings, mostly for app UI
"lang":nlp.lang "labels":label # Selectable label options
} }<block_end> |
<import_from_stmt>omegaconf OmegaConf<def_stmt>default_detection_train_config # FIXME currently using args for train config, will revisit, perhaps move to Hydra
<block_start>h=OmegaConf.create()<line_sep># dataset
h.skip_crowd_during_training=<true><line_sep># augmentation
h.input_rand_hflip=<true><line_sep>h.train_scale_min=0.1<line_sep>h.train_scale_max=2.0<line_sep>h.autoaugment_policy=<none><line_sep># optimization
h.momentum=0.9<line_sep>h.learning_rate=0.08<line_sep>h.lr_warmup_init=0.008<line_sep>h.lr_warmup_epoch=1.0<line_sep>h.first_lr_drop_epoch=200.0<line_sep>h.second_lr_drop_epoch=250.0<line_sep>h.clip_gradients_norm=10.0<line_sep>h.num_epochs=300<line_sep># regularization l2 loss.
h.weight_decay=4e-5<line_sep>h.lr_decay_method='cosine'<line_sep>h.moving_average_decay=0.9998<line_sep>h.ckpt_var_scope=<none><line_sep><return>h<block_end> |
# -*- coding: utf-8 -*-
"""
Created on Fri May 23 10:17:33 2014
@author: <NAME>
@email: <EMAIL>
"""<import_stmt>sys<import_stmt>logging<import_from_stmt>argparse ArgumentParser RawTextHelpFormatter<import_from_stmt>lib.tools *<import_from_stmt>lib.gtf_store *<line_sep>description="Description:\n\n"+"This tool calculates the PSI (Percentatge Splice In) for the different\n"+"transcripts of a gene.\n"+"It reads a gtf to get transcript-gene relationship and an expression file\n"+"of the different transcripts\n"<line_sep>parser=ArgumentParser(description=description formatter_class=RawTextHelpFormatter add_help=<false>)<line_sep>parser.add_argument("-g" "--gtf-file" help="Input gtf file" required=<true>)<line_sep>parser.add_argument("-e" "--expression-file" required=<true> help="Input expression file")<line_sep>parser.add_argument("-o" "--output-file" required=<true> help="Path and name of the ouput file")<line_sep>parser.add_argument("-m" "--mode" default="INFO" help="to choose from DEBUG, INFO, WARNING, ERROR and CRITICAL")<def_stmt>expression_reader exp_file<block_start>"""
Reads in expression file and returns dict
of transcript expressions and first line.
"""<if_stmt><not>os.path.isfile(exp_file)<block_start>sys.stderr.write("Expression file does not exist. Quiting\n")<line_sep>exit(1)<block_end>expressions={}<with_stmt>open(exp_file 'r')<as>handle<block_start>first_line=nextel(handle).strip()<for_stmt>line handle<block_start>line=line.strip().split('\t')<line_sep>expressions[line[0]]=[float(xp)<for>xp line[1:]]<block_end><block_end><return>expressions first_line<block_end><def_stmt>expression_writer genomeinfo expressions firstline output_file<block_start>"""
Function to write perIsoform inclusion
"""<line_sep>output_file<augadd>'_isoform.psi'<line_sep>entriesnumber=len(expressions[nextel(expressions.__iter__())])<with_stmt>open(output_file 'w')<as>handle<block_start>handle.write(firstline+'\n')<for_stmt>gene,_,_ genomeinfo<block_start>expr_sum=[0<for>_ range(entriesnumber)]<line_sep># collect expression
<for_stmt>transcript gene.sortedTranscripts<block_start><if_stmt>transcript<not><in>expressions<block_start>logger.info(('Expression for transcript "{}" not found. '<concat>'Ignoring it in calculation.').format(transcript))<block_end><else_stmt><block_start>expr_sum=list(map(<lambda>exp_pair:exp_pair[0]+exp_pair[1] zip(expr_sum expressions[transcript])))<block_end><block_end># calculate expression
<if_stmt>0<in>expr_sum<block_start>logger.debug('Gene "{}" has at least one replicate with 0 expression.'.format(gene.name))<line_sep>expr_sum=[y<if>y<else>float('NaN')<for>y expr_sum]<block_end><for_stmt>transcript gene.sortedTranscripts<block_start><if_stmt>transcript<not><in>expressions<block_start><continue><block_end>t_exp=map(<lambda>exp_pair:exp_pair[1]/exp_pair[0] zip(expr_sum expressions[transcript]))<line_sep>handle.write('{};{}\t{}\n'.format(gene.name transcript '\t'.join([str(exp_val)<for>exp_val t_exp])))<block_end><block_end><block_end><block_end><def_stmt>main <block_start>args=parser.parse_args()<line_sep>#Parsing arguments
mode="logging."+args.mode<line_sep>#Setting logging preferences
logger=logging.getLogger(__name__)<line_sep>logger.setLevel(eval(mode))<line_sep>#Setting the level of the loggers in lib
setToolsLoggerLevel(mode)<line_sep>#PREPAIRING GTF
my_genome=Genome()<line_sep>logger.info("Reading GTF data.")<line_sep>fetched_exons=gtf_reader(args.gtf_file logger)<line_sep># Check for empy sequences
<if_stmt>len(fetched_exons)<eq>0<block_start>logger.info("No exons found. Check format and content of your GTF file.")<line_sep>exit(1)<block_end><for_stmt>exon_meta fetched_exons<block_start>my_genome.add_to_genes(exon_meta)<block_end># split non overlapping genes
my_genome.sort_transcripts()<line_sep>my_genome.split_genes()<line_sep>logger.info("Reading Expression data.")<line_sep>trans_expres,sample_names=expression_reader(args.expression_file)<if_stmt><not>trans_expres<block_start>logger.info("No expressions found. Check format and content of your expression file.")<line_sep>exit(1)<block_end># Calculate and write output
logger.info("Calculating inclusion and generating output.")<line_sep>expression_writer(my_genome trans_expres sample_names args.output_file)<block_end><if_stmt>__name__<eq>'__main__'<block_start>main()<block_end> |
<import_from_stmt>django.conf settings<import_from_stmt>django.http HttpResponse<import_from_stmt>django.urls include path<import_from_stmt>django.contrib.flatpages.views flatpage<as>flatpage_view<import_from_stmt>django.apps apps<as>django_apps<import_from_stmt>django_distill distill_url distill_path distill_re_path<def_stmt>test_no_param_view request<block_start><return>HttpResponse(b'test' content_type='application/octet-stream')<block_end><def_stmt>test_positional_param_view request param<block_start><return>HttpResponse(b'test'+param.encode() content_type='application/octet-stream')<block_end><def_stmt>test_named_param_view request param=<none><block_start><return>HttpResponse(b'test'+param.encode() content_type='application/octet-stream')<block_end><def_stmt>test_session_view request<block_start>request.session['test']='test'<line_sep><return>HttpResponse(b'test' content_type='application/octet-stream')<block_end><def_stmt>test_broken_view request# Trigger a normal Python exception when rendering
<block_start>a=1/0<block_end><def_stmt>test_http404_view request<block_start>response=HttpResponse(b'404' content_type='application/octet-stream')<line_sep>response.status_code=404<line_sep><return>response<block_end><def_stmt>test_no_param_func <block_start><return><none><block_end><def_stmt>test_positional_param_func <block_start><return>('12345' )<block_end><def_stmt>test_named_param_func <block_start><return>[{'param':'test'}]<block_end><def_stmt>test_flatpages_func <block_start>Site=django_apps.get_model('sites.Site')<line_sep>current_site=Site.objects.get_current()<line_sep>flatpages=current_site.flatpage_set.filter(registration_required=<false>)<for_stmt>flatpage flatpages<block_start><yield>{'url':flatpage.url}<block_end><block_end>urlpatterns=[distill_url(r'^url/$' test_no_param_view name='url-no-param' distill_func=test_no_param_func distill_file='test') distill_url(r'^url-no-func/$' test_no_param_view name='url-no-param-no-func' distill_file='test') distill_url(r'^url/([\d]+)$' test_positional_param_view name='url-positional-param' distill_func=test_positional_param_func) distill_url(r'^url/(?P<param>[\w]+)$' test_named_param_view name='url-named-param' distill_func=test_named_param_func) path('path/namespace1/' include('tests.namespaced_urls' namespace='test_namespace')) path('path/no-namespace/' include('tests.no_namespaced_urls')) ]<if_stmt>settings.HAS_RE_PATH<block_start>urlpatterns<augadd>[distill_re_path(r'^re_path/$' test_no_param_view name='re_path-no-param' distill_func=test_no_param_func distill_file='test') distill_re_path(r'^re_path-no-func/$' test_no_param_view name='re_path-no-param-no-func' distill_file='test') distill_re_path(r'^re_path/([\d]+)$' test_positional_param_view name='re_path-positional-param' distill_func=test_positional_param_func) distill_re_path(r'^re_path/(?P<param>[\w]+)$' test_named_param_view name='re_path-named-param' distill_func=test_named_param_func) distill_re_path(r'^re_path/broken$' test_broken_view 
name='re_path-broken' distill_func=test_no_param_func) distill_re_path(r'^re_path/ignore-sessions$' test_session_view name='re_path-ignore-sessions' distill_func=test_no_param_func) distill_re_path(r'^re_path/404$' test_http404_view name='re_path-404' distill_status_codes=(404 ) distill_func=test_no_param_func) distill_re_path(r'^re_path/flatpage(?P<url>.+)$' flatpage_view name='re_path-flatpage' distill_func=test_flatpages_func) ]<block_end><if_stmt>settings.HAS_PATH<block_start>urlpatterns<augadd>[distill_path('path/' test_no_param_view name='path-no-param' distill_func=test_no_param_func distill_file='test') distill_path('path-no-func/' test_no_param_view name='path-no-param-no-func' distill_file='test') distill_path('path/<int>' test_positional_param_view name='path-positional-param' distill_func=test_positional_param_func) distill_path('path/<str:param>' test_named_param_view name='path-named-param' distill_func=test_named_param_func) distill_path('path/broken' test_broken_view name='path-broken' distill_func=test_no_param_func) distill_path('path/ignore-sessions' test_session_view name='path-ignore-sessions' distill_func=test_no_param_func) distill_path('path/404' test_http404_view name='path-404' distill_status_codes=(404 ) distill_func=test_no_param_func) distill_path('path/flatpage<path:url>' flatpage_view name='path-flatpage' distill_func=test_flatpages_func) ]<block_end> |
<import_from_future_stmt> print_function<import_stmt>logging<import_stmt>os<import_stmt>json<line_sep>msg_file_path='/mnt/msg/content'<def_stmt>handler event context# request = event['requestContext']
# http = request['http']
<block_start>method=event['requestContext']['http']['method']<if_stmt>method<eq>'GET'<block_start><return>getMessages()<block_end><elif_stmt>method<eq>'POST'<block_start>message=json.loads(event['body'])<line_sep><return>createMessages(message)<block_end><elif_stmt>method<eq>'DELETE'<block_start><return>deleteMessages()<block_end><else_stmt><block_start><return>{'message':'method not supported'}<block_end><block_end><def_stmt>getMessages <block_start><try_stmt><block_start>file=open(msg_file_path 'r')<line_sep>file_text=file.read()<line_sep><return>{'File_Text':file_text}<block_end><except_stmt><block_start>logging.error('unable to read')<line_sep><return>{'message':'unable to load information'}<block_end><block_end><def_stmt>deleteMessages <block_start><try_stmt><block_start>os.remove(msg_file_path)<line_sep><return>{'message':'File Deleted'}<block_end><except_stmt><block_start>logging.error('unable to delete')<line_sep><return>{'message':'unable to load information'}<block_end><block_end><def_stmt>createMessages message<block_start><try_stmt><block_start>file=open(msg_file_path 'a')<line_sep>file.write(message)<line_sep><return>{'appended_text':message}<block_end><except_stmt><block_start>logging.error('unable to write to the file')<line_sep><return>{'message':'unable to load information'}<block_end><block_end> |
"""Whocallid.com search module"""<import_from_future_stmt> print_function<import_from_future_stmt> absolute_import<import_from_stmt>..base PageGrabber<import_from_stmt>...colors.default_colors DefaultBodyColors<as>bc<import_stmt>re<import_stmt>logging<try_stmt><block_start><import_stmt>__builtin__<as>bi<block_end><except_stmt>BaseException<block_start><import_stmt>builtins<as>bi<block_end><class_stmt>WhoCallIdGrabber(PageGrabber)<block_start>"""
WhoCallID sales scraper for reverse telephone lookups
"""<def_stmt>get_name self<block_start>"""
Grab the users name
"""<line_sep>name="Unknown"<try_stmt><block_start>name=self.soup.find('h2' attrs={'class':'name'})<if_stmt>name<block_start>name=name.text.strip()<line_sep>print(" ["+bc.CGRN+"+"+bc.CEND+"] "+bc.CRED+"Name: "+bc.CEND+str(name))<block_end><block_end><except_stmt>BaseException<block_start><pass><block_end><finally_stmt><block_start><return>name<block_end><block_end><def_stmt>get_location self<block_start>"""
Get the location
"""<line_sep>location="Unknown"<try_stmt><block_start>location=self.soup.find('h3' attrs={'class':'location'})<if_stmt>location<block_start>location=location.text.strip()<line_sep>print(" ["+bc.CGRN+"+"+bc.CEND+"] "+bc.CRED+"Location: "+bc.CEND+str(location))<block_end><block_end><except_stmt>BaseException<block_start><pass><block_end><finally_stmt><block_start><return>location<block_end><block_end><def_stmt>get_phone_type self<block_start>"""
Get the phone type
"""<line_sep>phone_type="Unknown"<try_stmt><block_start>phone_type=self.soup.find("img").attrs['alt']<if_stmt>phone_type<block_start>phone_type=phone_type.strip()<line_sep>print(" ["+bc.CGRN+"+"+bc.CEND+"] "+bc.CRED+"Phone Type: "+bc.CEND+str(phone_type))<block_end><block_end><except_stmt>BaseException<block_start><pass><block_end><finally_stmt><block_start><return>phone_type<block_end><block_end><def_stmt>get_carrier self phone_number<block_start>"""
Get the phone carrier info
"""<line_sep>carrier=""<try_stmt><block_start>self.url="https://whocalld.com/+1{}?carrier".format(phone_number)<line_sep>self.source=self.get_source(self.url)<line_sep>self.soup=self.get_dom(self.source)<line_sep>carrier=soup.find('span' attrs={'class':'carrier'})<block_end><except_stmt>BaseException<block_start><pass><block_end><finally_stmt><block_start><return>carrier<block_end><block_end><def_stmt>process_carrier self carrier<block_start>"""
Take the carrier info and process it
"""<try_stmt><block_start><if_stmt>carrier<block_start>carrier=carrier.text<line_sep>print(" ["+bc.CGRN+"+"+bc.CEND+"] "+bc.CRED+"Carrier: "+bc.CEND+str(carrier))<block_end><else_stmt><block_start>carrier=""<block_end><block_end><except_stmt>BaseException<block_start>carrier=""<block_end><finally_stmt><block_start><return>carrier<block_end><block_end><def_stmt>get_city self<block_start>"""
Grab the city info
"""<line_sep>city=""<try_stmt><block_start>city=self.soup.find('span' attrs={'class':'city'})<if_stmt>city<block_start>city=city.text<line_sep>print(" ["+bc.CGRN+"+"+bc.CEND+"] "+bc.CRED+"City: "+bc.CEND+str(city))<block_end><block_end><except_stmt>BaseException<block_start><pass><block_end><finally_stmt><block_start><return>city<block_end><block_end><def_stmt>get_state self<block_start>"""
Grab the state info
"""<line_sep>state=""<try_stmt><block_start>state=self.soup.find('span' attrs={'class':'state'})<if_stmt>state<block_start>state=state.text<line_sep>print(" ["+bc.CGRN+"+"+bc.CEND+"] "+bc.CRED+"State: "+bc.CEND+str(state))<block_end><block_end><except_stmt>BaseException<block_start><pass><block_end><finally_stmt><block_start><return>state<block_end><block_end><def_stmt>get_time self<block_start>"""
Grab time info
"""<line_sep>time=""<try_stmt><block_start>time=self.soup.find('span' attrs={'class':'time'})<if_stmt>time<block_start>time=time.text<line_sep>print(" ["+bc.CGRN+"+"+bc.CEND+"] "+bc.CRED+"Time: "+bc.CEND+str(time))<block_end><block_end><except_stmt>BaseException<block_start><pass><block_end><finally_stmt><block_start><return>time<block_end><block_end><def_stmt>get_info self phone_number lookup<block_start>"""
Request, scrape and return values found
"""<line_sep>print("["+bc.CPRP+"?"+bc.CEND+"] "+bc.CCYN+"WhoCalld"+bc.CEND)<line_sep># Get phone info
self.url='https://whocalld.com/+1{}'.format(phone_number)<line_sep>self.source=self.get_source(self.url)<line_sep>self.soup=self.get_dom(self.source)<try_stmt><block_start><if_stmt>self.soup.body.find_all(string=re.compile('.*{0}.*'.format('country')) recursive=<true>)<block_start>print(" ["+bc.CRED+"X"+bc.CEND+"] "+bc.CYLW+"No WhoCallID data returned\n"+bc.CEND)<line_sep><return><block_end><block_end><except_stmt><block_start>print(" ["+bc.CRED+"X"+bc.CEND+"] "+bc.CYLW+"Unable to extract data. Is the site online?\n"+bc.CEND)<block_end>name=self.get_name()<line_sep>location=self.get_location()<line_sep>phone_type=self.get_phone_type()<line_sep>carrier=self.get_carrier(phone_number)<line_sep>carrier=self.process_carrier(carrier)<line_sep>city=self.get_city()<line_sep>state=self.get_state()<line_sep>time=self.get_time()<line_sep>self.info_dict.update({"carrier":carrier "city":city "location":location "name":name "phone_type":phone_type "state":state "time":time})<line_sep>print()<line_sep><return>self.info_dict<block_end><block_end> |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add force_screenshot to alerts/reports
Revision ID: bb38f40aa3ff
Revises: <PASSWORD>
Create Date: 2021-12-10 19:25:29.802949
"""<line_sep># revision identifiers, used by Alembic.
revision="bb38f40aa3ff"<line_sep>down_revision="3<PASSWORD>"<import_stmt>sqlalchemy<as>sa<import_from_stmt>alembic op<import_from_stmt>sqlalchemy.ext.declarative declarative_base<import_from_stmt>superset db<line_sep>Base=declarative_base()<class_stmt>ReportSchedule(Base)<block_start>__tablename__="report_schedule"<line_sep>id=sa.Column(sa.Integer primary_key=<true>)<line_sep>type=sa.Column(sa.String(50) nullable=<false>)<line_sep>force_screenshot=sa.Column(sa.Boolean default=<false>)<block_end><def_stmt>upgrade <block_start><with_stmt>op.batch_alter_table("report_schedule")<as>batch_op<block_start>batch_op.add_column(sa.Column("force_screenshot" sa.Boolean() default=<false>))<block_end>bind=op.get_bind()<line_sep>session=db.Session(bind=bind)<for_stmt>report session.query(ReportSchedule).all()# Update existing alerts that send chart screenshots so that the cache is
# bypassed. We don't turn this one for dashboards because (1) it's currently
# not supported but also because (2) it can be very expensive.
<block_start>report.force_screenshot=report.type<eq>"Alert"<and>report.chart_id<is><not><none><block_end>session.commit()<block_end><def_stmt>downgrade <block_start><with_stmt>op.batch_alter_table("report_schedule")<as>batch_op<block_start>batch_op.drop_column("force_screenshot")<block_end><block_end> |
# -*- coding: utf-8 -*-
<import_stmt>json<import_stmt>requests<line_sep>"""
ocr.space
"""<def_stmt>get_text_from_image image_data api_key='<KEY>' overlay=<false> language='chs'<block_start>"""
CR.space API request with local file.
:param image_data: image's base64 encoding.
:param overlay: Is OCR.space overlay required in your response.
Defaults to False.
:param api_key: OCR.space API key.
Defaults to 'helloworld'.
:param language: Language code to be used in OCR.
List of available language codes can be found on https://ocr.space/OCRAPI
Defaults to 'en'.
:return: Result in JSON format.
"""<line_sep>payload={'isOverlayRequired':overlay 'apikey':api_key 'language':language }<line_sep>r=requests.post('https://api.ocr.space/parse/image' files={'image.png':image_data} data=payload )<line_sep>result=json.loads(r.content)<if_stmt>(result['OCRExitCode']<eq>1)<block_start><return>result['ParsedResults'][0]['ParsedText']<block_end>print(result['ErrorMessage'])<line_sep><return>""<block_end> |
# Copyright 2017 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides the stamp info file containing the Bazel non-volatile keys
"""<def_stmt>_impl ctx<block_start>output=ctx.outputs.out<line_sep>ctx.actions.run_shell(outputs=[output] inputs=[ctx.info_file] command="cp {src} {dst}".format(src=ctx.info_file.path dst=output.path ) )<block_end>stamp_info=rule(implementation=_impl outputs={# The stamp file.
"out":"%{name}.txt" } )<line_sep> |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
# ---------------------------------------------------------
<class_stmt>LCS<block_start>""" Compute the Longest Common Subsequence (LCS) of two given string."""<def_stmt>__init__ self str_m str_n<block_start>self.str_m_len=len(str_m)<line_sep>self.str_n_len=len(str_n)<line_sep>dp_table=self._construct_dp_table(str_m str_n)<line_sep>self._lcs_len=dp_table[self.str_m_len][self.str_n_len]<line_sep>self._lcs=self._find_lcs_str(str_m str_n dp_table)<block_end><def_stmt>_construct_dp_table self str_m str_n<block_start>m=self.str_m_len<line_sep>n=self.str_n_len<line_sep># Initialize DP table
dp=[[0<for>j range(n+1)]<for>i range(m+1)]<for_stmt>i range(1 m+1)<block_start><for_stmt>j range(1 n+1)# Case 1: if char1 == char2
<block_start><if_stmt>str_m[i-1]<eq>str_n[j-1]<block_start>dp[i][j]=1+dp[i-1][j-1]<block_end># Case 2: take the max of the values in the top and left cell
<else_stmt><block_start>dp[i][j]=max(dp[i-1][j] dp[i][j-1])<block_end><block_end><block_end><return>dp<block_end><def_stmt>_find_lcs_str self str_m str_n dp_table<block_start>m=self.str_m_len<line_sep>n=self.str_n_len<line_sep>lcs=""<while_stmt>m<g>0<and>n<g>0# same char
<block_start><if_stmt>str_m[m-1]<eq>str_n[n-1]# prepend the character
<block_start>lcs=str_m[m-1]+lcs<line_sep>m<augsub>1<line_sep>n<augsub>1<block_end># top cell > left cell
<elif_stmt>dp_table[m-1][n]<g>dp_table[m][n-1]<block_start>m<augsub>1<block_end><else_stmt><block_start>n<augsub>1<block_end><block_end><return>lcs<block_end><def_stmt>get_len self<block_start><return>self._lcs_len<block_end><def_stmt>get_str self<block_start><return>self._lcs<block_end><block_end> |
<import_stmt>bpy<line_sep>bpy.context.camera.sensor_width=4.8<line_sep>bpy.context.camera.sensor_height=3.6<line_sep>bpy.context.camera.lens=4.20<line_sep>bpy.context.camera.sensor_fit='HORIZONTAL'<line_sep> |
# -*- coding: utf-8 -*-
###
# (C) Copyright (2012-2017) Hewlett Packard Enterprise Development LP
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
###
<import_from_stmt>pprint pprint<import_from_stmt>config_loader try_load_from_file<import_from_stmt>hpOneView.oneview_client OneViewClient<line_sep>config={"ip":"<oneview_ip>" "credentials":{"userName":"<username>" "password":"<password>"}}<line_sep># Try load config from a file (if there is a config file)
config=try_load_from_file(config)<line_sep>oneview_client=OneViewClient(config)<line_sep>datacenter_information={"name":"MyDatacenter" "width":5000 "depth":5000}<line_sep># Add a Datacenter
datacenter_added=oneview_client.datacenters.add(datacenter_information)<line_sep>print("\nAdded Datacenter '{name}' successfully\n".format(**datacenter_added))<line_sep># Retrieve Datacenter by URI
datacenter=oneview_client.datacenters.get(datacenter_added['uri'])<line_sep>print("\nGet Datacenter by URI: retrieved '{name}' successfully\n".format(**datacenter))<line_sep># Update the Datacenter
datacenter['name']="New Datacenter Name"<line_sep>datacenter=oneview_client.datacenters.update(datacenter)<line_sep>print("\nDatacenter '{name}' updated successfully\n".format(**datacenter))<line_sep># Get the Datacenter by name
datacenter_list=oneview_client.datacenters.get_by('name' "New Datacenter Name")<line_sep>print("\nGet Datacenter device by name: '{name}'\n".format(**datacenter))<line_sep># Get the Datacenter visual content
print("Getting the Datacenter visual content...")<line_sep>datacenter_visual_content=oneview_client.datacenters.get_visual_content(datacenter['uri'])<line_sep>pprint(datacenter_visual_content)<line_sep># Remove added Datacenter
oneview_client.datacenters.remove(datacenter)<line_sep>print("\nSuccessfully removed the datacenter")<line_sep># Add a datacenter again and call Remove All
datacenter_added=oneview_client.datacenters.add(datacenter_information)<line_sep>oneview_client.datacenters.remove_all(filter="name matches '%'")<line_sep>print("\nSuccessfully removed all datacenters")<line_sep> |
<def_stmt>data_for_fitting * building_id date<block_start>"""
Retrieves data for fitting from the previous business day
taking into account holidays
"""<line_sep>lease_start=<none><while_stmt>lease_start<is><none># Previous business day according to Pandas (might be a holiday)
<block_start>previous_bday=pd.to_datetime(date)-BDay(1)<line_sep># If a holiday, this will return None
lease_start=(db().execute(building_daily_stats.select().where(building_daily_stats.c.building_id<eq>building_id).where(building_daily_stats.c.date<eq>previous_bday)).fetchone().lease_obligations_start_at)<line_sep>date=previous_bday<block_end># Retrieve 8 hours of data from the lease start
<return>load_sensor_values(building_id=building_id start_time=lease_start end_time=lease_start+timedelta(hours=8) )<block_end> |
Environment(loader=templateLoader autoescape=fake_func())<import_from_stmt>flask Flask request make_response escape<import_from_stmt>jinja2 Environment select_autoescape FileSystemLoader Template<line_sep>app=Flask(__name__)<line_sep>loader=FileSystemLoader(searchpath="templates/")<line_sep>unsafe_env=Environment(loader=loader)<line_sep>safe1_env=Environment(loader=loader autoescape=<true>)<line_sep>safe2_env=Environment(loader=loader autoescape=select_autoescape())<def_stmt>render_response_from_env env<block_start>name=request.args.get('name' '')<line_sep>template=env.get_template('template.html')<line_sep><return>make_response(template.render(name=name))<block_end>@app.route('/unsafe')<def_stmt>unsafe <block_start><return>render_response_from_env(unsafe_env)<block_end>@app.route('/safe1')<def_stmt>safe1 <block_start><return>render_response_from_env(safe1_env)<block_end>@app.route('/safe2')<def_stmt>safe2 <block_start><return>render_response_from_env(safe2_env)<block_end># Explicit autoescape
e=Environment(loader=loader autoescape=select_autoescape(['html' 'htm' 'xml']))<line_sep># GOOD
# Additional checks with flow.
auto=select_autoescape<line_sep>e=Environment(autoescape=auto)# GOOD
z=0<line_sep>e=Environment(autoescape=z)# BAD
E=Environment<line_sep>E()# BAD
E(autoescape=z)# BAD
E(autoescape=auto)# GOOD
E(autoescape=0+1)# GOOD
<def_stmt>checked cond=<false><block_start><if_stmt>cond<block_start>e=Environment(autoescape=cond)<block_end><block_end># GOOD
unsafe_tmpl=Template('Hello {{ name }}!')<line_sep>safe1_tmpl=Template('Hello {{ name }}!' autoescape=<true>)<line_sep>safe2_tmpl=Template('Hello {{ name }}!' autoescape=select_autoescape())<line_sep> |
# -*- coding: utf-8 -*-
r"""Testing code for the (Python) bandit library.
Testing is done via the Testify package:
https://github.com/Yelp/Testify
This package includes:
* Test cases/test setup files
* Tests for bandit/epsilon: :mod:`moe.tests.bandit.epsilon`
* Tests for bandit/ucb: :mod:`moe.tests.bandit.ucb`
* Tests for bandit/bla: :mod:`moe.tests.bandit.bla`
This package includes:
* Test cases/test setup files
* Tests for classes and utils in :mod:`moe.bandit`
**Files in this package**
* :mod:`moe.tests.bandit.bandit_interface_test`: tests for :mod:`moe.bandit.interfaces.bandit_interface.BanditInterface`
* :mod:`moe.tests.bandit.bandit_test_case`: base test case for bandit tests with a simple integration test case
* :mod:`moe.tests.bandit.linkers_test`: tests for :mod:`moe.bandit.linkers`
* :mod:`moe.tests.bandit.utils_test`: tests for :mod:`moe.bandit.utils`
"""<line_sep> |
# terrascript/resource/drarko/mssql.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:21:59 UTC)
<import_stmt>terrascript<class_stmt>mssql_login(terrascript.Resource)<block_start><pass><block_end>__all__=["mssql_login" ]<line_sep> |
"""
A pure python implementation of the standard module library cmath.
"""<import_stmt>math<line_sep>" These are constants from float.h"<line_sep>_FLT_RADIX=2<line_sep>_DBL_MIN=2.2250738585072014e-308<line_sep>_DBL_MAX=1.7976931348623157e+308<line_sep>_DBL_EPSILON=2.2204460492503131e-16<line_sep>_DBL_MANT_DIG=53<line_sep>_CM_SCALE_UP=2<times>int(_DBL_MANT_DIG/2)+1<line_sep>_CM_SCALE_DOWN=int(-(_CM_SCALE_UP+1)/2)<line_sep>_LOG_2=0.6931471805599453094<line_sep>_LOG_10=2.302585092994045684<line_sep>_LARGE_INT=2305843009213693951<line_sep>_LOG_LARGE_INT=18.3628297355029<line_sep>_LARGE_DOUBLE=4.49423283715579e+307<line_sep>_LOG_LARGE_DOUBLE=307.652655568589<line_sep>_SQRT_LARGE_DOUBLE=6.70390396497130e+153<line_sep>_SQRT_DBL_MIN=1.49166814624004e-154<line_sep>e=2.7182818284590452354<line_sep>pi=3.14159265358979323846<line_sep>tau=2<times>pi<line_sep>inf=float("inf")<line_sep>infj=complex(0 inf)<line_sep>nan=float("nan")<line_sep>nanj=complex(0 nan)<def_stmt>_make_complex x<block_start><if_stmt>isinstance(x complex)<block_start><return>x<block_end><try_stmt><block_start>z=x.__complex__()<block_end><except_stmt>AttributeError<block_start><try_stmt><block_start>z=complex(x.__float__())<block_end><except_stmt>AttributeError<block_start><raise>TypeError<block_end><block_end><if_stmt>isinstance(z complex)<block_start><return>z<block_end><raise>TypeError<block_end><def_stmt>_special_type x<block_start>ST_NINF,ST_NEG,ST_NZERO,ST_PZERO,ST_POS,ST_PINF,ST_NAN=range(7)<if_stmt>math.isnan(x)<block_start><return>ST_NAN<block_end><if_stmt>math.isfinite(x)<block_start><if_stmt>x<ne>0<block_start><if_stmt>math.copysign(1 x)<eq>1<block_start><return>ST_POS<block_end><return>ST_NEG<block_end><if_stmt>math.copysign(1 x)<eq>1<block_start><return>ST_PZERO<block_end><return>ST_NZERO<block_end><if_stmt>math.copysign(1 x)<eq>1<block_start><return>ST_PINF<block_end><return>ST_NINF<block_end><def_stmt>rect r phi<block_start>_rect_special=[[inf+nanj <none> -inf complex(-float("inf") -0.0) <none> inf+nanj inf+nanj] [nan+nanj <none> <none> <none> <none> nan+nanj 
nan+nanj] [0 <none> complex(-0.0 0.0) complex(-0.0 -0.0) <none> 0 0] [0 <none> complex(0.0 -0.0) 0 <none> 0 0] [nan+nanj <none> <none> <none> <none> nan+nanj nan+nanj] [inf+nanj <none> complex(float("inf") -0.0) inf <none> inf+nanj inf+nanj] [nan+nanj nan+nanj nan nan nan+nanj nan+nanj nan+nanj]]<if_stmt><not>math.isfinite(r)<or><not>math.isfinite(phi)<block_start><if_stmt>math.isinf(phi)<and><not>math.isnan(r)<and>r<ne>0<block_start><raise>ValueError<block_end><if_stmt>math.isinf(r)<and>math.isfinite(phi)<and>phi<ne>0<block_start><if_stmt>r<g>0<block_start><return>complex(math.copysign(inf math.cos(phi)) math.copysign(inf math.sin(phi)))<block_end><return>complex(-math.copysign(inf math.cos(phi)) -math.copysign(inf math.sin(phi)))<block_end><return>_rect_special[_special_type(r)][_special_type(phi)]<block_end><return>complex(r<times>math.cos(phi) r<times>math.sin(phi))<block_end><def_stmt>phase x<block_start>z=complex(x)<line_sep><return>math.atan2(z.imag z.real)<block_end><def_stmt>polar x<block_start><return>abs(x) phase(x)<block_end><def_stmt>exp x<block_start>z=_make_complex(x)<line_sep>exp_special=[[0+0j <none> complex(0 -0.0) 0+0j <none> 0+0j 0+0j] [nan+nanj <none> <none> <none> <none> nan+nanj nan+nanj] [nan+nanj <none> 1-0j 1+0j <none> nan+nanj nan+nanj] [nan+nanj <none> 1-0j 1+0j <none> nan+nanj nan+nanj] [nan+nanj <none> <none> <none> <none> nan+nanj nan+nanj] [inf+nanj <none> complex(float("inf") -0.0) inf <none> inf+nanj inf+nanj] [nan+nanj nan+nanj complex(float("nan") -0.0) nan nan+nanj nan+nanj nan+nanj]]<if_stmt><not>isfinite(z)<block_start><if_stmt>math.isinf(z.real)<and>math.isfinite(z.imag)<and>z.imag<ne>0<block_start><if_stmt>z.real<g>0<block_start>ret=complex(math.copysign(inf math.cos(z.imag)) math.copysign(inf math.sin(z.imag)))<block_end><else_stmt><block_start>ret=complex(math.copysign(0 math.cos(z.imag)) math.copysign(0 
math.sin(z.imag)))<block_end><block_end><else_stmt><block_start>ret=exp_special[_special_type(z.real)][_special_type(z.imag)]<block_end><if_stmt>math.isinf(z.imag)<and>(math.isfinite(z.real)<or>(math.isinf(z.real)<and>z.real<g>0))<block_start><raise>ValueError<block_end><return>ret<block_end><if_stmt>z.real<g>_LOG_LARGE_DOUBLE<block_start>ret=e<times>rect(math.exp(z.real-1) z.imag)<block_end><else_stmt><block_start>ret=rect(math.exp(z.real) z.imag)<block_end><if_stmt>math.isinf(ret.real)<or>math.isinf(ret.imag)<block_start><raise>OverflowError<block_end><return>ret<block_end><def_stmt>_log z<block_start>abs_x=abs(z.real)<line_sep>abs_y=abs(z.imag)<if_stmt>abs_x<g>_LARGE_INT<or>abs_y<g>_LARGE_INT<block_start><return>complex(math.log(math.hypot(abs_x/2 abs_y/2))+_LOG_2 math.atan2(z.imag z.real))<block_end><if_stmt>abs_x<l>_DBL_MIN<and>abs_y<l>_DBL_MIN<block_start><if_stmt>abs_x<g>0<or>abs_y<g>0<block_start><return>complex(math.log(math.hypot(math.ldexp(abs_x _DBL_MANT_DIG) math.ldexp(abs_y _DBL_MANT_DIG)))-_DBL_MANT_DIG<times>_LOG_2 math.atan2(z.imag z.real))<block_end><raise>ValueError<block_end>rad,phi=polar(z)<line_sep><return>complex(math.log(rad) phi)<block_end><def_stmt>log x base=e<block_start><if_stmt>base<ne>e<block_start><return>_log(_make_complex(x))/_log(_make_complex(base))<block_end><return>_log(_make_complex(x))<block_end><def_stmt>log10 x<block_start>z=_log(_make_complex(x))<line_sep><return>complex(z.real/_LOG_10 z.imag/_LOG_10)<block_end><def_stmt>sqrt x<block_start>sqrt_special=[[inf-infj 0-infj 0-infj infj infj inf+infj nan+infj] [inf-infj <none> <none> <none> <none> inf+infj nan+nanj] [inf-infj <none> 0-0j 0+0j <none> inf+infj nan+nanj] [inf-infj <none> 0-0j 0+0j <none> inf+infj nan+nanj] [inf-infj <none> <none> <none> <none> inf+infj nan+nanj] [inf-infj complex(float("inf") -0.0) complex(float("inf") -0.0) inf inf inf+infj inf+nanj] [inf-infj nan+nanj nan+nanj nan+nanj nan+nanj inf+infj 
nan+nanj]]<line_sep>z=_make_complex(x)<if_stmt>math.isinf(z.real)<or>math.isinf(z.imag)<block_start><return>sqrt_special[_special_type(z.real)][_special_type(z.imag)]<block_end>abs_x,abs_y=abs(z.real) abs(z.imag)<if_stmt>abs_x<l>_DBL_MIN<and>abs_y<l>_DBL_MIN<block_start><if_stmt>abs_x<g>0<or>abs_y<g>0<block_start>abs_x=math.ldexp(abs_x _CM_SCALE_UP)<line_sep>s=math.ldexp(math.sqrt(abs_x+math.hypot(abs_x math.ldexp(abs_y _CM_SCALE_UP))) _CM_SCALE_DOWN)<block_end><else_stmt><block_start><return>complex(0 z.imag)<block_end><block_end><else_stmt><block_start>abs_x<augdiv>8<line_sep>s=2<times>math.sqrt(abs_x+math.hypot(abs_x abs_y/8))<block_end><if_stmt>z.real<ge>0<block_start><return>complex(s math.copysign(abs_y/(2<times>s) z.imag))<block_end><return>complex(abs_y/(2<times>s) math.copysign(s z.imag))<block_end><def_stmt>acos x<block_start>_acos_special=[[3<times>pi/4+infj pi+infj pi+infj pi-infj pi-infj 3<times>pi/4-infj nan+infj] [pi/2+infj <none> <none> <none> <none> pi/2-infj nan+nanj] [pi/2+infj <none> <none> <none> <none> pi/2-infj pi/2+nanj] [pi/2+infj <none> <none> <none> <none> pi/2-infj pi/2+nanj] [pi/2+infj <none> <none> <none> <none> pi/2-infj nan+nanj] [pi/4+infj infj infj 0.0-infj 0.0-infj pi/4-infj nan+infj] [nan+infj nan+nanj nan+nanj nan+nanj nan+nanj nan-infj nan+nanj]]<line_sep>z=_make_complex(x)<if_stmt><not>isfinite(z)<block_start><return>_acos_special[_special_type(z.real)][_special_type(z.imag)]<block_end><if_stmt>abs(z.real)<g>_LARGE_DOUBLE<or>abs(z.imag)<g>_LARGE_DOUBLE<block_start><if_stmt>z.real<l>0<block_start>imag=-math.copysign(math.log(math.hypot(z.real/2 z.imag/2))+2<times>_LOG_2 z.imag)<block_end><else_stmt><block_start>imag=math.copysign(math.log(math.hypot(z.real/2 z.imag/2))+2<times>_LOG_2 -z.imag)<block_end><return>complex(math.atan2(abs(z.imag) z.real) imag)<block_end>s1=sqrt(complex(1.0-z.real -z.imag))<line_sep>s2=sqrt(complex(1.0+z.real z.imag))<line_sep><return>complex(2<times>math.atan2(s1.real s2.real) 
math.asinh(s2.real<times>s1.imag-s2.imag<times>s1.real))<block_end><def_stmt>asin x<block_start>z=_make_complex(x)<line_sep>z=asinh(complex(-z.imag z.real))<line_sep><return>complex(z.imag -z.real)<block_end><def_stmt>atan x<block_start>z=_make_complex(x)<line_sep>z=atanh(complex(-z.imag z.real))<line_sep><return>complex(z.imag -z.real)<block_end><def_stmt>cos x<block_start>z=_make_complex(x)<line_sep><return>cosh(complex(-z.imag z.real))<block_end><def_stmt>sin x<block_start>z=_make_complex(x)<line_sep>z=sinh(complex(-z.imag z.real))<line_sep><return>complex(z.imag -z.real)<block_end><def_stmt>tan x<block_start>z=_make_complex(x)<line_sep>z=tanh(complex(-z.imag z.real))<line_sep><return>complex(z.imag -z.real)<block_end><def_stmt>acosh x<block_start>z=_make_complex(x)<if_stmt>abs(z.real)<g>_LARGE_DOUBLE<or>abs(z.imag)<g>_LARGE_DOUBLE<block_start><return>complex(math.log(math.hypot(z.real/2 z.imag/2))+2<times>_LOG_2 math.atan2(z.imag z.real))<block_end>s1=sqrt(complex(z.real-1 z.imag))<line_sep>s2=sqrt(complex(z.real+1 z.imag))<line_sep><return>complex(math.asinh(s1.real<times>s2.real+s1.imag<times>s2.imag) 2<times>math.atan2(s1.imag s2.real))<block_end><def_stmt>asinh x<block_start>_asinh_special=[[-inf-1j<times>pi/4 complex(-float("inf") -0.0) complex(-float("inf") -0.0) complex(-float("inf") 0.0) complex(-float("inf") 0.0) -inf+1j<times>pi/4 -inf+nanj] [-inf-1j<times>pi/2 <none> <none> <none> <none> -inf+1j<times>pi/2 nan+nanj] [-inf-1j<times>pi/2 <none> <none> <none> <none> -inf+1j<times>pi/2 nan+nanj] [inf-1j<times>pi/2 <none> <none> <none> <none> inf+1j<times>pi/2 nan+nanj] [inf-1j<times>pi/2 <none> <none> <none> <none> inf+1j<times>pi/2 nan+nanj] [inf-1j<times>pi/4 complex(float("inf") -0.0) complex(float("inf") -0.0) inf inf inf+1j<times>pi/4 inf+nanj] [inf+nanj nan+nanj complex(float("nan") -0.0) nan nan+nanj inf+nanj 
nan+nanj]]<line_sep>z=_make_complex(x)<if_stmt><not>isfinite(z)<block_start><return>_asinh_special[_special_type(z.real)][_special_type(z.imag)]<block_end><if_stmt>abs(z.real)<g>_LARGE_DOUBLE<or>abs(z.imag)<g>_LARGE_DOUBLE<block_start><if_stmt>z.imag<ge>0<block_start>real=math.copysign(math.log(math.hypot(z.imag/2 z.real/2))+2<times>_LOG_2 z.real)<block_end><else_stmt><block_start>real=-math.copysign(math.log(math.hypot(z.imag/2 z.real/2))+2<times>_LOG_2 -z.real)<block_end><return>complex(real math.atan2(z.imag abs(z.real)))<block_end>s1=sqrt(complex(1+z.imag -z.real))<line_sep>s2=sqrt(complex(1-z.imag z.real))<line_sep><return>complex(math.asinh(s1.real<times>s2.imag-s2.real<times>s1.imag) math.atan2(z.imag s1.real<times>s2.real-s1.imag<times>s2.imag))<block_end><def_stmt>atanh x<block_start>_atanh_special=[[complex(-0.0 -pi/2) complex(-0.0 -pi/2) complex(-0.0 -pi/2) complex(-0.0 pi/2) complex(-0.0 pi/2) complex(-0.0 pi/2) complex(-0.0 float("nan"))] [complex(-0.0 -pi/2) <none> <none> <none> <none> complex(-0.0 pi/2) nan+nanj] [complex(-0.0 -pi/2) <none> <none> <none> <none> complex(-0.0 pi/2) complex(-0.0 float("nan"))] [-1j<times>pi/2 <none> <none> <none> <none> 1j<times>pi/2 nanj] [-1j<times>pi/2 <none> <none> <none> <none> 1j<times>pi/2 nan+nanj] [-1j<times>pi/2 -1j<times>pi/2 -1j<times>pi/2 1j<times>pi/2 1j<times>pi/2 1j<times>pi/2 nanj] [-1j<times>pi/2 nan+nanj nan+nanj nan+nanj nan+nanj 1j<times>pi/2 nan+nanj]]<line_sep>z=_make_complex(x)<if_stmt><not>isfinite(z)<block_start><return>_atanh_special[_special_type(z.real)][_special_type(z.imag)]<block_end><if_stmt>z.real<l>0<block_start><return>-atanh(-z)<block_end>ay=abs(z.imag)<if_stmt>z.real<g>_SQRT_LARGE_DOUBLE<or>ay<g>_SQRT_LARGE_DOUBLE<block_start>hypot=math.hypot(z.real/2 z.imag/2)<line_sep><return>complex(z.real/4/hypot/hypot -math.copysign(pi/2 
-z.imag))<block_end><if_stmt>z.real<eq>1<and>ay<l>_SQRT_DBL_MIN<block_start><if_stmt>ay<eq>0<block_start><raise>ValueError<block_end><return>complex(-math.log(math.sqrt(ay)/math.sqrt(math.hypot(ay 2))) math.copysign(math.atan2(2 -ay)/2 z.imag))<block_end><return>complex(math.log1p(4<times>z.real/((1-z.real)<times>(1-z.real)+ay<times>ay))/4 -math.atan2(-2<times>z.imag (1-z.real)<times>(1+z.real)-ay<times>ay)/2)<block_end><def_stmt>cosh x<block_start>_cosh_special=[[inf+nanj <none> inf complex(float("inf") -0.0) <none> inf+nanj inf+nanj] [nan+nanj <none> <none> <none> <none> nan+nanj nan+nanj] [nan <none> 1 complex(1 -0.0) <none> nan nan] [nan <none> complex(1 -0.0) 1 <none> nan nan] [nan+nanj <none> <none> <none> <none> nan+nanj nan+nanj] [inf+nanj <none> complex(float("inf") -0.0) inf <none> inf+nanj inf+nanj] [nan+nanj nan+nanj nan nan nan+nanj nan+nanj nan+nanj]]<line_sep>z=_make_complex(x)<if_stmt><not>isfinite(z)<block_start><if_stmt>math.isinf(z.imag)<and><not>math.isnan(z.real)<block_start><raise>ValueError<block_end><if_stmt>math.isinf(z.real)<and>math.isfinite(z.imag)<and>z.imag<ne>0<block_start><if_stmt>z.real<g>0<block_start><return>complex(math.copysign(inf math.cos(z.imag)) math.copysign(inf math.sin(z.imag)))<block_end><return>complex(math.copysign(inf math.cos(z.imag)) -math.copysign(inf math.sin(z.imag)))<block_end><return>_cosh_special[_special_type(z.real)][_special_type(z.imag)]<block_end><if_stmt>abs(z.real)<g>_LOG_LARGE_DOUBLE<block_start>x_minus_one=z.real-math.copysign(1 z.real)<line_sep>ret=complex(e<times>math.cos(z.imag)<times>math.cosh(x_minus_one) e<times>math.sin(z.imag)<times>math.sinh(x_minus_one))<block_end><else_stmt><block_start>ret=complex(math.cos(z.imag)<times>math.cosh(z.real) math.sin(z.imag)<times>math.sinh(z.real))<block_end><if_stmt>math.isinf(ret.real)<or>math.isinf(ret.imag)<block_start><raise>OverflowError<block_end><return>ret<block_end><def_stmt>sinh x<block_start>_sinh_special=[[inf+nanj <none> complex(-float("inf") 
-0.0) -inf <none> inf+nanj inf+nanj] [nan+nanj <none> <none> <none> <none> nan+nanj nan+nanj] [nanj <none> complex(-0.0 -0.0) complex(-0.0 0.0) <none> nanj nanj] [nanj <none> complex(0.0 -0.0) complex(0.0 0.0) <none> nanj nanj] [nan+nanj <none> <none> <none> <none> nan+nanj nan+nanj] [inf+nanj <none> complex(float("inf") -0.0) inf <none> inf+nanj inf+nanj] [nan+nanj nan+nanj complex(float("nan") -0.0) nan nan+nanj nan+nanj nan+nanj]]<line_sep>z=_make_complex(x)<if_stmt><not>isfinite(z)<block_start><if_stmt>math.isinf(z.imag)<and><not>math.isnan(z.real)<block_start><raise>ValueError<block_end><if_stmt>math.isinf(z.real)<and>math.isfinite(z.imag)<and>z.imag<ne>0<block_start><if_stmt>z.real<g>0<block_start><return>complex(math.copysign(inf math.cos(z.imag)) math.copysign(inf math.sin(z.imag)))<block_end><return>complex(-math.copysign(inf math.cos(z.imag)) math.copysign(inf math.sin(z.imag)))<block_end><return>_sinh_special[_special_type(z.real)][_special_type(z.imag)]<block_end><if_stmt>abs(z.real)<g>_LOG_LARGE_DOUBLE<block_start>x_minus_one=z.real-math.copysign(1 z.real)<line_sep><return>complex(math.cos(z.imag)<times>math.sinh(x_minus_one)<times>e math.sin(z.imag)<times>math.cosh(x_minus_one)<times>e)<block_end><return>complex(math.cos(z.imag)<times>math.sinh(z.real) math.sin(z.imag)<times>math.cosh(z.real))<block_end><def_stmt>tanh x<block_start>_tanh_special=[[-1 <none> complex(-1 -0.0) -1 <none> -1 -1] [nan+nanj <none> <none> <none> <none> nan+nanj nan+nanj] [nan+nanj <none> complex(-0.0 -0.0) complex(-0.0 0.0) <none> nan+nanj nan+nanj] [nan+nanj <none> complex(0.0 -0.0) 0.0 <none> nan+nanj nan+nanj] [nan+nanj <none> <none> <none> <none> nan+nanj nan+nanj] [1 <none> complex(1 -0.0) 1 <none> 1 1] [nan+nanj nan+nanj complex(float("nan") -0.0) nan nan+nanj nan+nanj 
nan+nanj]]<line_sep>z=_make_complex(x)<if_stmt><not>isfinite(z)<block_start><if_stmt>math.isinf(z.imag)<and>math.isfinite(z.real)<block_start><raise>ValueError<block_end><if_stmt>math.isinf(z.real)<and>math.isfinite(z.imag)<and>z.imag<ne>0<block_start><if_stmt>z.real<g>0<block_start><return>complex(1 math.copysign(0.0 math.sin(z.imag)<times>math.cos(z.imag)))<block_end><return>complex(-1 math.copysign(0.0 math.sin(z.imag)<times>math.cos(z.imag)))<block_end><return>_tanh_special[_special_type(z.real)][_special_type(z.imag)]<block_end><if_stmt>abs(z.real)<g>_LOG_LARGE_DOUBLE<block_start><return>complex(math.copysign(1 z.real) 4<times>math.sin(z.imag)<times>math.cos(z.imag)<times>math.exp(-2<times>abs(z.real)))<block_end>tanh_x=math.tanh(z.real)<line_sep>tan_y=math.tan(z.imag)<line_sep>cx=1/math.cosh(z.real)<line_sep>denom=1+tanh_x<times>tanh_x<times>tan_y<times>tan_y<line_sep><return>complex(tanh_x<times>(1+tan_y<times>tan_y)/denom ((tan_y/denom)<times>cx)<times>cx)<block_end><def_stmt>isfinite x<block_start><return>math.isfinite(x.real)<and>math.isfinite(x.imag)<block_end><def_stmt>isinf x<block_start><return>math.isinf(x.real)<or>math.isinf(x.imag)<block_end><def_stmt>isnan x<block_start><return>math.isnan(x.real)<or>math.isnan(x.imag)<block_end><def_stmt>isclose a b * rel_tol=1e-09 abs_tol=0.0<block_start>a=_make_complex(a)<line_sep>b=_make_complex(b)<line_sep>rel_tol=float(rel_tol)<line_sep>abs_tol=float(abs_tol)<if_stmt>rel_tol<l>0<or>abs_tol<l>0<block_start><raise>ValueError("tolerances must be non-negative")<block_end><if_stmt>a.real<eq>b.real<and>a.imag<eq>b.imag<block_start><return><true><block_end><if_stmt>math.isinf(a.real)<or>math.isinf(a.imag)<or>math.isinf(b.real)<or>math.isinf(b.imag)<block_start><return><false><block_end># if isnan(a) or isnan(b):
# return False
diff=abs(a-b)<line_sep><return>diff<le>rel_tol<times>abs(a)<or>diff<le>rel_tol<times>abs(b)<or>diff<le>abs_tol<block_end> |
<import_stmt>os<import_stmt>click<import_stmt>json<import_stmt>yaml<import_stmt>logging<import_stmt>sys<import_from_stmt>anchore anchore_utils<import_from_stmt>anchore.cli logs<import_from_stmt>anchore.util contexts<line_sep>plain_output=<false><def_stmt>extended_help_option extended_help=<none> *param_decls **attrs<block_start>"""
Based on the click.help_option code.
Adds a ``--extended-help`` option which immediately ends the program
printing out the extended extended-help page. Defaults to using the
callback's doc string, but can be given an explicit value as well.
This is intended for use as a decorator on a command to provide a 3rd level
of help verbosity suitable for use as a manpage (though not formatted as such explicitly).
Like :func:`version_option`, this is implemented as eager option that
prints in the callback and exits.
All arguments are forwarded to :func:`option`.
"""<def_stmt>decorator f<block_start><def_stmt>callback ctx param value<block_start><if_stmt>value<and><not>ctx.resilient_parsing<block_start><if_stmt><not>extended_help<block_start>ctx.command.help=ctx.command.callback.__doc__<line_sep>click.echo(ctx.get_help() color=ctx.color)<block_end><else_stmt><block_start>ctx.command.help=extended_help<line_sep>click.echo(ctx.get_help() color=ctx.color)<block_end>ctx.exit()<block_end><block_end>attrs.setdefault('is_flag' <true>)<line_sep>attrs.setdefault('expose_value' <false>)<line_sep>attrs.setdefault('help' 'Show extended help content, similar to manpage, and exit.')<line_sep>attrs.setdefault('is_eager' <true>)<line_sep>attrs['callback']=callback<line_sep><return>click.option(*(param_decls<or>('--extended-help' )) **attrs)(f)<block_end><return>decorator<block_end><def_stmt>std_formatter msg<block_start>"""
Default simple string format. Dumps block-style indented yaml for dicts if found. Otherwise no formatting
:param msg:
:return:
"""<if_stmt>isinstance(msg dict)<block_start><return>yaml.safe_dump(msg indent=<true> default_flow_style=<false>)<block_end><return>str(msg)<block_end><def_stmt>json_formatter obj<block_start>"""
Format the output in JSON
:param obj:
:return:
"""<if_stmt>isinstance(obj str)# Make a list of size 1
<block_start><return>json.dumps([obj] indent=<true>)<block_end><else_stmt><block_start><return>json.dumps(obj indent=<true> sort_keys=<true>)<block_end><block_end># Which formatting function to use
formatter=std_formatter<def_stmt>init_output_format use_json=<false> use_plain=<false> use_debug=<false> use_verbose=<false> use_quiet=<false> log_filepath=<none> debug_log_filepath=<none><block_start><global>formatter<if_stmt>use_json<block_start>formatter=json_formatter<block_end><if_stmt>use_debug<block_start>level='debug'<block_end><elif_stmt>use_verbose<block_start>level='verbose'<block_end><elif_stmt>use_quiet<block_start>level='quiet'<block_end><else_stmt><block_start>level='normal'<block_end>logs.init_output_formatters(output_verbosity=level logfile=log_filepath debug_logfile=debug_log_filepath)<block_end><def_stmt>anchore_print_err msg<block_start>exc=sys.exc_info()<if_stmt>exc<is><not><none><and>exc<ne>(<none> <none> <none>)<block_start>logging.getLogger(__name__).exception(msg)<block_end><else_stmt><block_start>logging.getLogger(__name__).error(msg)<block_end><block_end><def_stmt>anchore_print msg do_formatting=<false><block_start>"""
Print to stdout using the proper formatting for the command.
:param msg: output to be printed, either an object or a string. Objects will be serialized according to config
:return:
"""<if_stmt>do_formatting<block_start>click.echo(formatter(msg))<block_end><else_stmt><block_start>click.echo(msg)<block_end><block_end><def_stmt>build_image_list config image imagefile all_local include_allanchore dockerfile=<none> exclude_file=<none><block_start>"""Given option inputs from the cli, construct a list of image ids. Includes all found with no exclusion logic"""<if_stmt><not>image<and><not>(imagefile<or>all_local)<block_start><raise>click.BadOptionUsage('No input found for image source. One of <image>, <imagefile>, or <all> must be specified')<block_end><if_stmt>image<and>imagefile<block_start><raise>click.BadOptionUsage('Only one of <image> and <imagefile> can be specified')<block_end>filter_images=[]<if_stmt>exclude_file<block_start><with_stmt>open(exclude_file)<as>f<block_start><for_stmt>line f.readlines()<block_start>filter_images.append(line.strip())<block_end><block_end><block_end>imagelist={}<if_stmt>image<block_start>imagelist[image]={'dockerfile':dockerfile}<block_end><if_stmt>imagefile<block_start>filelist=anchore_utils.read_kvfile_tolist(imagefile)<for_stmt>i range(len(filelist))<block_start>l=filelist[i]<line_sep>imageId=l[0]<try_stmt><block_start>dfile=l[1]<block_end><except_stmt><block_start>dfile=<none><block_end>imagelist[imageId]={'dockerfile':dfile}<block_end><block_end><if_stmt>all_local<block_start>docker_cli=contexts['docker_cli']<if_stmt>docker_cli<block_start><for_stmt>f docker_cli.images(all=<true> quiet=<true> filters={'dangling':<false>})<block_start><if_stmt>f<not><in>imagelist<and>f<not><in>filter_images<block_start>imagelist[f]={'dockerfile':<none>}<block_end><block_end><block_end><else_stmt><block_start><raise>Exception("Could not load any images from local docker host - is docker running?")<block_end><block_end><if_stmt>include_allanchore<block_start>ret=contexts['anchore_db'].load_all_images().keys()<if_stmt>ret<and>len(ret)<g>0<block_start><for_stmt>l 
list(set(imagelist.keys())|set(ret))<block_start>imagelist[l]={'dockerfile':<none>}<block_end><block_end><block_end># Remove excluded items
<for_stmt>excluded filter_images<block_start>docker_cli=contexts['docker_cli']<if_stmt><not>docker_cli<block_start><raise>Exception("Could not query docker - is docker running?")<block_end><for_stmt>img docker_cli.images(name=excluded quiet=<true>)<block_start>imagelist.pop(img <none>)<block_end><block_end><return>imagelist<block_end> |
# ordereddict.py
# A dictionary that remembers insertion order
# Tested under Python 2.7 and 2.6.6 only
#
# Copyright (C) 2011 by <NAME> <lukius at gmail dot com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
<import_from_stmt>_abcoll *<try_stmt><block_start><import_from_stmt>thread get_ident<as>_get_ident<block_end><except_stmt>ImportError<block_start><import_from_stmt>dummy_thread get_ident<as>_get_ident<block_end><import_from_stmt>operator eq<as>_eq<import_from_stmt>itertools imap<as>_imap<line_sep>__author__='<NAME> <lukius at gmail dot com>'<line_sep>__version__='1.1'<line_sep>__all__=['OrderedDict']<line_sep>########################### Constants ###########################
FORWARD=0<line_sep>BACKWARDS=1<line_sep>KEY=0<line_sep>VALUE=1<line_sep>NEXT=3<line_sep>PREVIOUS=2<line_sep>#################################################################
class OrderedDict(dict, MutableMapping):
    'A dictionary that remembers insertion order.'

    # Implementation: a doubly-linked list of 4-item nodes
    # [key, value, previous node, next node] embedded directly in the
    # dictionary -- every key maps to its node.  As a consequence there
    # is little space penalty and every operation runs without repeated
    # lookups or deletions, unlike other versions of this data structure.
    #
    # Note that passing an OrderedDict to the plain dict constructor will
    # not behave as expected, because the internal dictionary stores
    # nodes rather than bare values.  Use the dict() instance method to
    # obtain a plain dict copy when needed.

    update = MutableMapping.update
    setdefault = MutableMapping.setdefault
    __ne__ = MutableMapping.__ne__

    ######################## Class methods #########################

    @classmethod
    def fromkeys(cls, iterable, value=None):
        '''od.fromkeys(S[, v]) -> New ordered dictionary with keys from S
        and values equal to v (which defaults to None).
        '''
        new_od = cls()
        for key in iterable:
            new_od[key] = value
        return new_od

    ################################################################
    ######################## Initialization ########################

    def __init__(self, *args, **kwds):
        """Initialize an ordered dictionary. Signature is the same as for
        regular dictionaries, but keyword arguments are not recommended
        because their insertion order is arbitrary.
        """
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        try:
            self.first_node
        except AttributeError:
            # First initialization: start with an empty linked list.
            self.first_node = None
            self.last_node = None
        self.update(*args, **kwds)

    ################################################################
    ################## Data access & manipulation ##################

    # Sentinel distinguishing "no default supplied" from any user value.
    __marker = object()

    def __getitem__(self, key):
        'od.__getitem__(y) <==> od[y]'
        return dict.__getitem__(self, key)[VALUE]

    def get(self, key, default=None):
        'od.get(k[,d]) -> od[k] if k in od, else d.  d defaults to None.'
        try:
            return self.__getitem__(key)
        except KeyError:
            return default

    def __setitem__(self, key, value):
        'od.__setitem__(i, y) <==> od[i]=y'
        try:
            # Existing key: update the node's value in place.
            dict.__getitem__(self, key)[VALUE] = value
        except KeyError:
            # New key: append a node at the tail of the linked list.
            node = [key, value, self.last_node, None]
            if self.first_node is None:
                self.first_node = node
            if self.last_node is not None:
                self.last_node[NEXT] = node
            self.last_node = node
            dict.__setitem__(self, key, node)

    def __delitem__(self, key):
        'od.__delitem__(y) <==> del od[y]'
        self.__adjust_after_removing(dict.pop(self, key))

    def pop(self, key, default=__marker):
        '''od.pop(k[,d]) -> v, remove specified key and return the corresponding
        value. If key is not found, d is returned if given, otherwise KeyError
        is raised.'''
        node = dict.pop(self, key, default)
        if node is self.__marker:
            raise KeyError(key)
        if node is default:
            return default
        self.__adjust_after_removing(node)
        return node[VALUE]

    def popitem(self, last=True):
        '''od.popitem() -> (k, v), remove and return some (key, value) pair as a
        2-tuple; but raise KeyError if od is empty.'''
        if not self:
            raise KeyError('dictionary is empty')
        key = next(reversed(self) if last else iter(self))
        return key, self.pop(key)

    def clear(self):
        'od.clear() -> None.  Remove all items from od.'
        dict.clear(self)
        self.first_node = None
        self.last_node = None

    def __adjust_after_removing(self, a_node):
        'Adjust a_node previous and next pointers after its removal.'
        previous, following = a_node[PREVIOUS], a_node[NEXT]
        if following:
            following[PREVIOUS] = previous
        else:
            # Removed the tail node.
            self.last_node = previous
        if previous:
            previous[NEXT] = following
        else:
            # Removed the head node.
            self.first_node = following

    ################################################################
    #################### Iteration & keys/values ###################

    def __walk(self, direction=FORWARD, action=lambda x: x, *arguments):
        'Iterate over action applied to each node, in the appropriate order.'
        if direction == FORWARD:
            link, node = NEXT, self.first_node
        else:
            link, node = PREVIOUS, self.last_node
        while node:
            yield action(node, *arguments)
            node = node[link]

    def __walk_to_list(self, direction=FORWARD, action=lambda x: x, *arguments):
        '''Obtain a list of objects resulting from applying action to
        each node, in the appropriate order.'''
        return list(self.__walk(direction, action, *arguments))

    def __iter__(self):
        'od.__iter__() <==> iter(od)'
        return self.__walk(action=lambda node: node[KEY])

    def __reversed__(self):
        'od.__reversed__() <==> reversed(od)'
        return self.__walk(direction=BACKWARDS, action=lambda node: node[KEY])

    def keys(self):
        "od.keys() -> list of od's keys"
        return self.__walk_to_list(action=lambda node: node[KEY])

    def values(self):
        "od.values() -> list of od's values"
        return self.__walk_to_list(action=lambda node: node[VALUE])

    def items(self):
        "od.items() -> list of od's (key, value) pairs, as 2-tuples"
        return self.__walk_to_list(action=lambda node: (node[KEY], node[VALUE]))

    def iterkeys(self):
        'od.iterkeys() -> an iterator over the keys of od'
        return iter(self)

    def itervalues(self):
        'od.itervalues() -> an iterator over the values of od'
        return self.__walk(action=lambda node: node[VALUE])

    def iteritems(self):
        'od.iteritems() -> an iterator over the (key, value) items of od'
        return self.__walk(action=lambda node: (node[KEY], node[VALUE]))

    ################################################################
    ############################# Copies ###########################

    def copy(self):
        'od.copy() -> a shallow copy of od'
        return self.__class__(self)

    def dict(self):
        'od.dict() -> a dict copy of od'
        plain = {}
        for key, value in self.iteritems():
            plain[key] = value
        return plain

    ################################################################
    ########################## Miscellaneous #######################

    def __repr__(self, _repr_running={}):
        'od.__repr__() <==> repr(od)'
        call_key = id(self), _get_ident()
        if call_key in _repr_running:
            # Self-referential container: avoid infinite recursion.
            return '...'
        _repr_running[call_key] = 1
        try:
            if not self:
                return '%s()' % (self.__class__.__name__,)
            return '%s(%r)' % (self.__class__.__name__, self.items())
        finally:
            del _repr_running[call_key]

    def __reduce__(self):
        'Return state information for pickling'
        items = self.items()
        # Temporarily drop the linked-list anchors so they are not part
        # of the pickled instance dictionary.
        anchors = self.first_node, self.last_node
        del self.first_node, self.last_node
        inst_dict = vars(self).copy()
        self.first_node, self.last_node = anchors
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)

    def __eq__(self, other):
        '''od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
        while comparison to a regular mapping is order-insensitive.
        '''
        if isinstance(other, OrderedDict):
            return len(self) == len(other) and \
                all(_imap(_eq, self.iteritems(), other.iteritems()))
        return dict.__eq__(self.dict(), other)

    def viewkeys(self):
        "od.viewkeys() -> a set-like object providing a view on od's keys"
        return KeysView(self)

    def viewvalues(self):
        "od.viewvalues() -> an object providing a view on od's values"
        return ValuesView(self)

    def viewitems(self):
        "od.viewitems() -> a set-like object providing a view on od's items"
        return ItemsView(self)

    ################################################################
"""Base models"""<line_sep> |
# encoding=utf8
import os

from third_party.dart import extract_score_webnlg


def evaluate_webnlg_challenge_2017(references_s, preds):
    """
    The evaluation of the webnlg_challenge_2017;
    we use the evaluation shell that the DART dataset provided.

    :param references_s: ACTUALLY, references in webnlg are of no use here;
        kept only so the signature mirrors the other evaluators.
    :param preds: list of predicted strings, one per example.
    :return: summary scores parsed from the evaluation script's output.
    :raises RuntimeError: if the evaluation shell script exits non-zero.
    """
    tmp_file_name = 'webnlg_challenge_2017_tmp4eval.txt'
    # The shell script reads the predictions from this file, one per line.
    with open(tmp_file_name, 'w') as tmp_file:
        for pred in preds:
            print(pred, file=tmp_file)
    # Previously a non-zero exit status was silently ignored, which made
    # evaluation failures look like (bogus) scores.
    status = os.system(
        "bash utils/process/general/dart_lib/run_eval_on_webnlg.sh "
        "{}".format(tmp_file_name)
    )
    if status != 0:
        raise RuntimeError(
            "run_eval_on_webnlg.sh exited with status {}".format(status)
        )
    summary = extract_score_webnlg()
    return summary


class EvaluateTool(object):
    """Adapter exposing webnlg_challenge_2017 evaluation under the
    project's common evaluator interface."""

    def __init__(self, args):
        self.args = args

    def evaluate(self, preds, golds, section):
        """Evaluate ``preds`` against ``golds``; ``section`` is unused."""
        references_s = [item["references"] for item in golds]
        # Sanity check: exactly one prediction per gold example.
        assert len(preds) == len(references_s)
        summary = evaluate_webnlg_challenge_2017(references_s, preds)
        return summary
"""
This module provides an input interface and processor to the ISI reading
system.
The reader is set up to run within a Docker container.
For the ISI reader to run, set the Docker memory and swap space to the maximum.
"""<import_from_stmt>.api process_text process_nxml process_preprocessed process_output_folder process_json_file<line_sep> |
from django.apps import AppConfig


class OpenbookAuthConfig(AppConfig):
    """Django application configuration for the ``openbook_auth`` app."""

    # Label under which Django registers this application.
    name = 'openbook_auth'
from typing import Dict

from lightbus.exceptions import (
    UnknownApi,
    InvalidApiRegistryEntry,
    EventNotFound,
    MisconfiguredApiOptions,
    InvalidApiEventConfiguration,
)

__all__ = ["Api", "Event"]


class ApiRegistry:
    """Registry of Api instances, keyed by each API's configured name."""

    def __init__(self):
        self._apis: Dict[str, "Api"] = dict()

    def add(self, api: "Api"):
        """Register an Api *instance* (not the Api class itself)."""
        if isinstance(api, type):
            raise InvalidApiRegistryEntry(
                "An attempt was made to add a type to the API registry. This "
                "is probably because you are trying to add the API class, rather "
                "than an instance of the API class.\n"
                "\n"
                "Use bus.client.register_api(MyApi()), rather than bus.client.register_api(MyApi)"
            )
        self._apis[api.meta.name] = api

    def get(self, name) -> "Api":
        """Return the registered Api called ``name``, or raise UnknownApi."""
        try:
            return self._apis[name]
        except KeyError:
            raise UnknownApi(
                "An API named '{}' was requested from the registry but the "
                "registry does not recognise it. Maybe the incorrect API name "
                "was specified, or maybe the API has not been registered.".format(name)
            )

    def remove(self, name) -> None:
        """Unregister the Api called ``name``, or raise UnknownApi."""
        try:
            del self._apis[name]
        except KeyError:
            raise UnknownApi(
                "An attempt was made to remove an API named '{}' from the registry, but the API "
                "could not be found. Maybe the incorrect API name "
                "was specified, or maybe the API has not been registered.".format(name)
            )

    def public(self):
        """All registered APIs not marked internal."""
        return [api for api in self._apis.values() if not api.meta.internal]

    def internal(self):
        """All registered APIs marked internal."""
        return [api for api in self._apis.values() if api.meta.internal]

    def all(self):
        """Every registered API instance."""
        return list(self._apis.values())

    def names(self):
        """The names of every registered API."""
        return list(self._apis.keys())


class ApiOptions:
    """Holds the options declared on an API's ``Meta`` class."""

    name: str
    internal: bool = False
    version: int = 1

    def __init__(self, options):
        # Copy every non-private entry of the Meta class' namespace.
        for key, value in options.items():
            if not key.startswith("_"):
                setattr(self, key, value)


class ApiMetaclass(type):
    """ API Metaclass

    Validates options in the API's Meta class and populates the
    API class' `meta` attribute.
    """

    def __init__(cls, name, bases=None, dict_=None):
        # The abstract Api base class itself carries no Meta and is skipped.
        is_api_base_class = name == "Api" and not bases
        if is_api_base_class:
            super(ApiMetaclass, cls).__init__(name, bases, dict_)
        else:
            options = dict_.get("Meta", None)
            if options is None:
                raise MisconfiguredApiOptions(
                    f"API class {name} does not contain a class named 'Meta'. Each API definition "
                    f"must contain a child class named 'Meta' which can contain configurations options. "
                    f"For example, the 'name' option is required and specifies "
                    f"the name used to access the API on the bus."
                )
            cls.sanity_check_options(name, options)
            cls.meta = ApiOptions(cls.Meta.__dict__.copy())
            super(ApiMetaclass, cls).__init__(name, bases, dict_)

            # 'default' is reserved for lightbus' own configuration.
            if cls.meta.name == "default" or cls.meta.name.startswith("default."):
                raise MisconfiguredApiOptions(
                    f"API class {name} is named 'default', or starts with 'default.'. "
                    f"This is a reserved name and is not allowed, please change it to something else."
                )

    def sanity_check_options(cls, name, options):
        """Ensure the Meta class declares the mandatory ``name`` option."""
        if not getattr(options, "name", None):
            raise MisconfiguredApiOptions(
                "API class {} does not specify a name option with its "
                "'Meta' options."
                "".format(name)
            )


class Api(metaclass=ApiMetaclass):
    class Meta:
        name = None

    def get_event(self, name) -> "Event":
        """Return the Event attribute called ``name``, or raise EventNotFound."""
        event = getattr(self, name, None)
        if isinstance(event, Event):
            return event
        else:
            raise EventNotFound("Event named {}.{} could not be found".format(self, name))

    def __str__(self):
        return self.meta.name


class Event:
    """Declares a bus event and the names of its parameters."""

    def __init__(self, parameters=tuple()):
        # Ensure you update the __copy__() method if adding other instance
        # variables below
        if isinstance(parameters, str):
            raise InvalidApiEventConfiguration(
                f"You appear to have passed a string value of {repr(parameters)} "
                f"for your API's event's parameters. This should be a list or a tuple, "
                f"not a string. You probably missed a comma when defining your "
                f"tuple of parameter names."
            )
        self.parameters = parameters
# -*- coding: utf-8 -*-
from datetime import datetime

from django.conf import settings
from haystack.indexes import Indexable
from opps.containers.search_indexes import ContainerIndex

from .models import Post, Album, Link

# Posts inserted on or before MIGRATION_DATE are considered legacy content.
migration_date = getattr(settings, 'MIGRATION_DATE', None)

if migration_date:
    m_date = datetime.strptime(migration_date, "%Y-%m-%d").date()

    def _is_legacy(self):
        return m_date >= self.date_insert.date()
else:
    def _is_legacy(self):
        return False

# Monkey-patch the predicate onto Post so the indexes can consult it.
Post.is_legacy = _is_legacy


class PostIndex(ContainerIndex, Indexable):
    """Search index for Post containers."""

    def get_model(self):
        return Post


class AlbumIndex(ContainerIndex, Indexable):
    """Search index for Album containers."""

    def get_model(self):
        return Album


class LinkIndex(ContainerIndex, Indexable):
    """Search index for Link containers."""

    def get_model(self):
        return Link
# Copyright cocotb contributors
# Licensed under the Revised BSD License, see LICENSE for details.
# SPDX-License-Identifier: BSD-3-Clause
"""Test getting and setting values of arrays"""<import_stmt>contextlib<import_stmt>logging<import_stmt>cocotb<import_from_stmt>cocotb.clock Clock<import_from_stmt>cocotb.triggers Timer<line_sep>tlog=logging.getLogger("cocotb.test")<def_stmt>_check_value tlog hdl expected<block_start><assert_stmt>hdl.value<eq>expected<line_sep>tlog.info(f" Found {hdl!r} ({hdl._type}) with value={hdl.value}")<block_end># GHDL unable to put values on nested array types (gh-2588)
@cocotb.test(expect_error=Exception if cocotb.SIM_NAME.lower().startswith("ghdl") else ())
async def test_1dim_array_handles(dut):
    """Test getting and setting array values using the handle of the full array."""
    cocotb.start_soon(Clock(dut.clk, 1000, "ns").start())

    # Drive each one-dimensional array through its full-array handle...
    expected = {
        "array_7_downto_4": [0xF0, 0xE0, 0xD0, 0xC0],
        "array_4_to_7": [0xB0, 0xA0, 0x90, 0x80],
        "array_3_downto_0": [0x70, 0x60, 0x50, 0x40],
        "array_0_to_3": [0x30, 0x20, 0x10, 0x00],
    }
    for name, values in expected.items():
        getattr(dut, name).value = values

    await Timer(1000, "ns")

    # ...then read each one back through the same handle.
    for name, values in expected.items():
        _check_value(tlog, getattr(dut, name), values)


# GHDL unable to put values on nested array types (gh-2588)
# iverilog flattens multi-dimensional unpacked arrays (gh-2595)
@cocotb.test(
    expect_error=Exception
    if cocotb.SIM_NAME.lower().startswith(("icarus", "ghdl"))
    else ()
)
async def test_ndim_array_handles(dut):
    """Test getting and setting multi-dimensional array values using the handle of the full array."""
    cocotb.start_soon(Clock(dut.clk, 1000, "ns").start())

    # Write the whole 2-D array in one assignment, then read it back whole.
    matrix = [[0xF0, 0xE0, 0xD0, 0xC0], [0xB0, 0xA0, 0x90, 0x80]]
    dut.array_2d.value = matrix

    await Timer(1000, "ns")

    _check_value(tlog, dut.array_2d, matrix)


# GHDL unable to put values on nested array types (gh-2588)
@cocotb.test(expect_error=Exception if cocotb.SIM_NAME.lower().startswith("ghdl") else ())
async def test_1dim_array_indexes(dut):
    """Test getting and setting values of array indexes."""
    cocotb.start_soon(Clock(dut.clk, 1000, "ns").start())

    dut.array_7_downto_4.value = [0xF0, 0xE0, 0xD0, 0xC0]
    dut.array_4_to_7.value = [0xB0, 0xA0, 0x90, 0x80]
    dut.array_3_downto_0.value = [0x70, 0x60, 0x50, 0x40]
    dut.array_0_to_3.value = [0x30, 0x20, 0x10, 0x00]

    await Timer(1000, "ns")

    # Check indices
    index_checks = [
        (dut.array_7_downto_4[7], 0xF0),
        (dut.array_7_downto_4[4], 0xC0),
        (dut.array_4_to_7[4], 0xB0),
        (dut.array_4_to_7[7], 0x80),
        (dut.array_3_downto_0[3], 0x70),
        (dut.array_3_downto_0[0], 0x40),
        (dut.array_0_to_3[0], 0x30),
        (dut.array_0_to_3[3], 0x00),
        (dut.array_0_to_3[1], 0x20),
    ]
    for handle, expected in index_checks:
        _check_value(tlog, handle, expected)

    # Get sub-handles through NonHierarchyIndexableObject.__getitem__
    writes = [
        (dut.array_7_downto_4, 7, 0xDE),
        (dut.array_4_to_7, 4, 0xFC),
        (dut.array_3_downto_0, 0, 0xAB),
        (dut.array_0_to_3, 1, 0x7A),
        (dut.array_0_to_3, 3, 0x42),
    ]
    for array, index, value in writes:
        array[index].value = value

    await Timer(1000, "ns")

    for array, index, value in writes:
        _check_value(tlog, array[index], value)


# GHDL unable to put values on nested array types (gh-2588)
# iverilog flattens multi-dimensional unpacked arrays (gh-2595)
@cocotb.test(
    expect_error=Exception
    if cocotb.SIM_NAME.lower().startswith(("icarus", "ghdl"))
    else ()
)
async def test_ndim_array_indexes(dut):
    """Test getting and setting values of multi-dimensional array indexes."""
    cocotb.start_soon(Clock(dut.clk, 1000, "ns").start())

    dut.array_2d.value = [[0xF0, 0xE0, 0xD0, 0xC0], [0xB0, 0xA0, 0x90, 0x80]]
    await Timer(1000, "ns")

    # Check indices
    _check_value(tlog, dut.array_2d[1], [0xB0, 0xA0, 0x90, 0x80])
    _check_value(tlog, dut.array_2d[0][31], 0xF0)
    _check_value(tlog, dut.array_2d[1][29], 0x90)
    _check_value(tlog, dut.array_2d[1][28], 0x80)

    # Get sub-handles through NonHierarchyIndexableObject.__getitem__
    dut.array_2d[1].value = [0xDE, 0xAD, 0xBE, 0xEF]
    dut.array_2d[0][31].value = 0x0F
    await Timer(1000, "ns")

    for handle, expected in [
        (dut.array_2d[0][31], 0x0F),
        (dut.array_2d[0][29], 0xD0),
        (dut.array_2d[1][30], 0xAD),
        (dut.array_2d[1][28], 0xEF),
    ]:
        _check_value(tlog, handle, expected)


# GHDL unable to access record signals (gh-2591)
# Icarus doesn't support structs (gh-2592)
@cocotb.test(
    expect_error=AttributeError
    if cocotb.SIM_NAME.lower().startswith(("icarus", "ghdl"))
    else ()
)
async def test_struct(dut):
    """Test setting and getting values of structs."""
    cocotb.start_soon(Clock(dut.clk, 1000, "ns").start())
    # Toggle the struct member and confirm each written value sticks.
    for bit in (1, 0):
        dut.inout_if.a_in.value = bit
        await Timer(1000, "ns")
        _check_value(tlog, dut.inout_if.a_in, bit)


@contextlib.contextmanager
def assert_raises(exc_type):
    """Context manager asserting that its body raises ``exc_type``."""
    try:
        yield
    except exc_type as exc:
        tlog.info(f" {exc_type.__name__} raised as expected: {exc}")
    else:
        raise AssertionError(f"{exc_type.__name__} was not raised")


@cocotb.test()
async def test_exceptions(dut):
    """Test that correct Exceptions are raised."""
    # Tuples and arbitrary objects are rejected outright...
    with assert_raises(TypeError):
        dut.array_7_downto_4.value = (0xF0, 0xE0, 0xD0, 0xC0)
    with assert_raises(TypeError):
        dut.array_4_to_7.value = Exception("Exception Object")
    # ...and lists of the wrong length are rejected as well.
    with assert_raises(ValueError):
        dut.array_3_downto_0.value = [0x70, 0x60, 0x50]
    with assert_raises(ValueError):
        dut.array_0_to_3.value = [0x40, 0x30, 0x20, 0x10, 0x00]
import collections

from supriya.enums import CalculationRate
from supriya.synthdefs import UGen


class PulseDivider(UGen):
    """
    Outputs one impulse each time it receives ``div`` triggers.

    ::

        >>> pulse_divider = supriya.ugens.PulseDivider.ar(
        ...     div=2,
        ...     start=0,
        ...     trigger=0,
        ...     )
        >>> pulse_divider
        PulseDivider.ar()

    """

    ### CLASS VARIABLES ###

    # Ordered input names; the order defines the UGen's input layout.
    # NOTE: collections.OrderedDict does not accept bare positional
    # strings (the previous code raised TypeError at class creation),
    # so the names are supplied via ``fromkeys``.
    _ordered_input_names = collections.OrderedDict.fromkeys(
        ['trigger', 'div', 'start'],
    )

    _valid_calculation_rates = None

    ### INITIALIZER ###

    def __init__(
        self,
        calculation_rate=None,
        div=2,
        start=0,
        trigger=0,
    ):
        """Initialize the UGen with its calculation rate and inputs."""
        UGen.__init__(
            self,
            calculation_rate=calculation_rate,
            div=div,
            start=start,
            trigger=trigger,
        )

    ### PUBLIC METHODS ###

    @classmethod
    def ar(
        cls,
        div=2,
        start=0,
        trigger=0,
    ):
        """
        Constructs an audio-rate PulseDivider.

        ::

            >>> pulse_divider = supriya.ugens.PulseDivider.ar(
            ...     div=2,
            ...     start=0,
            ...     trigger=0,
            ...     )
            >>> pulse_divider
            PulseDivider.ar()

        Returns ugen graph.
        """
        # CalculationRate is already imported at module scope.
        calculation_rate = CalculationRate.AUDIO
        ugen = cls._new_expanded(
            calculation_rate=calculation_rate,
            div=div,
            start=start,
            trigger=trigger,
        )
        return ugen

    @classmethod
    def kr(
        cls,
        div=2,
        start=0,
        trigger=0,
    ):
        """
        Constructs a control-rate PulseDivider.

        ::

            >>> pulse_divider = supriya.ugens.PulseDivider.kr(
            ...     div=2,
            ...     start=0,
            ...     trigger=0,
            ...     )
            >>> pulse_divider
            PulseDivider.kr()

        Returns ugen graph.
        """
        calculation_rate = CalculationRate.CONTROL
        ugen = cls._new_expanded(
            calculation_rate=calculation_rate,
            div=div,
            start=start,
            trigger=trigger,
        )
        return ugen

    ### PUBLIC PROPERTIES ###

    @property
    def div(self):
        """
        Gets `div` input of PulseDivider.

        ::

            >>> pulse_divider = supriya.ugens.PulseDivider.ar(
            ...     div=2,
            ...     start=0,
            ...     trigger=0,
            ...     )
            >>> pulse_divider.div
            2.0

        Returns ugen input.
        """
        # OrderedDict has no .index(); look the name up in the key order.
        index = tuple(self._ordered_input_names).index('div')
        return self._inputs[index]

    @property
    def start(self):
        """
        Gets `start` input of PulseDivider.

        ::

            >>> pulse_divider = supriya.ugens.PulseDivider.ar(
            ...     div=2,
            ...     start=0,
            ...     trigger=0,
            ...     )
            >>> pulse_divider.start
            0.0

        Returns ugen input.
        """
        index = tuple(self._ordered_input_names).index('start')
        return self._inputs[index]

    @property
    def trigger(self):
        """
        Gets `trigger` input of PulseDivider.

        ::

            >>> pulse_divider = supriya.ugens.PulseDivider.ar(
            ...     div=2,
            ...     start=0,
            ...     trigger=0,
            ...     )
            >>> pulse_divider.trigger
            0.0

        Returns ugen input.
        """
        index = tuple(self._ordered_input_names).index('trigger')
        return self._inputs[index]
"""
LIST: CHUNKING
"""<line_sep>__author__='<NAME> - <EMAIL>'<line_sep>__twitter__='@solamour'<line_sep>__version__='1.0.0'<line_sep># Example of Chunking (Grouping an item with its next)
<def_stmt>chunks list number# Requires a list and a number
<block_start><for_stmt>index range(0 len(list) number)# For every
# index inside of a number range starting at '0'
# and running to the length of the list, with steps
# of a chosen 'number'
<block_start><yield>list[index:index+number]<block_end><block_end># Yield returns
# a 'generator' object, so we cast the result to a
# 'list' and will return 'list slices' ranging from
# the chosen 'index' to the chosen 'index' + chosen
# 'number'
# Exemplar list
itemList=[0 1 2 3 4 5]# A simple list of numbers to
# parse with our 'chunks' definition
count=2# A number which we want to chunk to. We choose '2'
# which will result in sublists of: [[0, 1], [2, 3], [4,5]]
chunksList=chunks(itemList count)# Here we call our new
# 'chunks' definition on our 'itemList' and with our 'count'
# then push those results to our variable called 'chunksList'
OUT=chunksList# Returning our chunked data
|
class Dataset(object):
    """Abstract base class for datasets bound to an environment spec."""

    def __init__(self, env_spec):
        # Environment specification associated with this dataset.
        self._env_spec = env_spec

    def get_batch(self, batch_size, horizon):
        """Return a single batch of data; subclasses must implement."""
        raise NotImplementedError

    def get_batch_iterator(self, batch_size, horizon, randomize_order=False, is_tf=True):
        """Return an iterator over batches; subclasses must implement."""
        raise NotImplementedError
import re

# Workaround for Windows ST2 not having distutils
try:
    from distutils.version import LooseVersion
except ImportError:
    # Minimal fallback adapted from distutils/version.py.
    class LooseVersion():
        component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE)

        def __init__(self, vstring=None):
            if vstring:
                self.parse(vstring)

        def __ge__(self, other):
            c = self._cmp(other)
            if c is NotImplemented:
                return c
            return c >= 0

        def __le__(self, other):
            # Needed by GulpVersion.supports_tasks_simple(): the original
            # fallback only defined __ge__, so the `<=` comparison below
            # raised TypeError on systems without distutils.
            c = self._cmp(other)
            if c is NotImplemented:
                return c
            return c <= 0

        def parse(self, vstring):
            self.vstring = vstring
            components = [x for x in self.component_re.split(vstring)
                          if x and x != '.']
            for i, obj in enumerate(components):
                try:
                    components[i] = int(obj)
                except ValueError:
                    pass
            self.version = components

        def _cmp(self, other):
            if isinstance(other, str):
                other = LooseVersion(other)
            if self.version == other.version:
                return 0
            if self.version < other.version:
                return -1
            if self.version > other.version:
                return 1


#
# Actual class
#
class GulpVersion():
    """Parses `gulp --version` output and answers capability queries."""

    def __init__(self, version_string):
        self.version_string = version_string or ""

    def supports_tasks_simple(self):
        # This is a mess. The new gulp-cli started from version 0 and does
        # support tasks-simple, but there's no reliable way to check which
        # one is installed. So here we are, having to check if the CLI
        # version is _not_ between 3.6.0 and 3.7.0 which works..for now
        cli_version = LooseVersion(self.cli_version())
        return cli_version >= LooseVersion("3.7.0") or cli_version <= LooseVersion("3.6.0")

    def cli_version(self):
        return self.get("CLI")

    def local_version(self):
        return self.get("Local")

    def get(self, version_name):
        """Return the named version from the output, or "3.6.0" if absent."""
        # Raw string avoids the invalid escape sequence warning for `\d`.
        re_match = re.search(version_name + r" version (\d+\.\d+\.\d+)", self.version_string)
        return re_match.group(1) if re_match else "3.6.0"
# Copyright 2018-2022 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module contains functionality for debugging quantum programs on simulator devices.
"""<import_from_stmt>pennylane DeviceError<class_stmt>_Debugger<block_start>"""A debugging context manager.
Without an active debugging context, devices will not save their internal state when
encoutering Snapshot operations. The debugger also serves as storage for the device states.
Args:
dev (Device): device to attach the debugger to
"""<def_stmt>__init__ self dev<block_start><if_stmt>"Snapshot"<not><in>dev.operations<block_start><raise>DeviceError("Device does not support snapshots.")<block_end>self.snapshots={}<line_sep>self.active=<false><line_sep>self.device=dev<line_sep>dev._debugger=self<block_end><def_stmt>__enter__ self<block_start>self.active=<true><line_sep><return>self<block_end><def_stmt>__exit__ self exc_type exc_value exc_traceback<block_start>self.active=<false><line_sep>self.device._debugger=<none><block_end><block_end><def_stmt>snapshots qnode<block_start>r"""Create a function that retrieves snapshot results from a QNode.
Args:
qnode (.QNode): the input QNode to be simulated
Returns:
A function that has the same argument signature as ``qnode`` and returns a dictionary.
When called, the function will execute the QNode on the registered device and retrieve
the saved snapshots obtained via the ``qml.Snapshot`` operation. Additionally, the snapshot
dictionary always contains the execution results of the QNode, so the use of the tag
"execution_results" should be avoided to prevent conflicting key names.
**Example**
.. code-block:: python3
dev = qml.device("default.qubit", wires=2)
@qml.qnode(dev, interface=None)
def circuit():
qml.Snapshot()
qml.Hadamard(wires=0)
qml.Snapshot("very_important_state")
qml.CNOT(wires=[0, 1])
qml.Snapshot()
return qml.expval(qml.PauliX(0))
>>> qml.snapshots(circuit)()
{0: array([1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j]),
'very_important_state': array([0.70710678+0.j, 0.+0.j, 0.70710678+0.j, 0.+0.j]),
2: array([0.70710678+0.j, 0.+0.j, 0.+0.j, 0.70710678+0.j]),
'execution_results': array(0.)}
"""<def_stmt>get_snapshots *args **kwargs<block_start><with_stmt>_Debugger(qnode.device)<as>dbg<block_start>results=qnode(*args **kwargs)<block_end>dbg.snapshots["execution_results"]=results<line_sep><return>dbg.snapshots<block_end><return>get_snapshots<block_end> |
import json

from flask import Flask, request
import requests

# Token that has to be generated from webhook page portal
ACCESS_TOKEN = "random <PASSWORD>"

# Token that has to be added for verification with developer portal
VERIFICATION_TOKEN = "abc"

# Identifier payloads for initial button
C19INDIA = "C19INDIA"

app = Flask(__name__)

# This get endpoint is for verification with messenger app
@app.route('/webhook' methods=['GET'])<def_stmt>webhook <block_start>verify_token=request.args.get("hub.verify_token")<if_stmt>verify_token<eq>VERIFICATION_TOKEN<block_start><return>request.args.get("hub.challenge")<block_end><return>'Unable to authorise.'<block_end>@app.route("/webhook" methods=['POST'])<def_stmt>webhook_handle <block_start>data=request.get_json()<if_stmt>data["object"]<eq>"page"# To verify that the request is being originated from a page
<block_start><for_stmt>entry data["entry"]<block_start><for_stmt>event entry["messaging"]<block_start><if_stmt>event.get("message")# somebody typed a message
<block_start>process_message(event)<block_end># user clicked/tapped "postback" button in earlier message
<elif_stmt>event.get("postback")<block_start>process_postback(event)<block_end><block_end><block_end><block_end><return>'ok'<block_end><def_stmt>process_message event# the facebook ID of the person sending you the message
<block_start>sender_id=event["sender"]["id"]<line_sep># could receive text or attachment but not both
<if_stmt>"text"<in>event["message"]<block_start>send_initial_menu(sender_id)<block_end><block_end><def_stmt>send_initial_menu sender_id<block_start>message_data=json.dumps({"recipient":{"id":sender_id} "message":{"attachment":{"type":"template" "payload":{"template_type":"generic" "elements":[{"title":"Covid India Stats" "subtitle":"Get the covid19 stats of Indian states" "buttons":[{"type":"web_url" "url":"https://www.worldometers.info/coronavirus/country/india/" "title":"Open Worldometer India"} {"type":"postback" "title":"Get Stats By Indian States" "payload":C19INDIA }] }]}}}})<line_sep>call_send_api(message_data)<block_end><def_stmt>send_state_list sender_id<block_start>message_data=json.dumps({"recipient":{"id":sender_id} "message":{"attachment":{"type":"template" "payload":{"template_type":"generic" "elements":[{"title":"Select State" "buttons":create_state_list(1)} {"title":"Select State" "buttons":create_state_list(2)} {"title":"Select State" "buttons":create_state_list(3)} {"title":"Select State" "buttons":create_state_list(4)} {"title":"Select State" "buttons":create_state_list(5)} {"title":"Select State" "buttons":create_state_list(6)} {"title":"Select State" "buttons":create_state_list(7)} {"title":"Select State" "buttons":create_state_list(8)} {"title":"Select State" "buttons":create_state_list(9)} {"title":"Select State" "buttons":create_state_list(10)}]}}}})<line_sep>call_send_api(message_data)<block_end><def_stmt>create_state_list index<block_start>state_list=["Maharashtra" "Kerala" "Karnataka" "Andhra Pradesh" "Tamil Nadu" "Delhi" "Uttar Pradesh" "West Bengal" "Odisha" "Rajasthan" "Chhattisgarh" "Telangana" "Haryana" "Gujarat" "Bihar" "Madhya Pradesh" "Assam" "Punjab" "Jharkhand" "Uttarakhand" "Himachal Pradesh" "Goa" "Tripura" "Manipur" "<NAME>" "Meghalaya" "Nagaland" "Sikkim" "Mizoram"]<line_sep>payload_list=[]<line_sep>start_index=0+3<times>(index-1)<line_sep>end_index=29<if>(start_index+3)<g>29<else>(start_index+3)<for_stmt>i range(start_index 
end_index)<block_start>postback={}<line_sep>postback["type"]="postback"<line_sep>postback["title"]=state_list[i]<line_sep>postback["payload"]=state_list[i]<line_sep>payload_list.append(postback)<block_end><return>payload_list<block_end><def_stmt>get_stats_send sender_id state<block_start>response=json.loads(requests.get("https://api.covid19india.org/data.json").text)<line_sep>list_state=response['statewise']<for_stmt>i list_state<block_start><if_stmt>i['state']<eq>state<block_start>x=i<line_sep><break><block_end><block_end>message_data=json.dumps({"recipient":{"id":sender_id} "message":{"text":"ACTIVE CASES: {}\nCONFIRMED CASES: {}\nDEATHS: {}\nRECOVERED: {}".format(x['active'] x['confirmed'] x['deaths'] x['recovered'])}})<line_sep>call_send_api(message_data)<block_end><def_stmt>process_postback event<block_start>sender_id=event["sender"]["id"]<line_sep>payload=event["postback"]["payload"]<if_stmt>payload<eq>C19INDIA<block_start>send_state_list(sender_id)<block_end><else_stmt><block_start>get_stats_send(sender_id payload)<block_end><block_end><def_stmt>call_send_api message_data<block_start>params={"access_token":ACCESS_TOKEN}<line_sep>headers={"Content-Type":"application/json"}<line_sep>r=requests.post("https://graph.facebook.com/v5.0/me/messages" params=params headers=headers data=message_data)<block_end><if_stmt>__name__<eq>"__main__"<block_start>app.run()<block_end> |
"""Plugin class, modules & related functions"""<import_stmt>os<import_stmt>re<import_from_stmt>genomepy.config config<line_sep>__all__=["Plugin" "manage_plugins" "get_active_plugins"]<class_stmt>Plugin<block_start>"""Plugin base class."""<def_stmt>__init__ self<block_start>self.name=convert(type(self).__name__).replace("_plugin" "")<line_sep>self.active=<false><block_end><def_stmt>activate self<block_start>self.active=<true><block_end><def_stmt>deactivate self<block_start>self.active=<false><block_end><def_stmt>after_genome_download self genome threads force<block_start><raise>NotImplementedError("plugin should implement this method")<block_end><def_stmt>get_properties self genome<block_start><raise>NotImplementedError("plugin should implement this method")<block_end><block_end><def_stmt>convert name:str<arrow>str<block_start>"""
Convert CamelCase to underscore. e.g. StarPlugin -> star_plugin
Parameters
----------
name : str
Camelcase string
Returns
-------
name : str
Converted name
"""<line_sep>s1=re.sub("(.)([A-Z][a-z]+)" r"\1_\2" name)<line_sep><return>re.sub("([a-z0-9])([A-Z])" r"\1_\2" s1).lower()<block_end><def_stmt>list_plugins <arrow>list<block_start>plugin_dir=os.path.dirname(os.path.realpath(__file__))<line_sep>plugin_files=[f<for>f os.listdir(plugin_dir)<if>f.endswith(".py")]<line_sep>plugin_names=[f[:-3]<for>f plugin_files<if><not>f.startswith("_")]<line_sep><return>plugin_names<block_end><def_stmt>init_plugins <block_start>"""
create a dictionary of plugin instances
Returns
-------
plugins : dictionary
key is plugin name, value Plugin object
"""<line_sep># import plugins
<for_stmt>plugin list_plugins()<block_start>__import__(f"genomepy.plugins.{plugin}")<block_end># for each Plugin subclass, save an instance to a dict
d={}<line_sep>active_plugins=config.get("plugin" [])<for_stmt>c Plugin.__subclasses__()<block_start>ins=c()<if_stmt>ins.name<in>active_plugins<block_start>ins.activate()<block_end>d[ins.name]=ins<block_end><return>d<block_end>PLUGINS=init_plugins()<def_stmt>get_active_plugins <arrow>list<block_start>"""Returns all active plugin instances."""<line_sep><return>[inst<for>name,inst PLUGINS.items()<if>inst.active]<block_end><def_stmt>activate name<block_start>"""Activate plugin.
Parameters
----------
name : str
Plugin name.
"""<if_stmt>name<in>PLUGINS<block_start>PLUGINS[name].activate()<block_end><else_stmt><block_start><raise>ValueError(f"plugin {name} not found")<block_end><block_end><def_stmt>deactivate name<block_start>"""Deactivate plugin.
Parameters
----------
name : str
Plugin name.
"""<if_stmt>name<in>PLUGINS<block_start>PLUGINS[name].deactivate()<block_end><else_stmt><block_start><raise>ValueError(f"plugin {name} not found")<block_end><block_end><def_stmt>show_plugins <block_start>active_plugins=config.get("plugin" [])<line_sep>print("{:20}{}".format("plugin" "enabled"))<for_stmt>plugin sorted(PLUGINS)<block_start>print("{:20}{}".format(plugin {<false>:"" <true>:"*"}[plugin<in>active_plugins]))<block_end><block_end><def_stmt>manage_plugins command:str plugin_names:list=<none><block_start>"""
Manage genomepy plugins
Parameters
----------
command : str
command to perform. Options:
list
show plugins and status
enable
enable plugins
disable
disable plugins
plugin_names : list
plugin names for the enable/disable command
"""<if_stmt>command<in>["show" "list"]<block_start><return>show_plugins()<block_end>active_plugins=config.get("plugin" [])<for_stmt>name plugin_names<if>plugin_names<else>[]<block_start><if_stmt>name<not><in>PLUGINS<block_start><raise>ValueError(f"Unknown plugin: '{name}'.")<block_end><block_end><if_stmt>command<in>["enable" "activate"]<block_start>[active_plugins.append(name)<for>name plugin_names]<block_end><elif_stmt>command<in>["disable" "deactivate"]<block_start>[active_plugins.remove(name)<for>name plugin_names]<block_end><else_stmt><block_start><raise>ValueError(f"Invalid plugin command: '{command}'. Options: 'list', 'enable' or 'disable'.")<block_end>active_plugins=sorted(list(set(active_plugins)))<line_sep>config["plugin"]=active_plugins<line_sep>config.save()<line_sep>print(f"Enabled plugins: {', '.join(active_plugins)}")<block_end> |
<import_from_stmt>torch.autograd Variable<import_stmt>torch.nn.functional<as>F<import_stmt>scripts.utils<as>utils<import_stmt>torch.nn<as>nn<import_stmt>numpy<as>np<import_stmt>torch<class_stmt>CrossEntropy2d(nn.Module)<block_start><def_stmt>__init__ self size_average=<true> ignore_label=255<block_start>super(CrossEntropy2d self).__init__()<line_sep>self.size_average=size_average<line_sep>self.ignore_label=ignore_label<block_end><def_stmt>forward self predict target weight=<none><block_start>"""
Args:
predict:(n, c, h, w)
target:(n, h, w)
weight (Tensor, optional): a manual rescaling weight given to each class.
If given, has to be a Tensor of size "nclasses"
"""<assert_stmt><not>target.requires_grad<assert_stmt>predict.dim()<eq>4<assert_stmt>target.dim()<eq>3<assert_stmt>predict.size(0)<eq>target.size(0) "{0} vs {1} ".format(predict.size(0) target.size(0))<assert_stmt>predict.size(2)<eq>target.size(1) "{0} vs {1} ".format(predict.size(2) target.size(1))<assert_stmt>predict.size(3)<eq>target.size(2) "{0} vs {1} ".format(predict.size(3) target.size(3))<line_sep>n,c,h,w=predict.size()<line_sep>target_mask=(target<ge>0)<times>(target<ne>self.ignore_label)<line_sep>target=target[target_mask]<line_sep>predict=predict.transpose(1 2).transpose(2 3).contiguous()<line_sep>predict=predict[target_mask.view(n h w 1).repeat(1 1 1 c)].view(-1 c)<line_sep>loss=F.cross_entropy(predict target weight=weight size_average=self.size_average)<line_sep><return>loss<block_end><block_end><def_stmt>cross_entropy2d input target weight=<none> size_average=<true># 1. input: (n, c, h, w), target: (n, h, w)
<block_start>n,c,h,w=input.size()<line_sep># 2. log_p: (n, c, h, w)
log_p=F.log_softmax(input dim=1)<line_sep># 3. log_p: (n*h*w, c) - contiguous() required if transpose() is used before view().
log_p=log_p.transpose(1 2).transpose(2 3).contiguous().view(-1 c)<line_sep>log_p=log_p[target.view(n<times>h<times>w 1).repeat(1 c)<ge>0]<line_sep>log_p=log_p.view(-1 c)<line_sep># 4. target: (n*h*w,)
mask=target<ge>0<line_sep>target=target[mask]<line_sep>loss=F.nll_loss(log_p target ignore_index=250 weight=weight size_average=<false>)<if_stmt>size_average<block_start>loss<augdiv>mask.data.sum()<line_sep># loss /= mask.sum().data[0]
<block_end><return>loss<block_end><def_stmt>bootstrapped_cross_entropy2d input target K weight=<none> size_average=<false><block_start>"""A categorical cross entropy loss for 4D tensors.
We assume the following layout: (batch, classes, height, width)
Args:
input: The outputs.
target: The predictions.
K: The number of pixels to select in the bootstrapping process.
The total number of pixels is determined as 512 * multiplier.
Returns:
The pixel-bootstrapped cross entropy loss.
"""<line_sep>batch_size=input.size()[0]<def_stmt>_bootstrap_xentropy_single input target K weight=<none> size_average=<false><block_start>n,c,h,w=input.size()<line_sep># 1. The log softmax. log_p: (n, c, h, w)
log_p=F.log_softmax(input dim=1)<line_sep># 2. log_p: (n*h*w, c) - contiguous() required if transpose() is used before view().
log_p=log_p.transpose(1 2).transpose(2 3).contiguous().view(-1 c)<line_sep>log_p=log_p[target.view(n<times>h<times>w 1).repeat(1 c)<ge>0]<line_sep>log_p=log_p.view(-1 c)<line_sep># 3. target: (n*h*w,)
mask=target<ge>0<line_sep>target=target[mask]<line_sep>loss=F.nll_loss(log_p target weight=weight ignore_index=250 reduce=<false> size_average=size_average)<line_sep># For each element in the batch, collect the top K worst predictions
topk_loss,_=loss.topk(K)<line_sep>reduced_topk_loss=topk_loss.sum()/K<line_sep><return>reduced_topk_loss<block_end>loss=0.0<line_sep># Bootstrap from each image not entire batch
<for_stmt>i range(batch_size)<block_start>loss<augadd>_bootstrap_xentropy_single(input=torch.unsqueeze(input[i] 0) target=torch.unsqueeze(target[i] 0) K=K weight=weight size_average=size_average)<block_end><return>loss/float(batch_size)<block_end><class_stmt>FocalLoss2D(nn.Module)<block_start>"""
Focal Loss, which is proposed in:
"Focal Loss for Dense Object Detection (https://arxiv.org/abs/1708.02002v2)"
"""<def_stmt>__init__ self num_classes=19 ignore_label=250 alpha=0.25 gamma=2 size_average=<true><block_start>"""
Loss(x, class) = - \alpha (1-softmax(x)[class])^gamma \log(softmax(x)[class])
:param num_classes: (int) num of the classes
:param ignore_label: (int) ignore label
:param alpha: (1D Tensor or Variable) the scalar factor
:param gamma: (float) gamma > 0;
reduces the relative loss for well-classified examples (probabilities > .5),
putting more focus on hard, mis-classified examples
:param size_average: (bool): By default, the losses are averaged over observations for each mini-batch.
If the size_average is set to False, the losses are
instead summed for each mini-batch.
"""<line_sep>super(FocalLoss2D self).__init__()<line_sep>self.alpha=alpha<line_sep>self.gamma=gamma<line_sep>self.num_classes=num_classes<line_sep>self.ignore_label=ignore_label<line_sep>self.size_average=size_average<line_sep>self.one_hot=Variable(torch.eye(self.num_classes))<block_end><def_stmt>forward self cls_preds cls_targets<block_start>"""
:param cls_preds: (n, c, h, w)
:param cls_targets: (n, h, w)
:return:
"""<assert_stmt><not>cls_targets.requires_grad<assert_stmt>cls_targets.dim()<eq>3<assert_stmt>cls_preds.size(0)<eq>cls_targets.size(0) "{0} vs {1} ".format(cls_preds.size(0) cls_targets.size(0))<assert_stmt>cls_preds.size(2)<eq>cls_targets.size(1) "{0} vs {1} ".format(cls_preds.size(2) cls_targets.size(1))<assert_stmt>cls_preds.size(3)<eq>cls_targets.size(2) "{0} vs {1} ".format(cls_preds.size(3) cls_targets.size(3))<if_stmt>cls_preds.is_cuda<block_start>self.one_hot=self.one_hot.cuda()<block_end>n,c,h,w=cls_preds.size()<line_sep># +++++++++++++++++++++++++++++++++++++++++++++++++++ #
# 1. target reshape and one-hot encode
# +++++++++++++++++++++++++++++++++++++++++++++++++++ #
# 1.1. target: (n*h*w,)
cls_targets=cls_targets.view(n<times>h<times>w 1)<line_sep>target_mask=(cls_targets<ge>0)<times>(cls_targets<ne>self.ignore_label)<line_sep>cls_targets=cls_targets[target_mask]<line_sep>cls_targets=self.one_hot.index_select(dim=0 index=cls_targets)<line_sep># +++++++++++++++++++++++++++++++++++++++++++++++++++ #
# 2. compute focal loss for multi-classification
# +++++++++++++++++++++++++++++++++++++++++++++++++++ #
# 2.1. The softmax. prob: (n, c, h, w)
prob=F.softmax(cls_preds dim=1)<line_sep># 2.2. prob: (n*h*w, c) - contiguous() required if transpose() is used before view().
prob=prob.transpose(1 2).transpose(2 3).contiguous().view(-1 c)<line_sep>prob=prob[target_mask.repeat(1 c)]<line_sep>prob=prob.view(-1 c)# (n*h*w, c)
probs=torch.clamp((prob<times>cls_targets).sum(1).view(-1 1) min=1e-8 max=1.0)<line_sep>batch_loss=-self.alpha<times>(torch.pow((1-probs) self.gamma))<times>probs.log()<if_stmt>self.size_average<block_start>loss=batch_loss.mean()<block_end><else_stmt><block_start>loss=batch_loss.sum()<block_end><return>loss<block_end><block_end><class_stmt>SemanticEncodingLoss(nn.Module)<block_start><def_stmt>__init__ self num_classes=19 ignore_label=250 alpha=0.25<block_start>super(SemanticEncodingLoss self).__init__()<line_sep>self.alpha=alpha<line_sep>self.num_classes=num_classes<line_sep>self.ignore_label=ignore_label<block_end><def_stmt>unique_encode self cls_targets<block_start>batch_size,_,_=cls_targets.size()<line_sep>target_mask=(cls_targets<ge>0)<times>(cls_targets<ne>self.ignore_label)<line_sep>cls_targets=[cls_targets[idx].masked_select(target_mask[idx])<for>idx np.arange(batch_size)]<line_sep># unique_cls = [np.unique(label.numpy(), return_counts=True) for label in cls_targets]
unique_cls=[np.unique(label.numpy())<for>label cls_targets]<line_sep>encode=np.zeros((batch_size self.num_classes) dtype=np.uint8)<for_stmt>idx np.arange(batch_size)<block_start>np.put(encode[idx] unique_cls[idx] 1)<block_end><return>torch.from_numpy(encode).float()<block_end><def_stmt>forward self predicts enc_cls_target size_average=<true><block_start>se_loss=F.binary_cross_entropy_with_logits(predicts enc_cls_target weight=<none> size_average=size_average)<line_sep><return>self.alpha<times>se_loss<block_end><block_end># ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #
# Lovasz-Softmax
# <NAME> 2018 ESAT-PSI KU Leuven
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #
<def_stmt>lovasz_grad gt_sorted<block_start>"""
Computes gradient of the Lovasz extension w.r.t sorted errors
See Alg. 1 in paper
"""<line_sep>p=len(gt_sorted)<line_sep>gts=gt_sorted.sum()<line_sep>intersection=gts-gt_sorted.float().cumsum(0)<line_sep>union=gts+(1-gt_sorted).float().cumsum(0)<line_sep>jaccard=1.-intersection/union<if_stmt>p<g>1# cover 1-pixel case
<block_start>jaccard[1:p]=jaccard[1:p]-jaccard[0:-1]<block_end><return>jaccard<block_end><def_stmt>iou_binary preds labels EMPTY=1. ignore=<none> per_image=<true><block_start>"""
IoU for foreground class
binary: 1 foreground, 0 background
"""<if_stmt><not>per_image<block_start>preds,labels=(preds ) (labels )<block_end>ious=[]<for_stmt>pred,label zip(preds labels)<block_start>intersection=((label<eq>1)&(pred<eq>1)).sum()<line_sep>union=((label<eq>1)|((pred<eq>1)&(label<ne>ignore))).sum()<if_stmt><not>union<block_start>iou=EMPTY<block_end><else_stmt><block_start>iou=float(intersection)/union<block_end>ious.append(iou)<block_end>iou=utils.mean(ious)# mean accross images if per_image
<return>100<times>iou<block_end><def_stmt>iou preds labels C EMPTY=1. ignore=<none> per_image=<false><block_start>"""
Array of IoU for each (non ignored) class
"""<if_stmt><not>per_image<block_start>preds,labels=(preds ) (labels )<block_end>ious=[]<for_stmt>pred,label zip(preds labels)<block_start>iou=[]<for_stmt>i range(C)<block_start><if_stmt>i<ne>ignore# The ignored label is sometimes among predicted classes (ENet - CityScapes)
<block_start>intersection=((label<eq>i)&(pred<eq>i)).sum()<line_sep>union=((label<eq>i)|((pred<eq>i)&(label<ne>ignore))).sum()<if_stmt><not>union<block_start>iou.append(EMPTY)<block_end><else_stmt><block_start>iou.append(float(intersection)/union)<block_end><block_end><block_end>ious.append(iou)<block_end>ious=map(utils.mean zip(*ious))# mean accross images if per_image
<return>100<times>np.array(ious)<block_end><def_stmt>lovasz_softmax probas labels only_present=<false> per_image=<false> ignore=<none><block_start>"""
Multi-class Lovasz-Softmax loss
probas: [B, C, H, W] Variable, class probabilities at each prediction (between 0 and 1)
labels: [B, H, W] Tensor, ground truth labels (between 0 and C - 1)
only_present: average only on classes present in ground truth
per_image: compute the loss per image instead of per batch
ignore: void class labels
"""<if_stmt>per_image<block_start>loss=utils.mean(lovasz_softmax_flat(*flatten_probas(prob lab ignore) only_present=only_present)<for>prob,lab zip(probas labels))<block_end><else_stmt><block_start>loss=lovasz_softmax_flat(*flatten_probas(probas labels ignore) only_present=only_present)<block_end><return>loss<block_end><def_stmt>lovasz_softmax_flat probas labels only_present=<false><block_start>"""
Multi-class Lovasz-Softmax loss
probas: [P, C] Variable, class probabilities at each prediction (between 0 and 1)
labels: [P] Tensor, ground truth labels (between 0 and C - 1)
only_present: average only on classes present in ground truth
"""<line_sep>C=probas.size(1)<line_sep>losses=[]<for_stmt>c range(C)<block_start>fg=(labels<eq>c).float()# foreground for class c
<if_stmt>only_present<and>fg.sum()<eq>0<block_start><continue><block_end>errors=(fg-probas[: c]).abs()<line_sep>errors_sorted,perm=torch.sort(errors 0 descending=<true>)<line_sep>perm=perm.data<line_sep>fg_sorted=fg[perm]<line_sep>losses.append(torch.dot(errors_sorted lovasz_grad(fg_sorted)))<block_end><return>utils.mean(losses)<block_end><def_stmt>flatten_probas scores labels ignore=<none><block_start>"""
Flattens predictions in the batch
"""<line_sep>B,C,H,W=scores.size()<line_sep>scores=scores.permute(0 2 3 1).contiguous().view(-1 C)# B * H * W, C = P, C
labels=labels.view(-1)<if_stmt>ignore<is><none><block_start><return>scores labels<block_end>valid=(labels<ne>ignore)<line_sep>vscores=scores[valid.nonzero().squeeze()]<line_sep>vlabels=labels[valid]<line_sep><return>vscores vlabels<block_end><if_stmt>__name__<eq>"__main__"<block_start><import_from_stmt>torch.autograd Variable<while_stmt><true><block_start>dummy_in=Variable(torch.randn(2 3 32 32) requires_grad=<true>)<line_sep>dummy_gt=Variable(torch.LongTensor(2 32 32).random_(0 3))<line_sep>dummy_in=F.softmax(dummy_in dim=1)<line_sep>loss=lovasz_softmax(dummy_in dummy_gt ignore=255)<line_sep>print(loss.data[0])<block_end><block_end> |
# coding=utf-8
# Copyright 2019 Foursquare Labs Inc. All Rights Reserved.
<import_from_future_stmt> absolute_import division print_function unicode_literals<import_stmt>urllib<import_from_stmt>pants.subsystem.subsystem Subsystem<import_from_stmt>pants.util.memo memoized_property<class_stmt>ConfluenceSubsystem(Subsystem)<block_start>options_scope='confluence-wiki'<line_sep>@staticmethod<def_stmt>confluence_url_builder page<block_start>config=page.provides[0].config<line_sep>title=config['title']<line_sep>full_url='{}/wiki/spaces/{}/{}'.format(ConfluenceSubsystem.wiki_url config['space'] urllib.quote_plus(title) )<line_sep><return>title full_url<block_end>@classmethod<def_stmt>register_options cls register<block_start>super(ConfluenceSubsystem cls).register_options(register)<line_sep># TODO(mateo): This only supports a single wiki url, should a map of wiki_name:url.
# This is not trivial to unwind, the base plugin assumed self-hosted wiki and url builders.
register('--wiki-url' default=<none> advanced=<true> help='Wiki hostname.' )<line_sep>register('--email-domain' advanced=<true> help='Options default domain. For <EMAIL>, use @foo.com. Note: Overrides the email-domain option.' )<block_end>@memoized_property<def_stmt>wiki_url self<block_start>wiki_url=self.get_options().wiki_url<if_stmt>wiki_url<is><none><block_start><raise>ValueError("No wiki URL set! Please set option --{}-wiki-url.".format(self.options_scope))<block_end><return>wiki_url<block_end>@memoized_property<def_stmt>email_domain self<block_start>email_domain=self.get_options().email_domain<if_stmt>email_domain<is><none><block_start><raise>ValueError("No email domain is set! Please set option --{}-email-domain.".format(self.options_scope))<block_end><return>email_domain<block_end><block_end> |
<import_stmt>lightnion<as>lnn<import_stmt>nacl.public<import_stmt>base64<def_stmt>hand guard encode=<true><block_start>identity=base64.b64decode(guard['router']['identity']+'====')<line_sep>onion_key=base64.b64decode(guard['ntor-onion-key']+'====')<line_sep>ephemeral_key,payload=lnn.crypto.ntor.hand(identity onion_key)<if_stmt>encode<block_start>payload=str(base64.b64encode(payload) 'utf8')<block_end><return>payload (onion_key ephemeral_key identity)<block_end><def_stmt>shake payload material<block_start>payload=base64.b64decode(payload)<line_sep>onion_key,ephemeral_key,identity=material<line_sep>material=lnn.crypto.ntor.shake(ephemeral_key payload identity onion_key length=92)<line_sep><return>lnn.crypto.ntor.kdf(material)<block_end> |
"""
WSGI config for pinterest_example project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""<import_stmt>os<line_sep>os.environ.setdefault("DJANGO_SETTINGS_MODULE" "core.settings")<import_from_stmt>django.core.wsgi get_wsgi_application<line_sep>application=get_wsgi_application()<try_stmt><block_start><import_from_stmt>dj_static Cling<block_end><except_stmt>ImportError<block_start><pass><block_end><else_stmt><block_start>application=Cling(application)<block_end> |
<import_stmt>torch<import_stmt>torch.nn<as>nn<import_stmt>torch.nn.functional<as>F<import_from_stmt>datas.benchmark Benchmark<import_from_stmt>datas.div2k DIV2K<import_from_stmt>models.ecbsr ECBSR<import_from_stmt>torch.utils.data DataLoader<import_stmt>math<import_stmt>argparse yaml<import_stmt>utils<import_stmt>os<import_from_stmt>tqdm tqdm<import_stmt>logging<import_stmt>sys<import_stmt>time<line_sep>parser=argparse.ArgumentParser(description='ECBSR')<line_sep>## yaml configuration files
parser.add_argument('--config' type=str default=<none> help='pre-config file for training')<line_sep>## paramters for ecbsr
parser.add_argument('--scale' type=int default=2 help='scale for sr network')<line_sep>parser.add_argument('--colors' type=int default=1 help='1(Y channls of YCbCr)')<line_sep>parser.add_argument('--m_ecbsr' type=int default=4 help='number of ecb')<line_sep>parser.add_argument('--c_ecbsr' type=int default=8 help='channels of ecb')<line_sep>parser.add_argument('--idt_ecbsr' type=int default=0 help='incorporate identity mapping in ecb or not')<line_sep>parser.add_argument('--act_type' type=str default='prelu' help='prelu, relu, splus, rrelu')<line_sep>parser.add_argument('--pretrain' type=str default=<none> help='path of pretrained model')<line_sep>## parameters for model training
parser.add_argument('--patch_size' type=int default=64 help='patch size of HR image')<line_sep>parser.add_argument('--batch_size' type=int default=32 help='batch size of training data')<line_sep>parser.add_argument('--data_repeat' type=int default=1 help='times of repetition for training data')<line_sep>parser.add_argument('--data_augment' type=int default=1 help='data augmentation for training')<line_sep>parser.add_argument('--epochs' type=int default=600 help='number of epochs')<line_sep>parser.add_argument('--test_every' type=int default=1 help='test the model every N epochs')<line_sep>parser.add_argument('--log_every' type=int default=1 help='print log of loss, every N steps')<line_sep>parser.add_argument('--log_path' type=str default="./experiments/")<line_sep>parser.add_argument('--lr' type=float default=5e-4 help='learning rate of optimizer')<line_sep>parser.add_argument('--store_in_ram' type=int default=0 help='store the whole training data in RAM or not')<line_sep>## hardware specification
parser.add_argument('--gpu_id' type=int default=0 help='gpu id for training')<line_sep>parser.add_argument('--threads' type=int default=1 help='number of threads for training')<line_sep>## dataset specification
parser.add_argument('--div2k_hr_path' type=str default='/Users/xindongzhang/Documents/SRData/DIV2K/DIV2K_train_HR' help='')<line_sep>parser.add_argument('--div2k_lr_path' type=str default='/Users/xindongzhang/Documents/SRData/DIV2K/DIV2K_train_LR_bicubic' help='')<line_sep>parser.add_argument('--set5_hr_path' type=str default='/Users/xindongzhang/Documents/SRData/benchmark/Set5/HR' help='')<line_sep>parser.add_argument('--set5_lr_path' type=str default='/Users/xindongzhang/Documents/SRData/benchmark/Set5/LR_bicubic' help='')<line_sep>parser.add_argument('--set14_hr_path' type=str default='/Users/xindongzhang/Documents/SRData/benchmark/Set14/HR' help='')<line_sep>parser.add_argument('--set14_lr_path' type=str default='/Users/xindongzhang/Documents/SRData/benchmark/Set14/LR_bicubic' help='')<line_sep>parser.add_argument('--b100_hr_path' type=str default='/Users/xindongzhang/Documents/SRData/benchmark/B100/HR' help='')<line_sep>parser.add_argument('--b100_lr_path' type=str default='/Users/xindongzhang/Documents/SRData/benchmark/B100/LR_bicubic' help='')<line_sep>parser.add_argument('--u100_hr_path' type=str default='/Users/xindongzhang/Documents/SRData/benchmark/Urban100/HR' help='')<line_sep>parser.add_argument('--u100_lr_path' type=str default='/Users/xindongzhang/Documents/SRData/benchmark/Urban100/LR_bicubic' help='')<if_stmt>__name__<eq>'__main__'<block_start>args=parser.parse_args()<if_stmt>args.config<block_start>opt=vars(args)<line_sep>yaml_args=yaml.load(open(args.config) Loader=yaml.FullLoader)<line_sep>opt.update(yaml_args)<block_end><if_stmt>args.colors<eq>3<block_start><raise>ValueError("ECBSR is trained and tested with colors=1.")<block_end>device=<none><if_stmt>args.gpu_id<ge>0<and>torch.cuda.is_available()<block_start>print("use cuda & cudnn for acceleration!")<line_sep>print("the gpu id is: 
{}".format(args.gpu_id))<line_sep>device=torch.device('cuda:{}'.format(args.gpu_id))<line_sep>torch.backends.cudnn.benchmark=<true><block_end><else_stmt><block_start>print("use cpu for training!")<line_sep>device=torch.device('cpu')<block_end>torch.set_num_threads(args.threads)<line_sep>div2k=DIV2K(args.div2k_hr_path args.div2k_lr_path train=<true> augment=args.data_augment scale=args.scale colors=args.colors patch_size=args.patch_size repeat=args.data_repeat store_in_ram=args.store_in_ram)<line_sep>set5=Benchmark(args.set5_hr_path args.set5_lr_path scale=args.scale colors=args.colors store_in_ram=args.store_in_ram)<line_sep>set14=Benchmark(args.set14_hr_path args.set14_lr_path scale=args.scale colors=args.colors store_in_ram=args.store_in_ram)<line_sep>b100=Benchmark(args.b100_hr_path args.b100_lr_path scale=args.scale colors=args.colors store_in_ram=args.store_in_ram)<line_sep>u100=Benchmark(args.u100_hr_path args.u100_lr_path scale=args.scale colors=args.colors store_in_ram=args.store_in_ram)<line_sep>train_dataloader=DataLoader(dataset=div2k num_workers=args.threads batch_size=args.batch_size shuffle=<true> pin_memory=<true> drop_last=<true>)<line_sep>valid_dataloaders=[]<line_sep>valid_dataloaders<augadd>[{'name':'set5' 'dataloader':DataLoader(dataset=set5 batch_size=1 shuffle=<false>)}]<line_sep>valid_dataloaders<augadd>[{'name':'set14' 'dataloader':DataLoader(dataset=set14 batch_size=1 shuffle=<false>)}]<line_sep>valid_dataloaders<augadd>[{'name':'b100' 'dataloader':DataLoader(dataset=b100 batch_size=1 shuffle=<false>)}]<line_sep>valid_dataloaders<augadd>[{'name':'u100' 'dataloader':DataLoader(dataset=u100 batch_size=1 shuffle=<false>)}]<line_sep>## definitions of model, loss, and optimizer
model=ECBSR(module_nums=args.m_ecbsr channel_nums=args.c_ecbsr with_idt=args.idt_ecbsr act_type=args.act_type scale=args.scale colors=args.colors).to(device)<line_sep>loss_func=nn.L1Loss()<line_sep>optimizer=torch.optim.Adam(model.parameters() lr=args.lr)<if_stmt>args.pretrain<is><not><none><block_start>print("load pretrained model: {}!".format(args.pretrain))<line_sep>model.load_state_dict(torch.load(args.pretrain))<block_end><else_stmt><block_start>print("train the model from scratch!")<block_end>## auto-generate the output logname
timestamp=utils.cur_timestamp_str()<line_sep>experiment_name="ecbsr-x{}-m{}c{}-{}-{}".format(args.scale args.m_ecbsr args.c_ecbsr args.act_type timestamp)<line_sep>experiment_path=os.path.join(args.log_path experiment_name)<if_stmt><not>os.path.exists(experiment_path)<block_start>os.makedirs(experiment_path)<block_end>experiment_model_path=os.path.join(experiment_path 'models')<if_stmt><not>os.path.exists(experiment_model_path)<block_start>os.makedirs(experiment_model_path)<block_end>log_name=os.path.join(experiment_path "log.txt")<line_sep>sys.stdout=utils.ExperimentLogger(log_name sys.stdout)<line_sep>stat_dict=utils.get_stat_dict()<line_sep>## save training paramters
exp_params=vars(args)<line_sep>exp_params_name=os.path.join(experiment_path 'config.yml')<with_stmt>open(exp_params_name 'w')<as>exp_params_file<block_start>yaml.dump(exp_params exp_params_file default_flow_style=<false>)<block_end>timer_start=time.time()<for_stmt>epoch range(args.epochs)<block_start>epoch_loss=0.0<line_sep>stat_dict['epochs']=epoch<line_sep>model=model.train()<line_sep>print("##===========Epoch: {}=============##".format(epoch))<for_stmt>iter,batch enumerate(train_dataloader)<block_start>optimizer.zero_grad()<line_sep>lr,hr=batch<line_sep>lr,hr=lr.to(device) hr.to(device)<line_sep>sr=model(lr)<line_sep>loss=loss_func(sr hr)<line_sep>loss.backward()<line_sep>optimizer.step()<line_sep>epoch_loss<augadd>float(loss)<if_stmt>(iter+1)%args.log_every<eq>0<block_start>cur_steps=(iter+1)<times>args.batch_size<line_sep>total_steps=len(train_dataloader.dataset)<line_sep>fill_width=math.ceil(math.log10(total_steps))<line_sep>cur_steps=str(cur_steps).zfill(fill_width)<line_sep>epoch_width=math.ceil(math.log10(args.epochs))<line_sep>cur_epoch=str(epoch).zfill(epoch_width)<line_sep>avg_loss=epoch_loss/(iter+1)<line_sep>stat_dict['losses'].append(avg_loss)<line_sep>timer_end=time.time()<line_sep>duration=timer_end-timer_start<line_sep>timer_start=timer_end<line_sep>print("Epoch:{}, {}/{}, loss: {:.4f}, time: {:.3f}".format(cur_epoch cur_steps total_steps avg_loss duration))<block_end><block_end><if_stmt>(epoch+1)%args.test_every<eq>0<block_start>torch.set_grad_enabled(<false>)<line_sep>test_log=""<line_sep>model=model.eval()<for_stmt>valid_dataloader valid_dataloaders<block_start>avg_psnr=0.0<line_sep>avg_ssim=0.0<line_sep>name=valid_dataloader['name']<line_sep>loader=valid_dataloader['dataloader']<for_stmt>lr,hr tqdm(loader ncols=80)<block_start>lr,hr=lr.to(device) hr.to(device)<line_sep>sr=model(lr)<line_sep># crop
hr=hr[: : args.scale:-args.scale args.scale:-args.scale]<line_sep>sr=sr[: : args.scale:-args.scale args.scale:-args.scale]<line_sep># quantize
hr=hr.clamp(0 255)<line_sep>sr=sr.clamp(0 255)<line_sep># calculate psnr
psnr=utils.calc_psnr(sr hr)<line_sep>ssim=utils.calc_ssim(sr hr)<line_sep>avg_psnr<augadd>psnr<line_sep>avg_ssim<augadd>ssim<block_end>avg_psnr=round(avg_psnr/len(loader) 2)<line_sep>avg_ssim=round(avg_ssim/len(loader) 4)<line_sep>stat_dict[name]['psnrs'].append(avg_psnr)<line_sep>stat_dict[name]['ssims'].append(avg_ssim)<if_stmt>stat_dict[name]['best_psnr']['value']<l>avg_psnr<block_start>stat_dict[name]['best_psnr']['value']=avg_psnr<line_sep>stat_dict[name]['best_psnr']['epoch']=epoch<block_end><if_stmt>stat_dict[name]['best_ssim']['value']<l>avg_ssim<block_start>stat_dict[name]['best_ssim']['value']=avg_ssim<line_sep>stat_dict[name]['best_ssim']['epoch']=epoch<block_end>test_log<augadd>"[{}-X{}], PSNR/SSIM: {:.2f}/{:.4f} (Best: {:.2f}/{:.4f}, Epoch: {}/{})\n".format(name args.scale float(avg_psnr) float(avg_ssim) stat_dict[name]['best_psnr']['value'] stat_dict[name]['best_ssim']['value'] stat_dict[name]['best_psnr']['epoch'] stat_dict[name]['best_ssim']['epoch'])<block_end># print log & flush out
print(test_log)<line_sep>sys.stdout.flush()<line_sep># save model
saved_model_path=os.path.join(experiment_model_path 'model_x{}_{}.pt'.format(args.scale epoch))<line_sep>torch.save(model.state_dict() saved_model_path)<line_sep>torch.set_grad_enabled(<true>)<line_sep># save stat dict
## save training paramters
stat_dict_name=os.path.join(experiment_path 'stat_dict.yml')<with_stmt>open(stat_dict_name 'w')<as>stat_dict_file<block_start>yaml.dump(stat_dict stat_dict_file default_flow_style=<false>)<block_end><block_end><block_end><block_end> |
"""Group Revit built-in categories logically and output the data in json
The built-in categories are provided in text files under DATA_DIR
Usage:
python3 ./cgroups.py group and output categories
python3 ./cgroups.py <catname> group and output <catname> category only
"""<line_sep># pylint: disable=bad-continuation
<import_stmt>sys<import_stmt>os<import_stmt>os.path<as>op<import_from_stmt>typing Set List TypeVar<import_stmt>json<import_stmt>re<line_sep>DATA_DIR="./bic_data"<line_sep>CGROUP_T=TypeVar("CGROUP")# pylint: disable=invalid-name
class CGROUP:
    """Represents a category grouping.

    A node in the hand-crafted grouping tree: the regex patterns in
    ``exclusives``/``includes``/``excludes`` select Revit builtin category
    names, and ``cgroups`` holds nested sub-groups.  The expand_* passes
    later replace the patterns with the matched category names in place.
    """

    def __init__(
        self,
        name: str,
        exclusives: List[str],    # patterns claiming categories exclusively
        includes: List[str],      # patterns adding categories non-exclusively
        excludes: List[str],      # patterns filtering matched names back out
        cgroups: List[CGROUP_T],  # nested sub-groups
        hidden: bool = False,     # hidden groups are expanded but not emitted
    ):
        self.name: str = name
        self.exclusives: List[str] = exclusives
        self.includes: List[str] = includes
        self.excludes: List[str] = excludes
        self.cgroups: List[CGROUP_T] = cgroups
        self.hidden: bool = hidden


class CategoryComp:
    """Represents data for a category selector component."""

    def __init__(self, name: str, categories: dict):
        self.name = name
        # Mapping of sub-component name -> list of category names; the "_"
        # key holds this component's own categories (built by create_ccomp,
        # which is the only constructor call site).
        self.categories = categories


class CategoryCompCollection:
    """Represents data for a collection of category selector components."""

    def __init__(
        self,
        version: str,
        bics: Set[str],           # all builtin category names loaded from disk
        components: List[CategoryComp],
        used_bics: Set[str],      # names claimed by at least one group
    ):
        # Summary block serialized alongside the components; "excluded" lists
        # the names that no group matched at all.
        self.meta = {
            "version": version,
            "total": len(bics),
            "included": len(used_bics),
            "excluded": list(bics.difference(used_bics)),
        }
        self.components = components


# =============================================================================
# this is a hand-crafted tree of CGroups that represents the grouping logic
# -----------------------------------------------------------------------------
CGROUPS=[CGROUP(name="Skip" exclusives=[r".+Obsolete.*" r".+OBSOLETE.*" r".+Deprecated.*" r"OST_GbXML.*" r"OST_gbXML.*" r"OST_DSR_.*" ] includes=[] excludes=[] cgroups=[] hidden=<true> ) CGROUP(name="Site" exclusives=[] includes=[r"OST_Site.*" r"OST_Sewer.*" r"OST_Road.*" r"OST_Building.*" r"OST_Contour.*" r"OST_Parking.*" ] excludes=[] cgroups=[CGROUP(name="Topography" exclusives=[] includes=[r"OST_.*Topo.*"] excludes=[] cgroups=[] ) ] ) CGROUP(name="References" exclusives=[] includes=[r"OST_Grid.*" r"OST_Level.*" r"OST_Level.*" r"OST_Constraint.*" r"OST_Reference.*" ] excludes=[r"OST_GridChains.*" r"OST_ReferencePoints.*" r"OST_ReferenceViewer.*" ] cgroups=[] ) CGROUP(name="Modeling" exclusives=[] includes=[r"OST_Generic.*" ] excludes=["OST_GenericLines" ] cgroups=[CGROUP(name="Mass" exclusives=[] includes=[r"OST_Mass.*"] excludes=[r"OST_.+Cutter" r"OST_.+Splitter" r"OST_.+All" r"OST_.+Outlines" ] cgroups=[] ) CGROUP(name="Ceilings" exclusives=[] includes=[r"OST_Ceiling.*"] excludes=[r"OST_.+Cut.*" r"OST_.+Projection.*" r"OST_.+Default.*" ] cgroups=[] ) CGROUP(name="Columns" exclusives=[] includes=[r"OST_Column.*"] excludes=[r"OST_.+LocalCoordSys"] cgroups=[] ) CGROUP(name="Curtain Systems" exclusives=[] includes=[r"OST_Curta.*"] excludes=[r"OST_.+FaceManager.*" r"OST_CurtainGrids.+" r"OST_Curtain.+Cut" ] cgroups=[] ) CGROUP(name="Floors" exclusives=[] includes=[r"OST_Floor.*"] excludes=[r"OST_.+LocalCoordSys" r"OST_.+Cut.*" r"OST_.+Projection.*" r"OST_.+Default.*" ] cgroups=[] ) CGROUP(name="Doors" exclusives=[] includes=[r"OST_Door.*"] excludes=[r"OST_.+Cut.*" r"OST_.+Projection.*" ] cgroups=[] ) CGROUP(name="Casework" exclusives=[] includes=[r"OST_Casework.*"] excludes=[] cgroups=[] ) CGROUP(name="Windows" exclusives=[] includes=[r"OST_Window.*"] excludes=[r"OST_.+Cut.*" r"OST_.+Projection.*" ] cgroups=[] ) CGROUP(name="Furniture" exclusives=[] includes=[r"OST_Furniture.*"] excludes=[] cgroups=[] ) CGROUP(name="Adaptive" exclusives=[] 
includes=[r"OST_Adaptive.*"] excludes=[] cgroups=[] ) CGROUP(name="Speciality" exclusives=[] includes=[r"OST_Speciality.*"] excludes=[] cgroups=[] ) CGROUP(name="Openings" exclusives=[r"OST_.+Opening" r"OST_Arc.*" r"OST_Shaft.*" ] includes=[] excludes=[r"OST_.+Cut.*" r"OST_.+Projection.*" ] cgroups=[] ) CGROUP(name="Railing" exclusives=[] includes=[r"OST_Railing.*"] excludes=[r"OST_.+Cut.*" r"OST_.+Projection.*" ] cgroups=[] ) CGROUP(name="Stairs" exclusives=[] includes=[r"OST_Stair.*" r"OST_.+Stairs"] excludes=[r"OST_.+Cut.*" r"OST_.+Projection.*" ] cgroups=[] ) CGROUP(name="Ramps" exclusives=[] includes=[r"OST_Ramp.*"] excludes=[r"OST_.+Cut.*" r"OST_.+Projection.*" ] cgroups=[] ) CGROUP(name="Walls" exclusives=[] includes=[r"OST_Wall.*" r"OST_Reveals" r"OST_Stacked.*"] excludes=[r"OST_.+LocalCoordSys" r"OST_.+RefPlanes" r"OST_.+Default" r"OST_.+Cut.*" r"OST_.+Projection.*" ] cgroups=[] ) CGROUP(name="Roofs" exclusives=[] includes=[r"OST_Roof.*" r"OST_Fascia.*" r"OST_Purlin.*" r"OST_Gutter.*" r"OST_Cornices.*" r"OST_Dormer.*" ] excludes=[r"OST_.+Opening.*" r"OST_.+Cut.*" r"OST_.+Projection.*" ] cgroups=[] ) CGROUP(name="Spatial" exclusives=[] includes=[r"OST_Area.*" r"OST_Zone.*" r"OST_MEPSpace.*" r"OST_Zoning.*" r"OST_Room.*" ] excludes=[r"OST_.+Fill" r"OST_.+Visibility" r"OST_AreaRein.*" r"OST_AreaReport.*" ] cgroups=[] ) CGROUP(name="Structural" exclusives=[] includes=[r"OST_Struct.+" r"OST_.+Bracing" r"OST_Truss.*" r"OST_Joist.*" r"OST_FabricArea.*" r"OST_Rebar.*" r"OST_Girder.*" r"OST_Edge.*" r"OST_Load.*" r"OST_Internal.*Load.*" r"OST_Isolated.*" r"OST_Framing.*" r"OST_Footing.*" r"OST_Foundation.*" r"OST_Fnd.*" r"OST_Span.*" r"OST_Steel.*" r"OST_SWall.*" r"OST_Brace.*" r"OST_Bridge.*" r"OST_.*PointLoad.*" r"OST_Beam.*" ] excludes=[r"OST_.+LocalCoordSys" r"OST_.+Other" r"OST_.+LocationLine" r"OST_.+PlanReps" r"OST_.+NobleWarning" r"OST_.+Failed" ] cgroups=[] ) CGROUP(name="Mechanical" exclusives=[] includes=[r"OST_Mechanical.*" r"OST_.+Ducts" r"OST_Duct.*" 
r"OST_MEPAnalytical.*" r"OST_Flex.*" r"OST_MEPSystem.*" r"OST_HVAC.*" r"OST_Fabrication.+" ] excludes=[r"OST_.+Reference.*" r"OST_.+TmpGraphic.*" r"OST_.+Visibility" ] cgroups=[] ) CGROUP(name="Electrical" exclusives=[] includes=[r"OST_.+Pipes" r"OST_Conduit.*" r"OST_Cable.*" r"OST_Wire.*" r"OST_Light.*" r"OST_Device.*" r"OST_Panel.*" r"OST_Elec.*" r"OST_Routing.*" r"OST_Switch.*" r"OST_Connector.*" r"OST_Route.*" r"OST_.+Devices|OST_.+Device(Tags)|OST_.+Templates?" ] excludes=[r"OST_.+Axis" r"OST_.+Template.*" r"OST_.+Definition.*" r"OST_.+Material" ] cgroups=[] ) CGROUP(name="Plumbing" exclusives=[] includes=[r"OST_Pipe.*" r"OST_Fluid.*" r"OST_Fixture.*" r"OST_PlumbingFixture.*" r"OST_Piping.*" r"OST_Sprinkler.*" ] excludes=[r"OST_.+Reference.*" r"OST_.+Material" ] cgroups=[] ) ] ) CGROUP(name="Drafting" exclusives=[] includes=[] excludes=[] cgroups=[CGROUP(name="Views" exclusives=[] includes=[r"OST_.*Annotation.*" "OST_Views" "OST_PlanRegion" r"OST_Schedule.*" r"OST_Camera.*" r"OST_Crop.*" r"OST_Compass.*" r"OST_Section.*" r"OST_Sun.*" r"OST_RenderRegions" ] excludes=[r"OST_.+ViewParamGroup" ] cgroups=[] ) CGROUP(name="Sheets" exclusives=[] includes=[r"OST_Sheet.*" r"OST_Viewport.*" r"OST_Title.*" r"OST_Guide.*" r"OST_Revisions.*" ] excludes=[] cgroups=[] ) CGROUP(name="Tags" exclusives=[r"OST_Tag.*" r"OST_.+Tags" r"OST_.+Labels"] includes=[] excludes=[] cgroups=[] ) CGROUP(name="Annotation" exclusives=[r"OST_.+DownArrow.*" r"OST_.+DownText.*" r"OST_.+UpArrow.*" r"OST_.+UpText.*" r"OST_.+Annotation.*" r"OST_Callout.*" r"OST_Spot.*" r"OST_Cloud.*" r"OST_Elev.*" r"OST_Repeating.*" "OST_BrokenSectionLine" r"OST_Legend.*" r"OST_Detail.*" "OST_InvisibleLines" "OST_DemolishedLines" "OST_InsulationLines" "OST_FillPatterns" "OST_FilledRegion" "OST_HiddenLines" r"OST_Center.*" r"OST_Keynote.*" r"OST_Matchline.*" r"OST_Model.*" r"OST_.+Text.*" r"OST_.+Overhead.*" r"OST_Curve.*" r"OST_Dim.*" r"OST_Dimension.*" r"OST_Masking.*" r"OST_.+Tag.*" r"OST_.+Label.*" 
r"OST_.+Symbol.*" r"OST_.+TickMark.*" "OST_RevisionClouds" ] includes=[] excludes=[r"OST_DimLock.+" r"OST_IOS.+" r"OST_.+Symbology" ] cgroups=[] ) ] ) CGROUP(name="Containers" exclusives=[] includes=[r"OST_Part.*" r"OST_Assemblies.*" r"OST_Group.*" r"OST_.+Groups" ] excludes=[] cgroups=[] ) CGROUP(name="Links" exclusives=["OST_RvtLinks" "OST_TopographyLink" r"OST_Coordination.*" r"OST_PointCloud.*" r"OST_Raster.*" ] includes=[] excludes=[] cgroups=[] ) CGROUP(name="Analysis" exclusives=[r"OST_.*Analy.*"] includes=[] excludes=[r"OST_AnalysisResults"] cgroups=[CGROUP(name="Paths" exclusives=[r"OST_Path.*"] includes=[] excludes=[] cgroups=[] ) ] ) CGROUP(name="Rendering" exclusives=[] includes=[r"OST_Entourage.*" ] excludes=[] cgroups=[CGROUP(name="Materials" exclusives=[r"OST_Material.*" r"OST_Appearance.*" r"OST_Decal.*" r"OST_Planting.*" ] includes=[] excludes=[] cgroups=[] )] ) ]<line_sep># =============================================================================
def expand_exclusives(cgroup: "CGROUP", used_bics: Set[str], remaining_bics: Set[str]) -> None:
    """Expand ``cgroup.exclusives`` regex patterns to concrete category names.

    Walks the tree depth-first.  A category claimed exclusively by one group
    may not be claimed by any other, so a match against an already-used name
    raises.  Mutates ``cgroup.exclusives`` (patterns are replaced by the
    matched names), ``used_bics`` and ``remaining_bics`` in place.

    Raises:
        Exception: on an exclusive-claim conflict between two groups.
    """
    matched = set()
    for bic in remaining_bics.copy():
        for excluspat in cgroup.exclusives:
            if re.match(excluspat, bic):
                if bic in used_bics:
                    raise Exception(f'Exclusive conflict in "{cgroup.name}" @ "{excluspat}"')
                matched.add(bic)
    # Drop any matched name that is also hit by one of this group's excludes.
    dropped = {name for name in matched for pat in cgroup.excludes if re.match(pat, name)}
    matched -= dropped
    used_bics.update(matched)
    remaining_bics.difference_update(used_bics)
    # Recurse before overwriting our own patterns so children see the updated
    # pools.  (The old code also collected the recursion's None returns into a
    # dead sub_components list; dropped.)
    for sub_cgroup in cgroup.cgroups:
        expand_exclusives(sub_cgroup, used_bics, remaining_bics)
    cgroup.exclusives = matched


def expand_includes(cgroup: "CGROUP", used_bics: Set[str], remaining_bics: Set[str]) -> None:
    """Expand ``cgroup.includes`` regex patterns to concrete category names.

    Unlike exclusives, included names are non-exclusive: they are recorded in
    ``used_bics`` but deliberately NOT removed from ``remaining_bics``, so
    several groups may include the same category.  Mutates ``cgroup.includes``
    and ``used_bics`` in place.
    """
    matched = set()
    for bic in remaining_bics.copy():
        for incpat in cgroup.includes:
            if re.match(incpat, bic):
                matched.add(bic)
    dropped = {name for name in matched for pat in cgroup.excludes if re.match(pat, name)}
    matched -= dropped
    used_bics.update(matched)
    for sub_cgroup in cgroup.cgroups:
        expand_includes(sub_cgroup, used_bics, remaining_bics)
    cgroup.includes = matched


def filter_cgroup(cgroup: "CGROUP", name: str):
    """Depth-first search for a cgroup named ``name``; returns None if absent."""
    if cgroup.name == name:
        return cgroup
    for scgroup in cgroup.cgroups:
        if mcg := filter_cgroup(scgroup, name):
            return mcg
    return None


def create_ccomp(cgroup: "CGROUP") -> "CategoryComp":
    """Create component data from an expanded cgroup.

    Categories claimed by a sub-component are removed from this component's
    own ("_") bucket, so each name appears only once along a branch.
    """
    # Build a fresh set: the previous version aliased cgroup.exclusives and
    # mutated the input in place when merging the includes.
    root_categories = set(cgroup.exclusives) | set(cgroup.includes)
    sub_components = [create_ccomp(sub) for sub in cgroup.cgroups]
    sub_categories = {}
    for sub_comp in sub_components:
        sub_categories[sub_comp.name] = sub_comp.categories
        all_sub_bips = []
        for sub_bips in sub_comp.categories.values():
            all_sub_bips.extend(sub_bips)
        root_categories = root_categories.difference(all_sub_bips)
    categories = {"_": sorted(root_categories)}
    categories.update(sub_categories)
    return CategoryComp(name=cgroup.name, categories=categories)


def create_ccomp_collection(version: str, builtin_category_names: Set[str]) -> "CategoryCompCollection":
    """Create a component collection from the set of builtin category names.

    If a group name is given on the command line (sys.argv[1]), only that
    group (searched anywhere in the tree) is emitted; otherwise every
    non-hidden root group is.
    """
    remaining_bics = builtin_category_names.copy()
    used_bics: Set[str] = set()
    # All exclusives are resolved first so conflicts are detected before the
    # overlap-friendly includes run.
    for cgroup in CGROUPS:
        expand_exclusives(cgroup, used_bics, remaining_bics)
    for cgroup in CGROUPS:
        expand_includes(cgroup, used_bics, remaining_bics)
    all_comps: List[CategoryComp] = []
    if len(sys.argv) > 1:
        for cgroup in CGROUPS:
            matching_cgroup = filter_cgroup(cgroup, name=sys.argv[1])
            if matching_cgroup:
                all_comps.append(create_ccomp(matching_cgroup))
    else:
        all_comps = [create_ccomp(cg) for cg in CGROUPS if not cg.hidden]
    return CategoryCompCollection(
        version=version,
        bics=builtin_category_names,
        components=all_comps,
        used_bics=used_bics,
    )


def load_bics(data_file: str) -> Set[str]:
    """Load builtin category names, one per line, from a text file."""
    with open(data_file, "r") as bicfile:
        return {line.strip() for line in bicfile}


def dump_bics(data_file: str, ccomps_col: "CategoryCompCollection") -> None:
    """Serialize a component collection to pretty-printed json."""
    with open(data_file, "w") as datafile:
        # default=x.__dict__ lets the plain data classes serialize without a
        # custom encoder.
        json.dump(ccomps_col, datafile, indent=2, default=lambda x: x.__dict__)


def _convert_all() -> None:
    """Convert every *.txt name list under DATA_DIR into a grouped *.json."""
    for entry in os.listdir(DATA_DIR):
        if not entry.endswith(".txt"):
            continue
        bic_file = op.join(DATA_DIR, entry)
        data_filename = op.splitext(op.basename(bic_file))[0]
        # File names look like "<prefix>_<version>.txt".
        bic_file_version = data_filename.split("_")[1]
        bic_names = load_bics(bic_file)
        ccomp_collection = create_ccomp_collection(bic_file_version, bic_names)
        dump_bics(op.join(DATA_DIR, data_filename + ".json"), ccomp_collection)


if __name__ == "__main__":
    # This loop previously ran at import time; guarding it keeps the module
    # importable (e.g. for tests) without touching the filesystem.
    _convert_all()
# Stand-alone script: make the repository root importable when run directly.
if __name__ == '__main__' and __package__ is None:
    import sys
    from os import path
    sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))
import os
import torch
from utils.model_serialization import strip_prefix_if_present  # NOTE(review): unused here
from utils import zipreader
import argparse
from tqdm import tqdm
import pickle
import cv2
import numpy as np

# Undistort dataset images referenced by an annotation pickle and write them
# as JPEGs under --dst, mirroring the source layout.  Defaults point at
# Human3.6M ("h36m") data -- presumably that is the intended dataset.
parser = argparse.ArgumentParser(description="PyTorch Keypoints Training")
parser.add_argument(
    "--src",
    default="~/datasets",
    help="source model",  # NOTE(review): help text says "model" but this is the dataset root
    type=str,
)
parser.add_argument(
    "--dst",
    default="~/local/datasets/h36m/undistortedimages",
    help="dst model",  # NOTE(review): likewise, this is the output directory
    type=str,
)
parser.add_argument(
    "--anno",
    default="~/datasets/h36m/annot/h36m_validation.pkl",
    type=str,
)
args = parser.parse_args()
src = os.path.expanduser(args.src)
dst = os.path.expanduser(args.dst)

# Annotation records; each entry carries an image path plus camera parameters.
with open(os.path.expanduser(args.anno), 'rb') as f:
    data = pickle.load(f)

for db_rec in tqdm(data):
    # NOTE(review): `path` shadows the `from os import path` import above.
    path = db_rec['image']
    image_dir = 'images.zip@'  # zipreader convention: "<zip archive>@<member path>"
    image_file = os.path.join(src, db_rec['source'], image_dir, 'images', db_rec['image'])
    output_path = os.path.join(dst, path)
    # Skip frames already produced by a previous (possibly interrupted) run.
    if os.path.exists(output_path):
        continue
    output_dir = os.path.dirname(output_path)
    os.makedirs(output_dir, exist_ok=True)
    data_numpy = zipreader.imread(image_file, cv2.IMREAD_COLOR | cv2.IMREAD_IGNORE_ORIENTATION)
    camera = db_rec['camera']
    # Intrinsic matrix K from focal lengths (fx, fy) and principal point (cx, cy).
    K = np.array([
        [float(camera['fx']), 0, float(camera['cx'])],
        [0, float(camera['fy']), float(camera['cy'])],
        [0, 0, 1.],
    ])
    # OpenCV distortion coefficient order: (k1, k2, p1, p2, k3).
    distCoeffs = np.array([float(i) for i in [camera['k'][0], camera['k'][1], camera['p'][0], camera['p'][1], camera['k'][2]]])
    data_numpy = cv2.undistort(data_numpy, K, distCoeffs)
    # Earlier quality choices kept for reference:
    #cv2.imwrite(output_path, data_numpy, [int(cv2.IMWRITE_JPEG_QUALITY), 100])
    #cv2.imwrite(output_path, data_numpy)
    cv2.imwrite(output_path, data_numpy, [int(cv2.IMWRITE_JPEG_QUALITY), 90])
import torch


def squared_euclidean_distance(a, b):
    """Pairwise squared Euclidean distances between row vectors of a and b.

    Expands ||a_i - b_j||^2 as ||a_i||^2 - 2 a_i.b_j + ||b_j||^2 so the whole
    matrix is produced by one matmul plus broadcasting.
    Returns a tensor of shape (a.shape[0], b.shape[0]).
    """
    bt = torch.transpose(b, 0, 1)
    a_sq = torch.sum(torch.square(a), dim=1, keepdim=True)      # (n, 1)
    b_sq = torch.sum(torch.square(bt), dim=0, keepdim=True)     # (1, m)
    cross = torch.matmul(a, bt)                                 # (n, m)
    return a_sq - 2 * cross + b_sq


def quantize(x, centroids):
    """Assign every pixel of a [B, C, H, W] batch to its nearest centroid.

    Each pixel's channel vector is compared against the (K, C) centroid
    table; the result is a LongTensor of centroid indices, shape [B, H, W].
    """
    batch, channels, height, width = x.shape
    # [B, C, H, W] -> [B, H, W, C] -> one row per pixel.
    pixels = x.permute(0, 2, 3, 1).contiguous().view(-1, channels)
    dists = squared_euclidean_distance(pixels, centroids)
    nearest = torch.argmin(dists, 1)
    return nearest.view(batch, height, width)


def unquantize(x, centroids):
    """Inverse of quantize: replace each index with its centroid vector."""
    return centroids[x]
"""Tests of zero_x.middlewares."""<line_sep> |
# Generated by Django 3.0.5 on 2020-04-15 07:32
#
# Adds support for automatic (recurring) plan renewal:
#   * a has_automatic_renewal flag on PlanPricing,
#   * a new RecurringUserPlan model holding the payment-provider token,
#     price data and card expiry needed to renew a UserPlan.
# NOTE: auto-generated migration -- do not hand-edit once applied.

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('plans', '0004_create_user_plans'),
    ]

    operations = [
        # Opt-in flag: use automatic renewal for this pricing when possible.
        migrations.AddField(
            model_name='planpricing',
            name='has_automatic_renewal',
            field=models.BooleanField(default=False, help_text='Use automatic renewal if possible?', verbose_name='has automatic renewal'),
        ),
        # Ordering columns become non-editable and indexed.
        migrations.AlterField(
            model_name='plan',
            name='order',
            field=models.PositiveIntegerField(db_index=True, editable=False, verbose_name='order'),
        ),
        migrations.AlterField(
            model_name='quota',
            name='order',
            field=models.PositiveIntegerField(db_index=True, editable=False, verbose_name='order'),
        ),
        # One-to-one companion of UserPlan storing everything needed to renew
        # the plan through a payment provider.
        migrations.CreateModel(
            name='RecurringUserPlan',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('token', models.CharField(blank=True, default=None, help_text='Token, that will be used for payment renewal. Depends on used payment provider', max_length=255, null=True, verbose_name='recurring token')),
                ('payment_provider', models.CharField(blank=True, default=None, help_text='Provider, that will be used for payment renewal', max_length=255, null=True, verbose_name='payment provider')),
                ('amount', models.DecimalField(blank=True, db_index=True, decimal_places=2, max_digits=7, null=True, verbose_name='amount')),
                ('tax', models.DecimalField(blank=True, db_index=True, decimal_places=2, max_digits=4, null=True, verbose_name='tax')),
                ('currency', models.CharField(max_length=3, verbose_name='currency')),
                ('has_automatic_renewal', models.BooleanField(default=False, help_text='Automatic renewal is enabled for associated plan. If False, the plan renewal can be still initiated by user.', verbose_name='has automatic plan renewal')),
                ('card_expire_year', models.IntegerField(blank=True, null=True)),
                ('card_expire_month', models.IntegerField(blank=True, null=True)),
                ('pricing', models.ForeignKey(blank=True, default=None, help_text='Recurring pricing', null=True, on_delete=django.db.models.deletion.CASCADE, to='plans.Pricing')),
                ('user_plan', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='recurring', to='plans.UserPlan')),
            ],
        ),
    ]
# terrascript/resource/terraform-provider-graylog/graylog.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:17:31 UTC)
"""Resource classes for the Graylog Terraform provider.

Every class here is an empty subclass of ``terrascript.Resource``: only the
class *name* distinguishes one resource type from another, all behavior
lives in the base class.  Auto-generated -- regenerate with
tools/makecode.py rather than editing by hand.
"""

import terrascript


class graylog_alarm_callback(terrascript.Resource):
    pass


class graylog_alert_condition(terrascript.Resource):
    pass


class graylog_dashboard(terrascript.Resource):
    pass


class graylog_dashboard_widget(terrascript.Resource):
    pass


class graylog_dashboard_widget_positions(terrascript.Resource):
    pass


class graylog_event_definition(terrascript.Resource):
    pass


class graylog_event_notification(terrascript.Resource):
    pass


class graylog_extractor(terrascript.Resource):
    pass


class graylog_grok_pattern(terrascript.Resource):
    pass


class graylog_index_set(terrascript.Resource):
    pass


class graylog_input(terrascript.Resource):
    pass


class graylog_input_static_fields(terrascript.Resource):
    pass


class graylog_ldap_setting(terrascript.Resource):
    pass


class graylog_output(terrascript.Resource):
    pass


class graylog_pipeline(terrascript.Resource):
    pass


class graylog_pipeline_connection(terrascript.Resource):
    pass


class graylog_pipeline_rule(terrascript.Resource):
    pass


class graylog_role(terrascript.Resource):
    pass


class graylog_sidecar_collector(terrascript.Resource):
    pass


class graylog_sidecar_configuration(terrascript.Resource):
    pass


class graylog_sidecars(terrascript.Resource):
    pass


class graylog_stream(terrascript.Resource):
    pass


class graylog_stream_output(terrascript.Resource):
    pass


class graylog_stream_rule(terrascript.Resource):
    pass


class graylog_user(terrascript.Resource):
    pass


# Public API -- keep in sync with the class list above (25 entries).
__all__ = [
    "graylog_alarm_callback",
    "graylog_alert_condition",
    "graylog_dashboard",
    "graylog_dashboard_widget",
    "graylog_dashboard_widget_positions",
    "graylog_event_definition",
    "graylog_event_notification",
    "graylog_extractor",
    "graylog_grok_pattern",
    "graylog_index_set",
    "graylog_input",
    "graylog_input_static_fields",
    "graylog_ldap_setting",
    "graylog_output",
    "graylog_pipeline",
    "graylog_pipeline_connection",
    "graylog_pipeline_rule",
    "graylog_role",
    "graylog_sidecar_collector",
    "graylog_sidecar_configuration",
    "graylog_sidecars",
    "graylog_stream",
    "graylog_stream_output",
    "graylog_stream_rule",
    "graylog_user",
]
<import_from_future_stmt> print_function<import_from_future_stmt> division<import_stmt>os<import_stmt>sys<import_stmt>time<import_stmt>datetime<import_stmt>os.path<as>osp<import_stmt>numpy<as>np<import_stmt>torch<import_stmt>torch.nn<as>nn<import_stmt>torch.nn.functional<as>F<import_stmt>torch.backends.cudnn<as>cudnn<import_from_stmt>torch.optim lr_scheduler<import_from_stmt>args argument_parser image_dataset_kwargs optimizer_kwargs<import_from_stmt>torchreid.data_manager ImageDataManager<import_from_stmt>torchreid models<import_from_stmt>torchreid.losses CrossEntropyLoss DeepSupervision<import_from_stmt>torchreid.utils.iotools save_checkpoint check_isfile<import_from_stmt>torchreid.utils.avgmeter AverageMeter<import_from_stmt>torchreid.utils.loggers Logger RankLogger<import_from_stmt>torchreid.utils.torchtools count_num_param open_all_layers open_specified_layers<import_from_stmt>torchreid.utils.reidtools visualize_ranked_results<import_from_stmt>torchreid.eval_metrics evaluate<import_from_stmt>torchreid.optimizers init_optimizer<import_from_stmt>torchreid.regularizers get_regularizer<import_from_stmt>torchreid.losses.wrapped_cross_entropy_loss WrappedCrossEntropyLoss<import_from_stmt>torchreid.models.tricks.dropout DropoutOptimizer<import_stmt>logging<line_sep>logging.basicConfig(level=os.environ.get('LOGLEVEL' 'CRITICAL'))<line_sep># global variables
parser = argument_parser()
args = parser.parse_args()  # parsed at import time; the rest of the module reads this
dropout_optimizer = DropoutOptimizer(args)

os.environ['TORCH_HOME'] = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '.torch'))


def accuracy(output, target, topk=(1,)):
    """Computes the accuracy over the k top predictions for
    the specified values of k.

    Args:
        output (torch.Tensor): prediction matrix with shape (batch_size, num_classes).
        target (torch.LongTensor): ground truth labels with shape (batch_size).
        topk (tuple, optional): accuracy at top-k will be computed. For example,
            topk=(1, 5) means accuracy at top-1 and top-5 will be computed.

    Returns:
        list: accuracy at top-k.

    Examples::
        >>> from torchreid import metrics
        >>> metrics.accuracy(output, target)
    """
    maxk = max(topk)
    batch_size = target.size(0)

    # Some models return (logits, features, ...); score only the logits.
    if isinstance(output, (tuple, list)):
        output = output[0]

    _, pred = output.topk(maxk, 1, True, True)
    pred = pred.t()
    correct = pred.eq(target.view(1, -1).expand_as(pred))

    res = []
    for k in topk:
        # reshape (not view): slices of a transposed tensor can be
        # non-contiguous, and view() raises on non-contiguous input.
        correct_k = correct[:k].reshape(-1).float().sum(0, keepdim=True)
        acc = correct_k.mul_(100.0 / batch_size)
        res.append(acc)
    return res


def get_criterions(num_classes: int, use_gpu: bool, args):
    """Build the training criteria selected by ``args.criterion``.

    Returns:
        tuple: ``(criterion, fix_criterion, switch_criterion,
        htri_param_controller)``.  (The previous return annotation named only
        three values although four are returned.)

    Raises:
        RuntimeError: if ``args.criterion`` is not a known loss name.
    """
    from torchreid.losses.wrapped_triplet_loss import WrappedTripletLoss
    from torchreid.regularizers.param_controller import HtriParamController

    htri_param_controller = HtriParamController()

    # fix/switch criteria default to triplet loss when any htri variant is
    # selected, otherwise to plain cross entropy.
    if 'htri' in args.criterion:
        fix_criterion = WrappedTripletLoss(num_classes, use_gpu, args, htri_param_controller)
        switch_criterion = WrappedTripletLoss(num_classes, use_gpu, args, htri_param_controller)
    else:
        fix_criterion = WrappedCrossEntropyLoss(num_classes=num_classes, use_gpu=use_gpu, label_smooth=args.label_smooth)
        switch_criterion = WrappedCrossEntropyLoss(num_classes=num_classes, use_gpu=use_gpu, label_smooth=args.label_smooth)

    if args.criterion == 'xent':
        criterion = WrappedCrossEntropyLoss(num_classes=num_classes, use_gpu=use_gpu, label_smooth=args.label_smooth)
    elif args.criterion == 'spectral':
        from torchreid.losses.spectral_loss import SpectralLoss
        criterion = SpectralLoss(num_classes=num_classes, use_gpu=use_gpu, label_smooth=args.label_smooth, penalty_position=args.penalty_position)
    elif args.criterion == 'batch_spectral':
        from torchreid.losses.batch_spectral_loss import BatchSpectralLoss
        criterion = BatchSpectralLoss(num_classes=num_classes, use_gpu=use_gpu, label_smooth=args.label_smooth)
    elif args.criterion == 'lowrank':
        from torchreid.losses.lowrank_loss import LowRankLoss
        criterion = LowRankLoss(num_classes=num_classes, use_gpu=use_gpu, label_smooth=args.label_smooth)
    elif args.criterion == 'singular':
        from torchreid.losses.singular_loss import SingularLoss
        criterion = SingularLoss(num_classes=num_classes, use_gpu=use_gpu, label_smooth=args.label_smooth, penalty_position=args.penalty_position)
    elif args.criterion == 'htri':
        criterion = WrappedTripletLoss(num_classes=num_classes, use_gpu=use_gpu, args=args, param_controller=htri_param_controller)
    elif args.criterion == 'singular_htri':
        from torchreid.losses.singular_triplet_loss import SingularTripletLoss
        criterion = SingularTripletLoss(num_classes, use_gpu, args, htri_param_controller)
    elif args.criterion == 'incidence':
        from torchreid.losses.incidence_loss import IncidenceLoss
        criterion = IncidenceLoss()
    elif args.criterion == 'incidence_xent':
        from torchreid.losses.incidence_xent_loss import IncidenceXentLoss
        criterion = IncidenceXentLoss(num_classes, use_gpu, args.label_smooth)
    else:
        # BUG FIX: this branch previously formatted the *unbound* local
        # ``criterion`` and died with NameError instead of reporting the
        # unknown name.
        raise RuntimeError('Unknown criterion {!r}'.format(args.criterion))

    if args.fix_custom_loss:
        fix_criterion = criterion
    if args.switch_loss < 0:
        # Presumably a negative switch epoch swaps the two roles from the
        # start -- confirm against the training loop that consumes these.
        criterion, switch_criterion = switch_criterion, criterion
    return criterion, fix_criterion, switch_criterion, htri_param_controller


def main():
    """Build data manager, model and criteria from the module-level ``args``,
    load weights if requested, then dump training-set accuracies
    (see extract_train_info)."""
    global args, dropout_optimizer

    torch.manual_seed(args.seed)
    if not args.use_avai_gpus:
        os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu_devices
    use_gpu = torch.cuda.is_available()
    if args.use_cpu:
        use_gpu = False

    log_name = 'log_test.txt' if args.evaluate else 'log_train.txt'
    # Mirror stdout/stderr into the experiment log file.
    sys.stderr = sys.stdout = Logger(osp.join(args.save_dir, log_name))
    print("==========\nArgs:{}\n==========".format(args))

    if use_gpu:
        print("Currently using GPU {}".format(args.gpu_devices))
        cudnn.benchmark = True
        torch.cuda.manual_seed_all(args.seed)
    else:
        print("Currently using CPU, however, GPU is highly recommended")

    print("Initializing image data manager")
    dm = ImageDataManager(use_gpu, **image_dataset_kwargs(args))
    trainloader, testloader_dict = dm.return_dataloaders()

    print("Initializing model: {}".format(args.arch))
    model = models.init_model(name=args.arch, num_classes=dm.num_train_pids, loss={'xent'}, use_gpu=use_gpu, dropout_optimizer=dropout_optimizer)
    print(model)
    print("Model size: {:.3f} M".format(count_num_param(model)))

    # Criteria/regularizer/optimizer/scheduler are constructed for parity with
    # the full training entry point, even though only the model is used below.
    criterion, fix_criterion, switch_criterion, htri_param_controller = get_criterions(dm.num_train_pids, use_gpu, args)
    regularizer, reg_param_controller = get_regularizer(args.regularizer)
    optimizer = init_optimizer(model.parameters(), **optimizer_kwargs(args))
    scheduler = lr_scheduler.MultiStepLR(optimizer, milestones=args.stepsize, gamma=args.gamma)

    if args.load_weights and check_isfile(args.load_weights):
        # Load pretrained weights, skipping layers whose shapes don't match.
        try:
            checkpoint = torch.load(args.load_weights)
        except Exception as e:
            # Fall back to CPU mapping for checkpoints saved on GPU.
            print(e)
            checkpoint = torch.load(args.load_weights, map_location={'cuda:0': 'cpu'})
        pretrain_dict = checkpoint['state_dict']
        model_dict = model.state_dict()
        pretrain_dict = {k: v for k, v in pretrain_dict.items() if k in model_dict and model_dict[k].size() == v.size()}
        model_dict.update(pretrain_dict)
        model.load_state_dict(model_dict)
        print("Loaded pretrained weights from '{}'".format(args.load_weights))

    if args.resume and check_isfile(args.resume):
        checkpoint = torch.load(args.resume)
        state = model.state_dict()
        state.update(checkpoint['state_dict'])
        model.load_state_dict(state)
        print("Loaded checkpoint from '{}'".format(args.resume))
        print("- start_epoch: {}\n- rank1: {}".format(args.start_epoch, checkpoint['rank1']))

    if use_gpu:
        model = nn.DataParallel(model, device_ids=list(range(len(args.gpu_devices.split(','))))).cuda()

    extract_train_info(model, trainloader)


def extract_train_info(model, trainloader):
    """Run the frozen model over the training set and write the average top-1
    accuracy of each cross-entropy head to ``<args.load_weights>.acc``.

    NOTE(review): uses .cuda() unconditionally, so this path requires a GPU.
    """
    model.eval()
    os.environ['fake'] = '1'  # NOTE(review): flag read elsewhere (model internals?) -- confirm
    accs = [AverageMeter() for _ in range(3)]  # assumes up to 3 xent heads -- confirm
    with torch.no_grad():
        for imgs, pids, _, paths in trainloader:
            xent_features = model(imgs.cuda())[1]
            for i, xent_feature in enumerate(xent_features):
                accs[i].update(
                    accuracy(xent_feature, pids.cuda())[0].item(),
                    pids.size(0),
                )
    with open(args.load_weights + '.acc', 'w') as f:
        print(*(acc.avg for acc in accs), file=f)


if __name__ == '__main__':
    main()
<import_stmt>sys<import_from_stmt>typing NoReturn Optional Type<import_from_stmt>traceback_with_variables.print print_exc Format<def_stmt>global_print_exc fmt:Optional[Format]=<none><arrow>NoReturn<block_start>sys.excepthook=<lambda>e_cls e tb:print_exc(e=e fmt=fmt)<block_end><def_stmt>global_print_exc_in_ipython fmt:Optional[Format]=<none><arrow>NoReturn<block_start><try_stmt><block_start><import_stmt>IPython<block_end><except_stmt>ModuleNotFoundError<block_start><raise>ValueError("IPython not found")<block_end>IPython.core.interactiveshell.InteractiveShell.showtraceback=<lambda>self *args **kwargs:print_exc(num_skipped_frames=1 fmt=fmt)<block_end><def_stmt>is_ipython_global name:str type_:Type filename:str is_global:bool<arrow>bool<block_start><return>is_global<and>(name<in>['In' 'Out' 'get_ipython' 'exit' 'quit']<or>name.startswith('_'))<block_end> |
<import_from_stmt>backend.magic Bundle<import_from_stmt>.macro macro<import_from_stmt>.model_admin ModelAdmin<line_sep>admin_bundle=Bundle(__name__)<line_sep> |
<import_stmt>time<import_from_stmt>umqtt.robust MQTTClient<def_stmt>sub_cb topic msg<block_start>print((topic msg))<block_end>c=MQTTClient("umqtt_client" "localhost")<line_sep># Print diagnostic messages when retries/reconnects happens
c.DEBUG=<true><line_sep>c.set_callback(sub_cb)<line_sep># Connect to server, requesting not to clean session for this
# client. If there was no existing session (False return value
# from connect() method), we perform the initial setup of client
# session - subscribe to needed topics. Afterwards, these
# subscriptions will be stored server-side, and will be persistent,
# (as we use clean_session=False).
#
# There can be a problem when a session for a given client exists,
# but doesn't have subscriptions a particular application expects.
# In this case, a session needs to be cleaned first. See
# example_reset_session.py for an obvious way how to do that.
#
# In an actual application, it's up to its developer how to
# manage these issues. One extreme is to have external "provisioning"
# phase, where initial session setup, and any further management of
# a session, is done by external tools. This allows to save resources
# on a small embedded device. Another extreme is to have an application
# to perform auto-setup (e.g., clean session, then re-create session
# on each restart). This example shows mid-line between these 2
# approaches, where initial setup of session is done by application,
# but if anything goes wrong, there's an external tool to clean session.
<if_stmt><not>c.connect(clean_session=<false>)<block_start>print("New session being set up")<line_sep>c.subscribe(b"foo_topic")<block_end><while_stmt>1<block_start>c.wait_msg()<block_end>c.disconnect()<line_sep> |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT license.
<import_stmt>multiprocessing<import_from_stmt>multiprocessing cpu_count<import_stmt>math<class_stmt>ProcessorsScheduler(object)<block_start>process_num=cpu_count()<def_stmt>__init__ self cpu_num_workers=<none><block_start><if_stmt>cpu_num_workers<ne><none><and>cpu_num_workers<g>0<block_start>self.process_num=cpu_num_workers<block_end><block_end><def_stmt>run_data_parallel self func func_args<block_start>data,rest_args=func_args[0] func_args[1:]<line_sep>res=[]<line_sep># logging.info("multiprocess enabled, process num: %d" % (self.process_num))
process_p=multiprocessing.Pool(self.process_num)<line_sep>data_length=len(data)<line_sep>size=math.ceil(data_length/self.process_num)<for_stmt>i range(self.process_num)<block_start>start=size<times>i<line_sep>end=(i+1)<times>size<if>(i+1)<times>size<l>data_length<else>data_length<line_sep>args=(data[start:end] )+rest_args<line_sep>res.append((i process_p.apply_async(func args=args)))<block_end>process_p.close()<line_sep>process_p.join()<line_sep>res=sorted(res key=<lambda>x:x[0])<line_sep><return>res<block_end><block_end> |
<import_stmt>sys<import_stmt>asyncio<import_stmt>zmq<import_stmt>zmq.asyncio<import_from_stmt>zmq.auth Authenticator<import_from_stmt>zmq.auth.thread _inherit_docstrings ThreadAuthenticator AuthenticationThread<line_sep># Copying code from zqm classes since no way to inject these dependencies
<class_stmt>MultiZapAuthenticator(Authenticator)<block_start>"""
`Authenticator` supports only one ZAP socket in a single process, this lets
you have multiple ZAP sockets
"""<line_sep>count=0<def_stmt>__init__ self context=<none> encoding='utf-8' log=<none><block_start>MultiZapAuthenticator.count<augadd>1<line_sep>super().__init__(context=context encoding=encoding log=log)<block_end><def_stmt>start self<block_start>"""Create and bind the ZAP socket"""<line_sep>self.zap_socket=self.context.socket(zmq.REP)<line_sep>self.zap_socket.linger=1<line_sep>zapLoc='inproc://zeromq.zap.{}'.format(MultiZapAuthenticator.count)<line_sep>self.zap_socket.bind(zapLoc)<line_sep>self.log.debug('Starting ZAP at {}'.format(zapLoc))<block_end><def_stmt>stop self<block_start>"""Close the ZAP socket"""<if_stmt>self.zap_socket<block_start>self.log.debug('Stopping ZAP at {}'.format(self.zap_socket.LAST_ENDPOINT))<line_sep>super().stop()<block_end><block_end><block_end>@_inherit_docstrings<class_stmt>ThreadMultiZapAuthenticator(ThreadAuthenticator)<block_start><def_stmt>start self<block_start>"""Start the authentication thread"""<line_sep># create a socket to communicate with auth thread.
self.pipe=self.context.socket(zmq.PAIR)<line_sep>self.pipe.linger=1<line_sep>self.pipe.bind(self.pipe_endpoint)<line_sep>authenticator=MultiZapAuthenticator(self.context encoding=self.encoding log=self.log)<line_sep>self.thread=AuthenticationThread(self.context self.pipe_endpoint encoding=self.encoding log=self.log authenticator=authenticator)<line_sep>self.thread.start()<line_sep># Event.wait:Changed in version 2.7: Previously, the method always returned None.
<if_stmt>sys.version_info<l>(2 7)<block_start>self.thread.started.wait(timeout=10)<block_end><else_stmt><block_start><if_stmt><not>self.thread.started.wait(timeout=10)<block_start><raise>RuntimeError("Authenticator thread failed to start")<block_end><block_end><block_end><block_end><class_stmt>AsyncioAuthenticator(MultiZapAuthenticator)<block_start>"""ZAP authentication for use in the asyncio IO loop"""<def_stmt>__init__ self context=<none> loop=<none><block_start>super().__init__(context)<line_sep>self.loop=loop<or>asyncio.get_event_loop()<line_sep>self.__poller=<none><line_sep>self.__task=<none><block_end># TODO: Remove this commented method later
# @asyncio.coroutine
# def __handle_zap(self):
# while True:
# events = yield from self.__poller.poll()
# if self.zap_socket in dict(events):
# msg = yield from self.zap_socket.recv_multipart()
# self.handle_zap_message(msg)
<async_keyword><def_stmt>__handle_zap self<block_start><while_stmt><true><block_start>events=<await>self.__poller.poll()<if_stmt>self.zap_socket<in>dict(events)<block_start>msg=<await>self.zap_socket.recv_multipart()<line_sep>self.handle_zap_message(msg)<block_end><block_end><block_end><def_stmt>start self<block_start>"""Start ZAP authentication"""<line_sep>super().start()<line_sep>self.__poller=zmq.asyncio.Poller()<line_sep>self.__poller.register(self.zap_socket zmq.POLLIN)<line_sep>self.__task=asyncio.ensure_future(self.__handle_zap())<block_end><def_stmt>stop self<block_start>"""Stop ZAP authentication"""<if_stmt>self.__task<block_start>self.__task.cancel()<block_end><if_stmt>self.__poller<block_start>self.__poller.unregister(self.zap_socket)<line_sep>self.__poller=<none><block_end>super().stop()<block_end><block_end> |
<import_stmt>numpy<as>np<import_stmt>unittest<import_stmt>pytest<import_from_stmt>pysph.base.particle_array ParticleArray<import_stmt>pysph.tools.mesh_tools<as>G<import_from_stmt>pysph.base.utils get_particle_array<line_sep># Data of a unit length cube
<def_stmt>cube_data <block_start>points=np.array([[0. 0. 0.] [0. 1. 0.] [1. 1. 0.] [1. 0. 0.] [0. 0. 1.] [0. 1. 1.] [1. 0. 1.] [1. 1. 1.]])<line_sep>x_cube,y_cube,z_cube=points.T<line_sep>cells=np.array([[0 1 2] [0 2 3] [0 4 5] [0 5 1] [0 3 6] [0 6 4] [4 6 7] [4 7 5] [3 2 7] [3 7 6] [1 5 7] [1 7 2]])<line_sep>normals=np.array([[0. 0. -1.] [0. 0. -1.] [-1. 0. 0.] [-1. 0. 0.] [0. -1. 0.] [0. -1. 0.] [0. 0. 1.] [0. 0. 1.] [1. 0. 0.] [1. 0. 0.] [0. 1. 0.] [0. 1. 0.]])<line_sep>vectors=np.zeros((len(cells) 3 3))<for_stmt>i,cell enumerate(cells)<block_start>idx1,idx2,idx3=cell<line_sep>vector=np.array([[x_cube[idx1] y_cube[idx1] z_cube[idx1]] [x_cube[idx2] y_cube[idx2] z_cube[idx2]] [x_cube[idx3] y_cube[idx3] z_cube[idx3]]])<line_sep>vectors[i]=vector<block_end><return>x_cube y_cube z_cube cells normals vectors<block_end><class_stmt>TestGeometry(unittest.TestCase)<block_start><def_stmt>test_in_triangle self<block_start><assert_stmt>(G._in_triangle(0.5 0.5 0.0 0.0 1.5 0.0 0.0 1.5)<is><true>)<assert_stmt>(G._in_triangle(1.0 1.0 0.0 0.0 1.0 0.0 0.0 1.0)<is><false>)<block_end><def_stmt>test_interp_2d self# Check interpolation between two points on line y=x
<block_start>dx=0.1<line_sep>r=G._interp_2d(np.array([0. 0.]) np.array([1. 1.]) dx)<line_sep># Check if all points satisfy y=x
np.testing.assert_array_almost_equal(r[: 0]-r[: 1] np.zeros(r.shape[0]))<line_sep># Check if distance between consecutive points is lesser than dx
np.testing.assert_array_less(np.linalg.norm(r[1:]-r[0:-1] axis=1) np.ones(r.shape[0]-1)<times>dx)<block_end><def_stmt>test_fill_triangle self<block_start>triangle=np.array([[0. 0. 0.] [1. 0. 0.] [0. 1. 0.]])<line_sep>dx_triangle=0.1<line_sep>x,y,z=G._fill_triangle(triangle dx_triangle)<line_sep>EPS=np.finfo(float).eps<line_sep>np.testing.assert_array_less(-x np.zeros(x.shape[0])+EPS)<line_sep>np.testing.assert_array_less(-y np.zeros(x.shape[0])+EPS)<line_sep>np.testing.assert_array_less(-(x+y) np.ones(x.shape[0])+EPS)<line_sep>np.testing.assert_almost_equal(z np.zeros(x.shape[0]))<block_end><def_stmt>test_fill_triangle_throws_zero_area_triangle_exception self<block_start>self.assertRaises(G.ZeroAreaTriangleException G._fill_triangle np.zeros((3 3)) 0.5)<block_end><def_stmt>test_fill_triangle_throws_polygon_mesh_error self<block_start>self.assertRaises(G.PolygonMeshError G._fill_triangle np.zeros((4 3)) 0.5)<block_end><def_stmt>test_get_points_from_mgrid self<block_start>"""Find neighbouring particles around a unit cube"""<line_sep>h=0.1<line_sep>x_cube,y_cube,z_cube,cells,normals,vectors=cube_data()<line_sep>x,y,z,x_list,y_list,z_list,vectors=G._get_surface_mesh(x_cube y_cube z_cube cells h uniform=<true>)<line_sep>pa_mesh=ParticleArray(name='mesh' x=x y=y z=z h=h)<line_sep>offset=h<line_sep>x_grid,y_grid,z_grid=np.meshgrid(np.arange(x.min()-offset x.max()+offset h) np.arange(y.min()-offset y.max()+offset h) np.arange(z.min()-offset z.max()+offset h))<line_sep>pa_grid=ParticleArray(name='grid' x=x_grid y=y_grid z=z_grid h=h)<line_sep>x_grid,y_grid,z_grid=G.get_points_from_mgrid(pa_grid pa_mesh x_list y_list z_list 1 h vectors normals)<for_stmt>i range(x.shape[0])<block_start><assert_stmt>((x[i]<power>2+y[i]<power>2+z[i]<power>2)<le>4)<block_end><block_end><def_stmt>_cube_assert self x y z h<block_start>"""Check if x,y,z lie within surface of thickness `h` of a unit cube"""<def_stmt>surface1 x y z<block_start><return>min(abs(x) 
abs(1-x))<l>h<and>y<g>-h<and>y<l>1+h<and>z<g>-h<and>z<l>1+h<block_end><def_stmt>on_surface x y z<block_start><return>surface1(x y z)<or>surface1(y x z)<or>surface1(z x y)<block_end><for_stmt>i range(x.shape[0])<block_start><assert_stmt>on_surface(x[i] y[i] z[i])<block_end><block_end><def_stmt>test_get_surface_mesh self<block_start>"""Check if mesh is generated correctly for unit cube"""<line_sep>x_cube,y_cube,z_cube,cells,normals,vectors=cube_data()<line_sep>x,y,z=G._get_surface_mesh(x_cube y_cube z_cube cells 0.1)<line_sep>h=np.finfo(float).eps<line_sep>self._cube_assert(x y z h)<block_end><def_stmt>test_get_surface_points self<block_start>"""Check if surface is generated correctly for unit cube"""<line_sep>h=0.1<line_sep>x_cube,y_cube,z_cube,cells,normals,vectors=cube_data()<line_sep>x,y,z=G.surface_points(x_cube y_cube z_cube cells h)<line_sep>self._cube_assert(x y z h)<block_end><def_stmt>test_get_surface_points_uniform self<block_start>"""Check if uniform surface is generated correctly for unit cube"""<line_sep>h=0.1<line_sep>x_cube,y_cube,z_cube,cells,normals,vectors=cube_data()<line_sep>x,y,z=G.surf_points_uniform(x_cube y_cube z_cube cells normals 1.0 1.0)<line_sep>self._cube_assert(x y z h)<block_end><def_stmt>test_prism self<block_start>tri_normal=np.array([0 -1 0])<line_sep>tri_points=np.array([[0 0 0] [1 0 0] [0 0 1]])<line_sep>h=1/1.5<line_sep>prism_normals,prism_points,prism_face_centres=G.prism(tri_normal tri_points h)<assert_stmt>np.array([-1 0 0])<in>prism_normals<assert_stmt>np.array([0 1 0])<in>prism_points<assert_stmt>np.array([0.5 0.5 0])<in>prism_face_centres<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>unittest.main()<block_end> |
# A Bubble class
<class_stmt>Bubble(object)# Create the Bubble
<block_start><def_stmt>__init__ self x y diameter name<block_start>self.x=x<line_sep>self.y=y<line_sep>self.diameter=diameter<line_sep>self.name=name<line_sep>self.over=<false><block_end># Checking if mouse is over the Bubble
<def_stmt>rollover self px py<block_start>d=dist(px py self.x self.y)<line_sep>self.over=d<l>self.diameter/2<block_end># Display the Bubble
<def_stmt>display self<block_start>stroke(0)<line_sep>strokeWeight(2)<line_sep>noFill()<line_sep>ellipse(self.x self.y self.diameter self.diameter)<if_stmt>self.over<block_start>fill(0)<line_sep>textAlign(CENTER)<line_sep>text(self.name self.x self.y+self.diameter/2+20)<block_end><block_end><block_end> |
########################################################################
# Copyright 2017 FireEye Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
########################################################################
<import_stmt>ctypes<as>ct<import_stmt>ctypes.wintypes<as>wt<line_sep>EVENT_FILTER_TYPE_NONE=0x00000000<line_sep>EVENT_FILTER_TYPE_SCHEMATIZED=0x80000000<line_sep>EVENT_FILTER_TYPE_SYSTEM_FLAGS=0x80000001<line_sep>VENT_FILTER_TYPE_TRACEHANDLE=0x80000002<line_sep>EVENT_FILTER_TYPE_PID=0x80000004<line_sep>EVENT_FILTER_TYPE_EXECUTABLE_NAME=0x80000008<line_sep>EVENT_FILTER_TYPE_PACKAGE_ID=0x80000010<line_sep>EVENT_FILTER_TYPE_PACKAGE_APP_ID=0x80000020<line_sep>EVENT_FILTER_TYPE_PAYLOAD=0x80000100<line_sep>EVENT_FILTER_TYPE_EVENT_ID=0x80000200<line_sep>EVENT_FILTER_TYPE_STACKWALK=0x80001000<line_sep>MAX_EVENT_FILTER_EVENT_ID_COUNT=64<line_sep>MAX_EVENT_FILTER_DATA_SIZE=1024<class_stmt>EVENT_FILTER_DESCRIPTOR(ct.Structure)<block_start>_fields_=[('Ptr' ct.c_ulonglong) ('Size' ct.c_ulong) ('Type' ct.c_ulong)]<block_end><class_stmt>EVENT_FILTER_HEADER(ct.Structure)<block_start>_fields_=[('Id' wt.USHORT) ('Version' wt.CHAR) ('Reserved' wt.CHAR<times>5) ('InstanceId' ct.c_ulonglong) ('Size' wt.ULONG) ('NextOffset' wt.ULONG)]<block_end><class_stmt>EVENT_FILTER_EVENT_ID(ct.Structure)<block_start>_fields_=[('FilterIn' wt.BOOLEAN) ('Reserved' wt.CHAR) ('Count' wt.USHORT) ('Events' wt.USHORT<times>0)]<def_stmt>__init__ self filter_in events<block_start>struct_size=len(events)<times>ct.sizeof(wt.USHORT)+ct.sizeof(EVENT_FILTER_EVENT_ID)<line_sep>self._buf=(ct.c_char<times>struct_size)()<line_sep>self._props=ct.cast(ct.pointer(self._buf) ct.POINTER(EVENT_FILTER_EVENT_ID))<line_sep>self._props.contents.FilterIn=filter_in<line_sep>self._props.contents.Reserved=0<line_sep>self._props.contents.Count=len(events)<for_stmt>i range(len(events))<block_start>ct.memmove(ct.cast(ct.addressof(self._buf)+ct.sizeof(EVENT_FILTER_EVENT_ID)+(ct.sizeof(wt.WCHAR)<times>i) ct.c_void_p) ct.byref(wt.USHORT(events[i])) ct.sizeof(wt.WCHAR))<block_end><block_end><def_stmt>get 
self<block_start><return>self._props<block_end><block_end><class_stmt>EVENT_FILTER_LEVEL_KW(ct.Structure)<block_start>_fields_=[('MatchAnyKeyword' ct.c_ulonglong) ('MatchAllKeyword' ct.c_ulonglong) ('Level' wt.CHAR) ('FilterIn' wt.BOOLEAN)]<block_end><class_stmt>EVENT_FILTER_EVENT_NAME(ct.Structure)<block_start>_fields_=[('MatchAnyKeyword' ct.c_ulonglong) ('MatchAllKeyword' ct.c_ulonglong) ('Level' wt.CHAR) ('FilterIn' wt.BOOLEAN) ('NameCount' wt.USHORT) ('Names' wt.CHAR<times>0)]<def_stmt>__init__ self match_any match_all level filter_in names<block_start>struct_size=((sum([len(name)<for>name names])<times>ct.sizeof(wt.CHAR))+(ct.sizeof(wt.CHAR)<times>len(names)))+ct.sizeof(EVENT_FILTER_EVENT_NAME)<line_sep>self._buf=(ct.c_char<times>struct_size)()<line_sep>self._props=ct.cast(ct.pointer(self._buf) ct.POINTER(EVENT_FILTER_EVENT_NAME))<line_sep>self._props.contents.MatchAnyKeyword=match_any<line_sep>self._props.contents.MatchAllKeyword=match_all<line_sep>self._props.contents.Level=level<line_sep>self._props.contents.FilterIn=filter_in<line_sep>self._props.contents.NameCount=len(names)<line_sep>str_off=0<for_stmt>i range(len(names))<block_start>ct.memmove(ct.cast(ct.addressof(self._buf)+ct.sizeof(EVENT_FILTER_EVENT_NAME)+str_off ct.c_void_p) names[i] len(names[i]))<line_sep>str_off<augadd>len(names[i])+ct.sizeof(wt.CHAR)<block_end><block_end><def_stmt>get self<block_start><return>self._props<block_end><block_end><class_stmt>EVENT_DESCRIPTOR(ct.Structure)<block_start>_fields_=[('Id' ct.c_ushort) ('Version' ct.c_ubyte) ('Channel' ct.c_ubyte) ('Level' ct.c_ubyte) ('Opcode' ct.c_ubyte) ('Task' ct.c_ushort) ('Keyword' ct.c_ulonglong)]<block_end> |
<import_stmt>os<import_stmt>itertools<import_stmt>re<import_from_stmt>typing List Optional Tuple Dict Callable Any NamedTuple<import_from_stmt>string Template<import_from_stmt>typing List<import_from_stmt>tokenizers Tokenizer Encoding<line_sep>dirname=os.path.dirname(__file__)<line_sep>css_filename=os.path.join(dirname "visualizer-styles.css")<with_stmt>open(css_filename)<as>f<block_start>css=f.read()<block_end><class_stmt>Annotation<block_start>start:int<line_sep>end:int<line_sep>label:int<def_stmt>__init__ self start:int end:int label:str<block_start>self.start=start<line_sep>self.end=end<line_sep>self.label=label<block_end><block_end>AnnotationList=List[Annotation]<line_sep>PartialIntList=List[Optional[int]]<class_stmt>CharStateKey(NamedTuple)<block_start>token_ix:Optional[int]<line_sep>anno_ix:Optional[int]<block_end><class_stmt>CharState<block_start>char_ix:Optional[int]<def_stmt>__init__ self char_ix<block_start>self.char_ix=char_ix<line_sep>self.anno_ix:Optional[int]=<none><line_sep>self.tokens:List[int]=[]<block_end>@property<def_stmt>token_ix self<block_start><return>self.tokens[0]<if>len(self.tokens)<g>0<else><none><block_end>@property<def_stmt>is_multitoken self<block_start>"""
BPE tokenizers can output more than one token for a char
"""<line_sep><return>len(self.tokens)<g>1<block_end><def_stmt>partition_key self<arrow>CharStateKey<block_start><return>CharStateKey(token_ix=self.token_ix anno_ix=self.anno_ix )<block_end><block_end><class_stmt>Aligned<block_start><pass><block_end><class_stmt>EncodingVisualizer<block_start>"""
Build an EncodingVisualizer
Args:
tokenizer (:class:`~tokenizers.Tokenizer`):
A tokenizer instance
default_to_notebook (:obj:`bool`):
Whether to render html output in a notebook by default
annotation_converter (:obj:`Callable`, `optional`):
An optional (lambda) function that takes an annotation in any format and returns
an Annotation object
"""<line_sep>unk_token_regex=re.compile("(.{1}\b)?(unk|oov)(\b.{1})?" flags=re.IGNORECASE)<def_stmt>__init__ self tokenizer:Tokenizer default_to_notebook:bool=<true> annotation_converter:Optional[Callable[[Any] Annotation]]=<none> <block_start><if_stmt>default_to_notebook<block_start><try_stmt><block_start><import_from_stmt>IPython.core.display display HTML<block_end><except_stmt>ImportError<as>e<block_start><raise>Exception("""We couldn't import IPython utils for html display.
Are you running in a notebook?
You can also pass `default_to_notebook=False` to get back raw HTML
""")<block_end><block_end>self.tokenizer=tokenizer<line_sep>self.default_to_notebook=default_to_notebook<line_sep>self.annotation_coverter=annotation_converter<line_sep><pass><block_end><def_stmt>__call__ self text:str annotations:AnnotationList=[] default_to_notebook:Optional[bool]=<none> <arrow>Optional[str]<block_start>"""
Build a visualization of the given text
Args:
text (:obj:`str`):
The text to tokenize
annotations (:obj:`List[Annotation]`, `optional`):
An optional list of annotations of the text. The can either be an annotation class
or anything else if you instantiated the visualizer with a converter function
default_to_notebook (:obj:`bool`, `optional`, defaults to `False`):
If True, will render the html in a notebook. Otherwise returns an html string.
Returns:
The HTML string if default_to_notebook is False, otherwise (default) returns None and
renders the HTML in the notebook
"""<line_sep>final_default_to_notebook=self.default_to_notebook<if_stmt>default_to_notebook<is><not><none><block_start>final_default_to_notebook=default_to_notebook<block_end><if_stmt>final_default_to_notebook<block_start><try_stmt><block_start><import_from_stmt>IPython.core.display display HTML<block_end><except_stmt>ImportError<as>e<block_start><raise>Exception("""We couldn't import IPython utils for html display.
Are you running in a notebook?""")<block_end><block_end><if_stmt>self.annotation_coverter<is><not><none><block_start>annotations=list(map(self.annotation_coverter annotations))<block_end>encoding=self.tokenizer.encode(text)<line_sep>html=EncodingVisualizer.__make_html(text encoding annotations)<if_stmt>final_default_to_notebook<block_start>display(HTML(html))<block_end><else_stmt><block_start><return>html<block_end><block_end>@staticmethod<def_stmt>calculate_label_colors annotations:AnnotationList<arrow>Dict[str str]<block_start>"""
Generates a color palette for all the labels in a given set of annotations
Args:
annotations (:obj:`Annotation`):
A list of annotations
Returns:
:obj:`dict`: A dictionary mapping labels to colors in HSL format
"""<if_stmt>len(annotations)<eq>0<block_start><return>{}<block_end>labels=set(map(<lambda>x:x.label annotations))<line_sep>num_labels=len(labels)<line_sep>h_step=int(255/num_labels)<if_stmt>h_step<l>20<block_start>h_step=20<block_end>s=32<line_sep>l=64<line_sep>h=10<line_sep>colors={}<for_stmt>label sorted(labels)# sort so we always get the same colors for a given set of labels
<block_start>colors[label]=f"hsl({h},{s}%,{l}%"<line_sep>h<augadd>h_step<block_end><return>colors<block_end>@staticmethod<def_stmt>consecutive_chars_to_html consecutive_chars_list:List[CharState] text:str encoding:Encoding <block_start>"""
Converts a list of "consecutive chars" into a single HTML element.
Chars are consecutive if they fall under the same word, token and annotation.
The CharState class is a named tuple with a "partition_key" method that makes it easy to
compare if two chars are consecutive.
Args:
consecutive_chars_list (:obj:`List[CharState]`):
A list of CharStates that have been grouped together
text (:obj:`str`):
The original text being processed
encoding (:class:`~tokenizers.Encoding`):
The encoding returned from the tokenizer
Returns:
:obj:`str`: The HTML span for a set of consecutive chars
"""<line_sep>first=consecutive_chars_list[0]<if_stmt>first.char_ix<is><none># its a special token
<block_start>stoken=encoding.tokens[first.token_ix]<line_sep># special tokens are represented as empty spans. We use the data attribute and css
# magic to display it
<return>f'<span class="special-token" data-stoken={stoken}></span>'<block_end># We're not in a special token so this group has a start and end.
last=consecutive_chars_list[-1]<line_sep>start=first.char_ix<line_sep>end=last.char_ix+1<line_sep>span_text=text[start:end]<line_sep>css_classes=[]# What css classes will we apply on the resulting span
data_items={}# What data attributes will we apply on the result span
<if_stmt>first.token_ix<is><not><none># We can either be in a token or not (e.g. in white space)
<block_start>css_classes.append("token")<if_stmt>first.is_multitoken<block_start>css_classes.append("multi-token")<block_end><if_stmt>first.token_ix%2# We use this to color alternating tokens.
# A token might be split by an annotation that ends in the middle of it, so this
# lets us visually indicate a consecutive token despite its possible splitting in
# the html markup
<block_start>css_classes.append("odd-token")<block_end><else_stmt># Like above, but a different color so we can see the tokens alternate
<block_start>css_classes.append("even-token")<block_end><if_stmt>(EncodingVisualizer.unk_token_regex.search(encoding.tokens[first.token_ix])<is><not><none>)# This is a special token that is in the text. probably UNK
<block_start>css_classes.append("special-token")<line_sep># TODO is this the right name for the data attribute ?
data_items["stok"]=encoding.tokens[first.token_ix]<block_end><block_end><else_stmt># In this case we are looking at a group/single char that is not tokenized.
# e.g. white space
<block_start>css_classes.append("non-token")<block_end>css=f'''class="{' '.join(css_classes)}"'''<line_sep>data=""<for_stmt>key,val data_items.items()<block_start>data<augadd>f' data-{key}="{val}"'<block_end><return>f"<span {css} {data} >{span_text}</span>"<block_end>@staticmethod<def_stmt>__make_html text:str encoding:Encoding annotations:AnnotationList<arrow>str<block_start>char_states=EncodingVisualizer.__make_char_states(text encoding annotations)<line_sep>current_consecutive_chars=[char_states[0]]<line_sep>prev_anno_ix=char_states[0].anno_ix<line_sep>spans=[]<line_sep>label_colors_dict=EncodingVisualizer.calculate_label_colors(annotations)<line_sep>cur_anno_ix=char_states[0].anno_ix<if_stmt>cur_anno_ix<is><not><none># If we started in an annotation make a span for it
<block_start>anno=annotations[cur_anno_ix]<line_sep>label=anno.label<line_sep>color=label_colors_dict[label]<line_sep>spans.append(f'<span class="annotation" style="color:{color}" data-label="{label}">')<block_end><for_stmt>cs char_states[1:]<block_start>cur_anno_ix=cs.anno_ix<if_stmt>cur_anno_ix<ne>prev_anno_ix# If we've transitioned in or out of an annotation
<block_start>spans.append(# Create a span from the current consecutive characters
EncodingVisualizer.consecutive_chars_to_html(current_consecutive_chars text=text encoding=encoding ))<line_sep>current_consecutive_chars=[cs]<if_stmt>prev_anno_ix<is><not><none># if we transitioned out of an annotation close it's span
<block_start>spans.append("</span>")<block_end><if_stmt>cur_anno_ix<is><not><none># If we entered a new annotation make a span for it
<block_start>anno=annotations[cur_anno_ix]<line_sep>label=anno.label<line_sep>color=label_colors_dict[label]<line_sep>spans.append(f'<span class="annotation" style="color:{color}" data-label="{label}">')<block_end><block_end>prev_anno_ix=cur_anno_ix<if_stmt>cs.partition_key()<eq>current_consecutive_chars[0].partition_key()# If the current charchter is in the same "group" as the previous one
<block_start>current_consecutive_chars.append(cs)<block_end><else_stmt># Otherwise we make a span for the previous group
<block_start>spans.append(EncodingVisualizer.consecutive_chars_to_html(current_consecutive_chars text=text encoding=encoding ))<line_sep># An reset the consecutive_char_list to form a new group
current_consecutive_chars=[cs]<block_end><block_end># All that's left is to fill out the final span
# TODO I think there is an edge case here where an annotation's span might not close
spans.append(EncodingVisualizer.consecutive_chars_to_html(current_consecutive_chars text=text encoding=encoding ))<line_sep>res=HTMLBody(spans)# Send the list of spans to the body of our html
<return>res<block_end>@staticmethod<def_stmt>__make_anno_map text:str annotations:AnnotationList<arrow>PartialIntList<block_start>"""
Args:
text (:obj:`str`):
The raw text we want to align to
annotations (:obj:`AnnotationList`):
A (possibly empty) list of annotations
Returns:
A list of length len(text) whose entry at index i is None if there is no annotation on
charachter i or k, the index of the annotation that covers index i where k is with
respect to the list of annotations
"""<line_sep>annotation_map=[<none>]<times>len(text)<for_stmt>anno_ix,a enumerate(annotations)<block_start><for_stmt>i range(a.start a.end)<block_start>annotation_map[i]=anno_ix<block_end><block_end><return>annotation_map<block_end>@staticmethod<def_stmt>__make_char_states text:str encoding:Encoding annotations:AnnotationList<arrow>List[CharState]<block_start>"""
For each character in the original text, we emit a tuple representing it's "state":
* which token_ix it corresponds to
* which word_ix it corresponds to
* which annotation_ix it corresponds to
Args:
text (:obj:`str`):
The raw text we want to align to
annotations (:obj:`List[Annotation]`):
A (possibly empty) list of annotations
encoding: (:class:`~tokenizers.Encoding`):
The encoding returned from the tokenizer
Returns:
:obj:`List[CharState]`: A list of CharStates, indicating for each char in the text what
it's state is
"""<line_sep>annotation_map=EncodingVisualizer.__make_anno_map(text annotations)<line_sep># Todo make this a dataclass or named tuple
char_states:List[CharState]=[CharState(char_ix)<for>char_ix range(len(text))]<for_stmt>token_ix,token enumerate(encoding.tokens)<block_start>offsets=encoding.token_to_chars(token_ix)<if_stmt>offsets<is><not><none><block_start>start,end=offsets<for_stmt>i range(start end)<block_start>char_states[i].tokens.append(token_ix)<block_end><block_end><block_end><for_stmt>char_ix,anno_ix enumerate(annotation_map)<block_start>char_states[char_ix].anno_ix=anno_ix<block_end><return>char_states<block_end><block_end><def_stmt>HTMLBody children:List[str] css_styles=css<arrow>str<block_start>"""
Generates the full html with css from a list of html spans
Args:
children (:obj:`List[str]`):
A list of strings, assumed to be html elements
css_styles (:obj:`str`, `optional`):
Optional alternative implementation of the css
Returns:
:obj:`str`: An HTML string with style markup
"""<line_sep>children_text="".join(children)<line_sep><return>f"""
<html>
<head>
<style>
{css_styles}
</style>
</head>
<body>
<div class="tokenized-text" dir=auto>
{children_text}
</div>
</body>
</html>
"""<block_end> |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The exponentially scaled modified Bessel function of the first kind."""<import_stmt>numpy<as>np<import_stmt>scipy.special<import_from_stmt>tensorflow.python.ops script_ops<import_from_stmt>tensorflow.python.ops array_ops<import_from_stmt>tensorflow.python.framework dtypes<import_from_stmt>tensorflow.python.framework ops<import_from_stmt>tensorflow.python.ops.custom_gradient custom_gradient<line_sep>@custom_gradient<def_stmt>ive v z<block_start>"""Exponentially scaled modified Bessel function of the first kind."""<line_sep>output=array_ops.reshape(script_ops.py_func(<lambda>v z:np.select(condlist=[v<eq>0 v<eq>1] choicelist=[scipy.special.i0e(z dtype=z.dtype) scipy.special.i1e(z dtype=z.dtype)] default=scipy.special.ive(v z dtype=z.dtype)) [v z] z.dtype) ops.convert_to_tensor(array_ops.shape(z) dtype=dtypes.int32))<def_stmt>grad dy<block_start><return><none> dy<times>(ive(v-1 z)-ive(v z)<times>(v+z)/z)<block_end><return>output grad<block_end> |
"""
__init__.py
@Organization:
@Author: <NAME>
@Time: 4/22/21 5:28 PM
@Function:
"""<line_sep> |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pyspark.sql import SQLContext
from py4j.java_gateway import java_import


def register(sc):
    # Import the Scala-side HBaseSQLContext class into the Py4J gateway's
    # JVM view so it can be instantiated from Python.
    java_import(sc._gateway.jvm, "org.apache.spark.sql.hbase.HBaseSQLContext")


__all__ = ["HBaseSQLContext"]


class HBaseSQLContext(SQLContext):
    """A variant of Spark SQL that integrates with data stored in HBase.
    """

    def __init__(self, sparkContext):
        """Create a new HbaseContext.

        @param sparkContext: The SparkContext to wrap.
        """
        SQLContext.__init__(self, sparkContext)
        # Eagerly instantiate the JVM-side HBaseSQLContext.
        self._scala_HBaseSQLContext = self._get_hbase_ctx()

    @property
    def _ssql_ctx(self):
        # Lazily (re)create the JVM context if it is missing, and make the
        # base-class SQLContext use it as its underlying Scala context.
        if self._scala_HBaseSQLContext is None:
            print("loading hbase context ..")
            self._scala_HBaseSQLContext = self._get_hbase_ctx()
        if self._scala_SQLContext is None:
            self._scala_SQLContext = self._scala_HBaseSQLContext
        return self._scala_HBaseSQLContext

    def _get_hbase_ctx(self):
        # Construct the Scala HBaseSQLContext around the wrapped SparkContext.
        return self._jvm.HBaseSQLContext(self._jsc.sc())


#TODO: add tests if for main
|
"""
STRIP estimators.
"""<import_from_stmt>art.estimators.poison_mitigation.strip.strip STRIPMixin<line_sep> |
# Copyright 2012 Viewfinder Inc. All Rights Reserved.
"""Friend relation.
Viewfinder friends define a relationship between two users predicated on confirmation of photo
sharing permission. Each friend has an associated 'status', which can be:
- 'friend': user has been marked as a friend; however, that user may not have the reverse
friendship object.
- 'muted': a friend who has attained special status as an unwanted irritant. Content shared
from these friends is not shown, though still received and can be retrieved.
- 'blocked': a friend who has attained special status as an unwanted irritant. These users will
not show up in suggestions lists and cannot be contacted for sharing.
Friends are different than contacts. Contacts are the full spectrum of social connections. A
contact doesn't become a viewfinder friend until a share has been completed.
NOTE: Next comment is outdated, but we may re-enable something similar in future.
The 'colocated_shares', 'total_shares', 'last_colocated' and 'last_share' values are used to
quantify the strength of the sharing relationship. Each time the users in a friend relationship
are co-located, 'colocated_shares' is decayed based on 'last_colocated' and the current time
and updated either with a +1 if the sharing occurs or a -1 if not. 'total_shares' is similarly
updated, though not just when the users are co-located, but on every share that a user initiates.
Friend: viewfinder friend information
"""

__authors__ = ['<EMAIL> (<NAME>)',
               '<EMAIL> (<NAME>)']

import logging
import math

from functools import partial
from tornado import gen
from viewfinder.backend.base import util
from viewfinder.backend.base.exceptions import NotFoundError
from viewfinder.backend.db import db_client, vf_schema
from viewfinder.backend.db.base import DBObject
from viewfinder.backend.db.range_base import DBRangeObject
from viewfinder.backend.op.notification_manager import NotificationManager


@DBObject.map_table_attributes
class Friend(DBRangeObject):
  """Viewfinder friend data object."""
  __slots__ = []

  _table = DBObject._schema.GetTable(vf_schema.FRIEND)

  # Friend status values stored in the 'status' attribute.
  FRIEND = 'friend'
  MUTED = 'muted'
  BLOCKED = 'blocked'

  FRIEND_ATTRIBUTES = set(['nickname'])
  """Subset of friend attributes that should be projected to the user."""

  # Half-life used to exponentially decay the share counters.
  _SHARE_HALF_LIFE = 60 * 60 * 24 * 30  # 1 month

  def __init__(self, user_id=None, friend_id=None):
    """Creates a friendship from 'user_id' to 'friend_id' with default 'friend' status."""
    super(Friend, self).__init__()
    self.user_id = user_id
    self.friend_id = friend_id
    self.status = Friend.FRIEND

  def IsBlocked(self):
    """Returns true if the "friend" identified by self.friend_id is blocked."""
    return self.status == Friend.BLOCKED

  def DecayShares(self, timestamp):
    """Decays 'total_shares' and 'colocated_shares' based on 'timestamp'. Updates 'last_share'
    and 'last_colocated' to 'timestamp'.
    """
    def _ComputeDecay(shares, last_time):
      # A None last_time means no shares have ever been recorded; in that
      # case the counter must also be None and decays to 0.
      if last_time is None:
        assert shares is None, shares
        return 0
      # Exponential decay with half-life _SHARE_HALF_LIFE.
      decay = math.exp(-math.log(2) * (timestamp - last_time) / Friend._SHARE_HALF_LIFE)
      return shares * decay

    self.total_shares = _ComputeDecay(self.total_shares, self.last_share)
    self.last_share = timestamp
    self.colocated_shares = _ComputeDecay(self.colocated_shares, self.last_colocated)
    self.last_colocated = timestamp

  def IncrementShares(self, timestamp, shared, colocated):
    """Decays and updates 'total_shares' and 'last_share' based on whether sharing occurred
    ('shared'==True). If 'colocated', the 'colocated_shares' and 'last_colocated' are updated
    similarly.
    """
    self.DecayShares(timestamp)
    # +1 when a share happened, -1 when it did not.
    self.total_shares += (1.0 if shared else -1.0)
    if colocated:
      self.colocated_shares += (1.0 if shared else -1.0)

  @classmethod
  @gen.engine
  def MakeFriends(cls, client, user_id, friend_id, callback):
    """Creates a bi-directional friendship between user_id and friend_id if it does not already
    exist. Invokes the callback with the pair of friendship objects:
      [(user_id=>friend_id), (friend_id=>user_id)]
    """
    from viewfinder.backend.db.user import User  # NOTE(review): appears unused in this method; confirm before removing.

    # Determine whether one or both sides of the friendship are missing.
    forward_friend, reverse_friend = yield [gen.Task(Friend.Query, client, user_id, friend_id, None, must_exist=False),
                                            gen.Task(Friend.Query, client, friend_id, user_id, None, must_exist=False)]

    # Make sure that both sides of the friendship have been created.
    if forward_friend is None:
      forward_friend = Friend.CreateFromKeywords(user_id=user_id, friend_id=friend_id, status=Friend.FRIEND)
      yield gen.Task(forward_friend.Update, client)

    if reverse_friend is None:
      reverse_friend = Friend.CreateFromKeywords(user_id=friend_id, friend_id=user_id, status=Friend.FRIEND)
      yield gen.Task(reverse_friend.Update, client)

    callback((forward_friend, reverse_friend))

  @classmethod
  @gen.engine
  def MakeFriendsWithGroup(cls, client, user_ids, callback):
    """Creates bi-directional friendships between all the specified users. Each user will be
    friends with every other user.
    """
    # Pairwise over all distinct (user_id, friend_id) combinations; each
    # MakeFriends call creates both directions of the relationship.
    yield [gen.Task(Friend.MakeFriends, client, user_id, friend_id)
           for index, user_id in enumerate(user_ids)
           for friend_id in user_ids[index + 1:]
           if user_id != friend_id]
    callback()

  @classmethod
  @gen.engine
  def MakeFriendAndUpdate(cls, client, user_id, friend_dict, callback):
    """Ensures that the given user has at least a one-way friend relationship with the given
    friend. Updates the friend relationship attributes with those given in "friend_dict".
    """
    from viewfinder.backend.db.user import User
    friend = yield gen.Task(Friend.Query, client, user_id, friend_dict['user_id'], None, must_exist=False)
    if friend is None:
      # Ensure that the friend exists as user in the system.
      friend_user = yield gen.Task(User.Query, client, friend_dict['user_id'], None, must_exist=False)
      if friend_user is None:
        raise NotFoundError('User %d does not exist.' % friend_dict['user_id'])

      # Create a one-way friend relationship from the calling user to the friend user.
      friend = Friend.CreateFromKeywords(user_id=user_id, friend_id=friend_dict['user_id'], status=Friend.FRIEND)

    # Update all given attributes.
    assert friend_dict['user_id'] == friend.friend_id, (friend_dict, friend)
    for key, value in friend_dict.iteritems():
      if key != 'user_id':
        # Only attributes whitelisted in FRIEND_ATTRIBUTES may be set.
        assert key in Friend.FRIEND_ATTRIBUTES, friend_dict
        setattr(friend, key, value)

    yield gen.Task(friend.Update, client)
    callback()

  @classmethod
  @gen.engine
  def UpdateOperation(cls, client, callback, user_id, friend):
    """Updates friend metadata for the relationship between the given user and friend."""
    # Update the metadata.
    yield gen.Task(Friend.MakeFriendAndUpdate, client, user_id, friend)

    # Send notifications to all the calling user's devices.
    yield NotificationManager.NotifyUpdateFriend(client, friend)
    callback()
# This file is Public Domain and may be used without restrictions.
import _jpype
import jpype
from jpype.types import *
from jpype import java
import jpype.dbapi2 as dbapi2
import common
import time
try:
    import zlib
except ImportError:
    zlib = None


class SQLModuleTestCase(common.JPypeTestCase):
    """Checks the jpype.dbapi2 module-level API against the DB-API 2.0 spec
    (PEP 249): globals, exception hierarchy, constructors and type objects.
    """

    def setUp(self):
        common.JPypeTestCase.setUp(self)

    def assertIsSubclass(self, a, b):
        # Helper with a clearer failure message than a bare assertTrue.
        self.assertTrue(issubclass(a, b), "`%s` is not a subclass of `%s`" %
                        (a.__name__, b.__name__))

    def testConstants(self):
        self.assertEqual(dbapi2.apilevel, "2.0")
        self.assertEqual(dbapi2.threadsafety, 2)
        self.assertEqual(dbapi2.paramstyle, "qmark")

    def testExceptions(self):
        # PEP 249 exception inheritance layout.
        self.assertIsSubclass(dbapi2.Warning, Exception)
        self.assertIsSubclass(dbapi2.Error, Exception)
        self.assertIsSubclass(dbapi2.InterfaceError, dbapi2.Error)
        self.assertIsSubclass(dbapi2.DatabaseError, dbapi2.Error)
        self.assertIsSubclass(dbapi2._SQLException, dbapi2.Error)
        self.assertIsSubclass(dbapi2.DataError, dbapi2.DatabaseError)
        self.assertIsSubclass(dbapi2.OperationalError, dbapi2.DatabaseError)
        self.assertIsSubclass(dbapi2.IntegrityError, dbapi2.DatabaseError)
        self.assertIsSubclass(dbapi2.InternalError, dbapi2.DatabaseError)
        # NOTE(review): the following line duplicates the one above — likely
        # one of them was meant to cover a different exception; confirm.
        self.assertIsSubclass(dbapi2.InternalError, dbapi2.DatabaseError)
        self.assertIsSubclass(dbapi2.ProgrammingError, dbapi2.DatabaseError)
        self.assertIsSubclass(dbapi2.NotSupportedError, dbapi2.DatabaseError)

    def testConnectionExceptions(self):
        # PEP 249 optional extension: exceptions exposed as Connection attrs.
        cx = dbapi2.Connection
        self.assertEqual(cx.Warning, dbapi2.Warning)
        self.assertEqual(cx.Error, dbapi2.Error)
        self.assertEqual(cx.InterfaceError, dbapi2.InterfaceError)
        self.assertEqual(cx.DatabaseError, dbapi2.DatabaseError)
        self.assertEqual(cx.DataError,
                         dbapi2.DataError)
        self.assertEqual(cx.OperationalError, dbapi2.OperationalError)
        self.assertEqual(cx.IntegrityError, dbapi2.IntegrityError)
        self.assertEqual(cx.InternalError, dbapi2.InternalError)
        # NOTE(review): duplicated assertion — see testExceptions; confirm.
        self.assertEqual(cx.InternalError, dbapi2.InternalError)
        self.assertEqual(cx.ProgrammingError, dbapi2.ProgrammingError)
        self.assertEqual(cx.NotSupportedError, dbapi2.NotSupportedError)

    def test_Date(self):
        # Smoke-test both Date constructors.
        d1 = dbapi2.Date(2002, 12, 25)  # noqa F841
        d2 = dbapi2.DateFromTicks(  # noqa F841
            time.mktime((2002, 12, 25, 0, 0, 0, 0, 0, 0)))
        # Can we assume this? API doesn't specify, but it seems implied
        # self.assertEqual(str(d1),str(d2))

    def test_Time(self):
        # Smoke-test both Time constructors.
        t1 = dbapi2.Time(13, 45, 30)  # noqa F841
        t2 = dbapi2.TimeFromTicks(  # noqa F841
            time.mktime((2001, 1, 1, 13, 45, 30, 0, 0, 0)))
        # Can we assume this? API doesn't specify, but it seems implied
        # self.assertEqual(str(t1),str(t2))

    def test_Timestamp(self):
        # Smoke-test both Timestamp constructors.
        t1 = dbapi2.Timestamp(2002, 12, 25, 13, 45, 30)  # noqa F841
        t2 = dbapi2.TimestampFromTicks(  # noqa F841
            time.mktime((2002, 12, 25, 13, 45, 30, 0, 0, 0)))
        # Can we assume this? API doesn't specify, but it seems implied
        # self.assertEqual(str(t1),str(t2))

    def test_Binary(self):
        # Binary must accept both non-empty and empty byte strings.
        b = dbapi2.Binary(b"Something")
        b = dbapi2.Binary(b"")  # noqa F841

    def test_STRING(self):
        self.assertTrue(hasattr(dbapi2, "STRING"), "module.STRING must be defined")

    def test_BINARY(self):
        self.assertTrue(hasattr(dbapi2, "BINARY"), "module.BINARY must be defined.")

    def test_NUMBER(self):
        self.assertTrue(hasattr(dbapi2, "NUMBER"), "module.NUMBER must be defined.")

    def test_DATETIME(self):
        self.assertTrue(hasattr(dbapi2, "DATETIME"), "module.DATETIME must be defined.")

    def test_ROWID(self):
        self.assertTrue(hasattr(dbapi2, "ROWID"), "module.ROWID must be defined.")


class SQLTablesTestCase(common.JPypeTestCase):
    """Checks that every registered dbapi2 type object is printable."""

    def setUp(self):
        common.JPypeTestCase.setUp(self)

    def testStr(self):
        for i in dbapi2._types:
            self.assertIsInstance(str(i), str)

    def testRepr(self):
        for i in dbapi2._types:
            self.assertIsInstance(repr(i), str)
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Function, Variable
from torch.utils.serialization import load_lua

from spn import SoftProposal, SpatialSumOverMap, hook_spn, unhook_spn


class CallLegacyModel(Function):
    """Autograd wrapper that runs a legacy (Torch7/Lua) model forward-only."""

    @staticmethod
    def forward(ctx, model, x):
        # Move the legacy model to the same device/precision as the input
        # before running it.
        if x.is_cuda:
            return model.cuda().forward(x)
        else:
            return model.float().forward(x)

    @staticmethod
    def backward(ctx, *args, **kwargs):
        # The legacy model is inference-only; gradients cannot flow through it.
        raise NotImplementedError('The backward call of LegacyModel is not implemented')


class LegacyModel(nn.Module):
    """nn.Module adapter around a legacy Torch7 model (forward pass only)."""

    def __init__(self, model):
        super(LegacyModel, self).__init__()
        self.model = model

    def forward(self, x):
        return CallLegacyModel.apply(self.model, x)

    def __repr__(self):
        return '{}({})'.format(self.__class__.__name__, repr(self.model))


class SP_GoogLeNet(nn.Module):
    """GoogLeNet with a Soft Proposal pooling head for ImageNet (1000 classes).

    Loads a Torch7 checkpoint via load_lua; the checkpoint is presumably a
    3-tuple of (feature extractor, adconv weights, classifier weights) —
    TODO confirm against the original training script.
    """

    def __init__(self, state_dict='SP_GoogleNet_ImageNet.pt'):
        super(SP_GoogLeNet, self).__init__()
        state_dict = load_lua(state_dict)
        pretrained_model = state_dict[0]
        pretrained_model.evaluate()  # put the Lua model in inference mode
        self.features = LegacyModel(pretrained_model)

        # Soft Proposal pooling head on top of the 832-channel feature maps.
        self.pooling = nn.Sequential()
        self.pooling.add_module('adconv', nn.Conv2d(832, 1024, kernel_size=3, stride=1, padding=1, groups=2, bias=True))
        self.pooling.add_module('maps', nn.ReLU())
        self.pooling.add_module('sp', SoftProposal(factor=2.1))
        self.pooling.add_module('sum', SpatialSumOverMap())
        # Copy pretrained adconv parameters from the checkpoint.
        self.pooling.adconv.weight.data.copy_(state_dict[1][0])
        self.pooling.adconv.bias.data.copy_(state_dict[1][1])

        # classification layer
        self.classifier = nn.Linear(1024, 1000)
        self.classifier.weight.data.copy_(state_dict[2][0])
        self.classifier.bias.data.copy_(state_dict[2][1])

        # image normalization
        self.image_normalization_mean = [0.485, 0.456, 0.406]
        self.image_normalization_std = [0.229, 0.224, 0.225]

    def forward(self, x):
        x = self.features(x)
        x = self.pooling(x)
        x = x.view(x.size(0), -1)
        x = self.classifier(x)
        return x

    def inference(self, mode=True):
        # Toggle Soft Proposal inference hooks; returns self for chaining.
        hook_spn(self) if mode else unhook_spn(self)
        return self
"""Contains derivative calculation with BackPACK."""
from test.core.derivatives.implementation.base import DerivativesImplementation
from test.utils import chunk_sizes
from typing import List

from torch import Tensor, einsum, zeros

from backpack.utils.subsampling import subsample


class BackpackDerivatives(DerivativesImplementation):
    """Derivative implementations with BackPACK."""

    def __init__(self, problem):
        """Initialization.

        Args:
            problem: test problem
        """
        problem.extend()
        super().__init__(problem)

    def store_forward_io(self):
        """Do one forward pass.

        This implicitly saves relevant quantities for backward pass.
        """
        self.problem.forward_pass()

    def jac_mat_prod(self, mat):  # noqa: D102
        self.store_forward_io()
        return self.problem.derivative.jac_mat_prod(self.problem.module, None, None, mat)

    def jac_t_mat_prod(
        self, mat: Tensor, subsampling: List[int]
    ) -> Tensor:  # noqa: D102
        self.store_forward_io()
        return self.problem.derivative.jac_t_mat_prod(
            self.problem.module, None, None, mat, subsampling=subsampling
        )

    def param_mjp(
        self,
        param_str: str,
        mat: Tensor,
        sum_batch: bool,
        subsampling: List[int] = None,
    ) -> Tensor:  # noqa: D102
        self.store_forward_io()
        return self.problem.derivative.param_mjp(
            param_str,
            self.problem.module,
            None,
            None,
            mat,
            sum_batch=sum_batch,
            subsampling=subsampling,
        )

    def weight_jac_mat_prod(self, mat):  # noqa: D102
        self.store_forward_io()
        return self.problem.derivative.weight_jac_mat_prod(
            self.problem.module, None, None, mat
        )

    def bias_jac_mat_prod(self, mat):  # noqa: D102
        self.store_forward_io()
        return self.problem.derivative.bias_jac_mat_prod(
            self.problem.module, None, None, mat
        )

    def ea_jac_t_mat_jac_prod(self, mat):  # noqa: D102
        self.store_forward_io()
        return self.problem.derivative.ea_jac_t_mat_jac_prod(
            self.problem.module, None, None, mat
        )

    def sum_hessian(self):  # noqa: D102
        self.store_forward_io()
        return self.problem.derivative.sum_hessian(self.problem.module, None, None)

    def input_hessian_via_sqrt_hessian(
        self, mc_samples: int = None, chunks: int = 1, subsampling: List[int] = None
    ) -> Tensor:
        """Computes the Hessian w.r.t. to the input from its matrix square root.

        Args:
            mc_samples: If int, uses an MC approximation with the specified
                number of samples. If None, uses the exact hessian. Defaults to None.
            chunks: Maximum sequential split of the computation. Default: ``1``.
                Only used if mc_samples is specified.
            subsampling: Indices of active samples. ``None`` uses all samples.

        Returns:
            Hessian with respect to the input. Has shape
            ``[N, A, B, ..., N, A, B, ...]`` where ``N`` is the batch size or number
            of active samples when sub-sampling is used, and ``[A, B, ...]`` are the
            input's feature dimensions.
        """
        self.store_forward_io()

        if mc_samples is not None:
            # MC path: split the samples into chunks and accumulate a weighted
            # average of the per-chunk Hessian estimates (weight = chunk share).
            chunk_samples = chunk_sizes(mc_samples, chunks)
            chunk_weights = [samples / mc_samples for samples in chunk_samples]

            individual_hessians: Tensor = sum(
                weight
                * self._sample_hessians_from_sqrt(
                    self.problem.derivative.sqrt_hessian_sampled(
                        self.problem.module,
                        None,
                        None,
                        mc_samples=samples,
                        subsampling=subsampling,
                    )
                )
                for weight, samples in zip(chunk_weights, chunk_samples)
            )
        else:
            # Exact path: use the exact symmetric factorization of the Hessian.
            sqrt_hessian = self.problem.derivative.sqrt_hessian(
                self.problem.module, None, None, subsampling=subsampling
            )
            individual_hessians = self._sample_hessians_from_sqrt(sqrt_hessian)

        input0 = subsample(self.problem.module.input0, subsampling=subsampling)
        return self._embed_sample_hessians(individual_hessians, input0)

    def hessian_is_zero(self) -> bool:  # noqa: D102
        return self.problem.derivative.hessian_is_zero(self.problem.module)

    def _sample_hessians_from_sqrt(self, sqrt: Tensor) -> Tensor:
        """Convert individual matrix square root into individual full matrix.

        Args:
            sqrt: individual square root of hessian

        Returns:
            Individual Hessians of shape ``[N, A, B, ..., A, B, ...]`` where
            ``input.shape[1:] = [A, B, ...]`` are the input feature dimensions
            and ``N`` is the batch size.
        """
        N, input_dims = sqrt.shape[1], sqrt.shape[2:]
        sqrt_flat = sqrt.flatten(start_dim=2)
        # H[n] = S[n]^T S[n], contracting over the leading "V" dimension.
        sample_hessians = einsum("vni,vnj->nij", sqrt_flat, sqrt_flat)

        return sample_hessians.reshape(N, *input_dims, *input_dims)

    def _embed_sample_hessians(
        self, individual_hessians: Tensor, input: Tensor
    ) -> Tensor:
        """Embed Hessians w.r.t. individual samples into Hessian w.r.t. all samples.

        Args:
            individual_hessians: Hessians w.r.t. individual samples in the input.
            input: Inputs for the for samples whose individual Hessians are passed.
                Has shape ``[N, A, B, ..., A, B, ...]`` where ``N`` is the number of
                active samples and ``[A, B, ...]`` are the feature dimensions.

        Returns:
            Hessian that contains the individual Hessians as diagonal blocks.
            Has shape ``[N, A, B, ..., N, A, B, ...]``.
        """
        N, D = input.shape[0], input.shape[1:].numel()
        hessian = zeros(N, D, N, D, device=input.device, dtype=input.dtype)

        # Place each per-sample Hessian on the block diagonal; cross-sample
        # blocks stay zero.
        for n in range(N):
            hessian[n, :, n, :] = individual_hessians[n].reshape(D, D)

        return hessian.reshape(*input.shape, *input.shape)

    def hessian_mat_prod(self, mat: Tensor) -> Tensor:  # noqa: D102
        self.store_forward_io()
        hmp = self.problem.derivative.make_hessian_mat_prod(
            self.problem.module, None, None
        )
        return hmp(mat)
# Golden expected result for a parser unit test. Presumably the parsed output
# of an IS-IS hostname command (system-ID -> dynamic hostname mapping per
# IS-IS instance tag) — TODO confirm against the parser under test.
expected_output = {
    "tag": {
        "VRF1": {
            "hostname_db": {
                "hostname": {
                    "7777.77ff.eeee": {"hostname": "R7", "level": 2},
                    # 'local_router' marks this router's own entry.
                    "2222.22ff.4444": {"hostname": "R2", "local_router": True},
                }
            }
        },
        "test": {
            "hostname_db": {
                "hostname": {
                    "9999.99ff.3333": {"hostname": "R9", "level": 2},
                    "8888.88ff.1111": {"hostname": "R8", "level": 2},
                    "7777.77ff.eeee": {"hostname": "R7", "level": 2},
                    "5555.55ff.aaaa": {"hostname": "R5", "level": 2},
                    "3333.33ff.6666": {"hostname": "R3", "level": 2},
                    "1111.11ff.2222": {"hostname": "R1", "level": 1},
                    "2222.22ff.4444": {"hostname": "R2", "local_router": True},
                }
            }
        },
    }
}
import time

# Duration of the pause, in seconds.
seconds = 1
# Block the current thread for `seconds` seconds.
time.sleep(seconds)
# Copyright 2009-2017 <NAME>.
# This program is distributed under the MIT license.
import wx

from python_toolbox import caching

is_mac = (wx.Platform == '__WXMAC__')
is_gtk = (wx.Platform == '__WXGTK__')
is_win = (wx.Platform == '__WXMSW__')


@caching.cache(max_size=100)
def get_focus_pen(color='black', width=1, dashes=(1, 4)):
    '''
    Get a dashed pen suitable for drawing a focus indicator.

    `color` is either a wx colour object or a colour name, `width` is the
    pen width in pixels, and `dashes` is the on/off dash pattern handed to
    `wx.Pen.SetDashes`. Results are cached, so equal arguments return the
    same pen object.
    '''
    # Fixed: the default used to be the mutable list `[1, 4]` (shared across
    # all calls); a tuple default avoids the mutable-default pitfall while
    # still accepting any sequence from callers.
    if isinstance(color, basestring):
        color = wx.NamedColour(color)
    # todo: do `if is_mac`, also gtk maybe
    pen = wx.Pen(color, width, wx.USER_DASH)
    dash_list = list(dashes)
    pen.SetDashes(dash_list)
    # wx requires the dash sequence to stay alive as long as the pen uses it;
    # since the pen is cached and long-lived, anchor the list on the pen.
    pen._dashes = dash_list
    return pen
"""The test for the sensibo update platform."""
from __future__ import annotations

from datetime import timedelta
from unittest.mock import patch

from pysensibo.model import SensiboData
from pytest import MonkeyPatch

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.core import HomeAssistant
from homeassistant.util import dt

from tests.common import async_fire_time_changed


async def test_select(
    hass: HomeAssistant,
    load_int: ConfigEntry,
    monkeypatch: MonkeyPatch,
    get_data: SensiboData,
) -> None:
    """Test the Sensibo update."""
    # `load_int` and `get_data` are fixtures defined elsewhere (presumably
    # conftest.py): a loaded config entry and mocked Sensibo API data.
    # Hallway reports installed SKY30046 with SKY30048 available -> update on.
    state1 = hass.states.get("update.hallway_update_available")
    state2 = hass.states.get("update.kitchen_update_available")
    assert state1.state == STATE_ON
    assert state1.attributes["installed_version"] == "SKY30046"
    assert state1.attributes["latest_version"] == "SKY30048"
    assert state1.attributes["title"] == "skyv2"
    assert state2.state == STATE_OFF

    # Simulate the hallway device having installed the latest firmware.
    monkeypatch.setattr(get_data.parsed["ABC999111"], "fw_ver", "SKY30048")

    # Advance time past the coordinator's update interval so the entity
    # refreshes from the (patched) API data.
    with patch(
        "homeassistant.components.sensibo.coordinator.SensiboClient.async_get_devices_data",
        return_value=get_data,
    ):
        async_fire_time_changed(
            hass,
            dt.utcnow() + timedelta(minutes=5),
        )
        await hass.async_block_till_done()

    # With installed == latest, no update should be pending any more.
    state1 = hass.states.get("update.hallway_update_available")
    assert state1.state == STATE_OFF
# Copyright 2017, <NAME>, All rights reserved.
import json

from common import overrides, Constants, Persist, PersistError


class ControllerPersist(Persist):
    """
    Persisting state for controller
    """
    # JSON keys for the serialized form.
    __KEY_DOWNLOADED_FILE_NAMES = "downloaded"
    __KEY_EXTRACTED_FILE_NAMES = "extracted"

    def __init__(self):
        # Names of files that have been fully downloaded / extracted.
        self.downloaded_file_names = set()
        self.extracted_file_names = set()

    @classmethod
    @overrides(Persist)
    def from_str(cls, content: str) -> "ControllerPersist":
        """Deserialize a ControllerPersist from its JSON string form.

        :param content: JSON produced by :meth:`to_str`
        :return: the reconstructed persist state
        :raises PersistError: if content is not valid JSON or lacks a key
        """
        persist = ControllerPersist()
        try:
            dct = json.loads(content)
            persist.downloaded_file_names = set(dct[ControllerPersist.__KEY_DOWNLOADED_FILE_NAMES])
            persist.extracted_file_names = set(dct[ControllerPersist.__KEY_EXTRACTED_FILE_NAMES])
            return persist
        except (json.decoder.JSONDecodeError, KeyError) as e:
            # Fixed: message previously said "AutoQueuePersist" (copy-paste
            # from a sibling class); report the correct class name, and chain
            # the original exception for easier debugging.
            raise PersistError("Error parsing ControllerPersist - {}: {}".format(
                type(e).__name__, str(e))) from e

    @overrides(Persist)
    def to_str(self) -> str:
        """Serialize this state to a pretty-printed JSON string."""
        dct = dict()
        dct[ControllerPersist.__KEY_DOWNLOADED_FILE_NAMES] = list(self.downloaded_file_names)
        dct[ControllerPersist.__KEY_EXTRACTED_FILE_NAMES] = list(self.extracted_file_names)
        return json.dumps(dct, indent=Constants.JSON_PRETTY_PRINT_INDENT)
STRING_51_CHARS="SFOTYFUZTMDSOULXMKVFDOBQWNBAVGANMVLXQQZZQZQHBLJRZNY"<line_sep>STRING_301_CHARS=("ZFOMVKXETILJKBZPVKOYAUPNYWWWUICNEVXVPWNAMGCNHDBRMATGPMUHUZHUJKFWWLXBQXVDNCGJHAPKEK"<concat>"DZCXKBXEHWCWBYDIGNYXTOFWWNLPBTVIGTNQKIQDHUAHZPWQDKKCHERBYKLAUOOKJXJJLGOPSCRVEHCOAD"<concat>"BFYKJTXHMPPYWQVXCVGNNSXLNIHVKTVMEOIRXQDPLHIDZBAHUEDWXKXILEBOLILOYGZLNGCNXKWMFJWYYI"<concat>"PIDUKJVGKTUERTPRMMMVZNAAOMZJFXFSEENCAMBOUJMYXTPHJEOPKDB")<line_sep>STRING_3001_CHARS=("<KEY>"<concat>"<KEY>BPRALVWQEYTFBK<KEY>RALDRZHKPGTWZAXOUFQJKOGTMYSFEDBEQQXIGKZMXNKDCEN"<concat>"LSVHNGWVCIDMNSIZTBWBBVUMLPHRUCIZLZBFEGNFXZNJEZBUTNHNCYWWYSJSJDNOPPGHUPZLPJWDKEATZO"<concat>"UGKZEGFTFBGZDNRITDFBDJLYDGETUHBDGFEELBJBDMSRBVFPXMRJXWULONCZRZZBNFOPARFNXPQONKEIKG"<concat>"QDPJWCMGYSEIBAOLJNWPJVUSMJGCSQBLGZCWXJOYJHIZMNFMTLUQFGEBOONOZMGBWORFEUGYIUJAKLVAJZ"<concat>"FTNOPOZNMUJPWRMGPKNQSBMZQRJXLRQJPYYUXLFUPICAFTXDTQIUOQRCSLWPHHUZAOPVTBRCXWUIXMFGYT"<concat>"RBKPWJJXNQPLIAZAOKIMDWCDZABPLNOXYOZZBTHSDIPXXBKXKOSYYCITFSMNVIOCNGEMRKRBPCLBOCXBZQ"<concat>"VVWKNJBPWQNJOJWAGAIBOBFRVDWLXVBLMBSXYLOAWMPLKJOVHABNNIFTKTKBIIBOSHYQZRUFPPPRDQPMUV"<concat>"WMSWBLRUHKEMUFHIMZRUNNITKWYIWRXYPGFPXMNOABRWXGQFCWOYMMBYRQQLOIBFENIZBUIWLMDTIXCPXW"<concat>"NNHBSRPSMCQIMYRCFCPLQQGVOHYZOUGFEXDTOETUKQAXOCNGYBYPYWDQHYOKPCCORGRNHXZAA<KEY>"<concat>"CM<KEY>"<concat>"<KEY>"<concat>"OLHPFFSWTZGYPAZJXRRPATWXKRDFQJRAEOBFNIWVZDKLNYXUFBOAWSDSKFYYRTADBBYHEWNZSTDXAAOQCD"<concat>"WARSJZONQXRACMNBXZSEWZYBWADNDVRXBNJPJZQUNDYLBASCLCPFJWAMJUQAHBUZYDTIQPBPNJVVOHISZP"<concat>"VGBDNXFIHYCABTSVNVILZUPPZXMPPZVBRTRHDGHTXXLBIYTMRDOUBYBVHVVKQAXAKISFJNUTRZKOCACJAX"<concat>"ZXRRKMFOKYBHFUDBIXFAQSNUTYFNVQNGYWPJZGTLQUMOWXKKTUZGOUXAOVLQMMNKKECQCCOBNPPPXZYWZU"<concat>"WHLHZQDIETDDPXWTILXGAYJKPHBXPLRFDPDSHFUPOIWRQDWQQNARPHPVKJPXZGGXOUVBYZSLUPVIJKWKNF"<concat>"WMFKWYSYJJCCSCALMVPYIPHDKRXOWTUAYJFTAANCTVYDNSSIHGCWGKLDHFFBFSIFBMGHHFHZQSWOWZXOUW"<concat>"PKNICGXPFMFIESHPDDMGSSWGBIAQVBANHLGDBYENRLSUARJXLQWPMOUSUKIIVXICBJPSWOEZPEUAJSLITV"<concat>"XEQWSRENUJRJHPLBPFMBRPKGQNSYFWVLFLS
QGGETKDUGYOLNFSMRVAZLQOAEKCUGNFEXRUDYSKBOQPYJAH"<concat>"QHEIMSAAMTTYVJTHZDGQEITLERRYYQCTEQPTYQPHLMBDPCZZNNJYLGAGNXONCTIBSXEHXPYWBCTEEZLIYI"<concat>"FMPYONXRVLSGZOEDZIMVDDPRXBKCKEPHOVLRBSPKMLZPXNRZVSSSYAOMGSVJODUZAJDYLGUZAFJMCOVGQX"<concat>"ZUWQJENTEWQRFZYQTVEAHFQUWBUCFWHGRTMNQQFSPKKYYUBJVXKFQCCMBNGWNTRFGFKBFWTTPNDTGGWTAK"<concat>"EOTXUPGFXOVWTOERFQSEZWVUYMGHVBQZIKIBJCNMKTZANNNOVMYTFLQYVNKTVZHFUJTPWNQWRYKGMYRYDC"<concat>"WNTCUCYJCWXMMOJXUJSDWJKTTYOBFJFLBUCECGTVWKELCBDIKDUDOBLZLHYJQTVHXSUAFHDFDMETLHHEEJ"<concat>"XJYWEOTXAUOZARSSQTBBXULKBBSTQHMJAAOUDIQCCETFWAINYIJCGXCILMDCAUYDMNZBDKIPVRCKCYKOIG"<concat>"JHBLUHPOLDBWREFAZVEFFSOQQHMCXQYCQGMBHYKHJDBZXRAXLVZNYQXZEQYRSZHKKGCSOOEGNPFZDNGIMJ"<concat>"QCXAEWWDYIGTQMJKBTMGSJAJCKIODCAEXVEGYCUBEEGCMARPJIKNAROJHYHKKTKGKKRVVSVYADCJXGSXAR"<concat>"KGOUSUSZGJGFIKJDKJUIRQVSAHSTBCVOWZJDCCBWNNCBIYTCNOUPEYACCEWZNGETBTDJWQIEWRYIQXOZKP"<concat>"ULDPCINLDFFPNORJHOZBSSYPPYNZTLXBRFZGBECKTTNVIHYNKGBXTTIXIKRBGVAPNWBPFNCGWQMZHBAHBX"<concat>"MFEPSWVBUDLYDIVLZFHXTQJWUNWQHSWSCYFXQQSVORFQGUQIHUAJYFLBNBKJPOEIPYATRMNMGUTTVBOUHE"<concat>"ZKXVAUEXCJYSCZEMGWTPXMQJEUWYHTFJQTBOQBEPQIPDYLBPIKKGPVYPOVLPPHYNGNWFTNQCDAATJVKRHC"<concat>"OZGEBPFZZDPPZOWQCDFQZJAMXLVREYJQQFTQJKHMLRFJCVPVCTSVFVAGDVNXIGINSGHKGTWCKXNRZCZFVX"<concat>"FPKZHPOMJTQOIVDIYKEVIIBAUHEDGOUNPCPMVLTZQLICXKKIYRJASBNDUZAONDDLQNVRXGWNQAOWSJSFWU"<concat>"YWTTLOVXIJYERRZQCJMRZHCXEEAKYCLEICUWOJUXWHAPHQJDTBVRPVWTMCJRAUYCOTFXLLIQLOBASBMPED"<concat>"KLDZDWDYAPXCKLZMEFIAOFYGFLBMURWVBFJDDEFXNIQOORYRMNROGVCOESSHSNIBNFRHPSWVAUQQVDMAHX"<concat>"STDOVZMZEFRRFCKOLDOOFVOBCPRRLGYFJNXVPPUZONOSALUUI")<line_sep> |
import yaml
import json

from jsonschema import validate
from jsonschema.exceptions import ValidationError, SchemaError


def validate_fim_config(fim_config, schema_path="/etc/df_sysmon/fim_config_schema.json"):
    """Validate a YAML FIM config against the FIM JSON schema.

    Args:
        fim_config: YAML document (string or stream) with the FIM config.
        schema_path: Path of the JSON schema file. Defaults to the
            system-wide schema, so existing callers are unaffected.

    Returns:
        True if the config validates against the schema, False otherwise
        (the reason is printed to stdout).
    """
    with open(schema_path, "r") as schemafile:
        fim_schema = schemafile.read()

    try:
        validate(yaml.safe_load(fim_config), json.loads(fim_schema))
    except ValidationError as ex:
        print("Fim Config is not valid: \n", ex)
        return False
    except SchemaError as ex:
        print("Fim Schema is not valid: \n", ex)
        return False
    except Exception as ex:
        # Bug fix: the original `print("Error: ".ex)` performed attribute
        # access on a string literal and raised AttributeError instead of
        # reporting the error.
        print("Error: ", ex)
        return False
    return True
<import_stmt>numpy<as>np<import_from_stmt>numba jit<import_from_stmt>numba.core types<import_from_stmt>numba.tests.support TestCase tag<import_stmt>unittest<line_sep># Array overlaps involving a displacement
# Displacement-style overlap patterns: each function copies `src` into `dest`
# shifted by `k` along one axis. When both arguments alias the same buffer,
# the source and destination slices overlap.

def array_overlap1(src, dest, k=1):
    """Shift forward by k along axis 0: dest[k:] <- src[:-k]."""
    assert dest.shape == src.shape
    window = src[:-k]
    dest[k:] = window


def array_overlap2(src, dest, k=1):
    """Shift backward by k along axis 0: dest[:-k] <- src[k:]."""
    assert dest.shape == src.shape
    window = src[k:]
    dest[:-k] = window


def array_overlap3(src, dest, k=1):
    """Shift backward by k along axis 1."""
    assert dest.shape == src.shape
    window = src[:, k:]
    dest[:, :-k] = window


def array_overlap4(src, dest, k=1):
    """Shift forward by k along axis 1."""
    assert dest.shape == src.shape
    window = src[:, :-k]
    dest[:, k:] = window


def array_overlap5(src, dest, k=1):
    """Shift backward by k along the last axis."""
    assert dest.shape == src.shape
    window = src[..., k:]
    dest[..., :-k] = window


def array_overlap6(src, dest, k=1):
    """Shift forward by k along the last axis."""
    assert dest.shape == src.shape
    window = src[..., :-k]
    dest[..., k:] = window

# Array overlaps involving an in-place reversal
def array_overlap11(src, dest):
    # Reversal along axis 0 via a reversed destination view.
    assert src.shape == dest.shape
    dest[::-1] = src


def array_overlap12(src, dest):
    # Reversal along axis 0 via a reversed source view.
    assert src.shape == dest.shape
    dest[:] = src[::-1]


def array_overlap13(src, dest):
    # Reversal along axis 1 via a reversed destination view.
    assert src.shape == dest.shape
    dest[:, ::-1] = src


def array_overlap14(src, dest):
    # Reversal along axis 1 via a reversed source view.
    assert src.shape == dest.shape
    dest[:] = src[:, ::-1]


def array_overlap15(src, dest):
    # Reversal along the last axis via a reversed destination view.
    assert src.shape == dest.shape
    dest[..., ::-1] = src


def array_overlap16(src, dest):
    # Reversal along the last axis via a reversed source view.
    assert src.shape == dest.shape
    dest[:] = src[..., ::-1]


class TestArrayOverlap(TestCase):
    """Checks that numba-compiled slice assignments handle overlapping
    source/destination arrays the same way pure NumPy does.
    """

    def check_overlap(self, pyfunc, min_ndim, have_k_argument=False):
        # Run `pyfunc` in pure Python and in nopython mode on identical
        # aliased inputs (src is dest) and compare the results.
        N = 4

        def vary_layouts(orig):
            # Yield C-contiguous, F-contiguous and non-contiguous copies.
            yield orig.copy(order='C')
            yield orig.copy(order='F')
            a = orig[::-1].copy()[::-1]
            assert not a.flags.c_contiguous and not a.flags.f_contiguous
            yield a

        def check(pyfunc, cfunc, pydest, cdest, kwargs):
            # Passing the same array as src and dest creates the overlap.
            pyfunc(pydest, pydest, **kwargs)
            cfunc(cdest, cdest, **kwargs)
            self.assertPreciseEqual(pydest, cdest)

        cfunc = jit(nopython=True)(pyfunc)
        # Check for up to 3d arrays
        for ndim in range(min_ndim, 4):
            shape = (N,) * ndim
            orig = np.arange(0, N**ndim).reshape(shape)
            # Note we cannot copy a 'A' layout array exactly (bitwise),
            # so instead we call vary_layouts() twice
            for pydest, cdest in zip(vary_layouts(orig), vary_layouts(orig)):
                if have_k_argument:
                    for k in range(1, N):
                        check(pyfunc, cfunc, pydest, cdest, dict(k=k))
                else:
                    check(pyfunc, cfunc, pydest, cdest, {})

    def check_overlap_with_k(self, pyfunc, min_ndim):
        # Variant for the displacement-style functions that take `k`.
        self.check_overlap(pyfunc, min_ndim=min_ndim, have_k_argument=True)

    def test_overlap1(self):
        self.check_overlap_with_k(array_overlap1, min_ndim=1)

    def test_overlap2(self):
        self.check_overlap_with_k(array_overlap2, min_ndim=1)

    def test_overlap3(self):
        self.check_overlap_with_k(array_overlap3, min_ndim=2)

    def test_overlap4(self):
        self.check_overlap_with_k(array_overlap4, min_ndim=2)

    def test_overlap5(self):
        self.check_overlap_with_k(array_overlap5, min_ndim=1)

    def test_overlap6(self):
        self.check_overlap_with_k(array_overlap6, min_ndim=1)

    def test_overlap11(self):
        self.check_overlap(array_overlap11, min_ndim=1)

    def test_overlap12(self):
        self.check_overlap(array_overlap12, min_ndim=1)

    def test_overlap13(self):
        self.check_overlap(array_overlap13, min_ndim=2)

    def test_overlap14(self):
        self.check_overlap(array_overlap14, min_ndim=2)

    def test_overlap15(self):
        self.check_overlap(array_overlap15, min_ndim=1)

    def test_overlap16(self):
        self.check_overlap(array_overlap16, min_ndim=1)


if __name__ == '__main__':
    unittest.main()
"""Generate model benchmark source file using template.
"""<line_sep>_TEMPLATE="//src/cpp:models_benchmark.cc.template"<def_stmt>_generate_models_benchmark_src_impl ctx<block_start>ctx.actions.expand_template(template=ctx.file._template output=ctx.outputs.source_file substitutions={"{BENCHMARK_NAME}":ctx.attr.benchmark_name "{TFLITE_CPU_FILEPATH}":ctx.attr.tflite_cpu_filepath "{TFLITE_EDGETPU_FILEPATH}":ctx.attr.tflite_edgetpu_filepath } )<block_end>generate_models_benchmark_src=rule(implementation=_generate_models_benchmark_src_impl attrs={"benchmark_name":attr.string(mandatory=<true>) "tflite_cpu_filepath":attr.string(mandatory=<true>) "tflite_edgetpu_filepath":attr.string(mandatory=<true>) "_template":attr.label(default=Label(_TEMPLATE) allow_single_file=<true> ) } outputs={"source_file":"%{name}.cc"} )<line_sep> |
<import_from_stmt>bytewax Dataflow run<line_sep>flow=Dataflow()<line_sep>flow.map(<lambda>x:x<times>x)<line_sep>flow.capture()<if_stmt>__name__<eq>"__main__"<block_start><for_stmt>epoch,y sorted(run(flow enumerate(range(10))))<block_start>print(y)<block_end><block_end> |
<import_stmt>angr<line_sep>######################################
# recv
######################################
<class_stmt>recv(angr.SimProcedure)#pylint:disable=arguments-differ,unused-argument
<block_start><def_stmt>run self fd dst length flags<block_start>simfd=self.state.posix.get_fd(fd)<if_stmt>simfd<is><none><block_start><return>-1<block_end><return>simfd.read(dst length)<block_end><block_end> |
"""
"""<import_from_stmt>..unitquantity UnitQuantity<import_from_stmt>.substance mol<import_from_stmt>.volume L<line_sep>M=molar=UnitQuantity('molar' mol/L symbol='M' aliases=['Molar'])<line_sep>mM=millimolar=UnitQuantity('millimolar' molar/1000 symbol='mM')<line_sep>uM=micromolar=UnitQuantity('micromolar' mM/1000 symbol='uM' u_symbol='Β΅M')<line_sep> |
__import__("math" fromlist=[])<line_sep>__import__("xml.sax.xmlreader")<line_sep>result="subpackage_2"<class_stmt>PackageBSubpackage2Object_0<block_start><pass><block_end><def_stmt>dynamic_import_test name:str<block_start>__import__(name)<block_end> |
<import_stmt>unittest<class_stmt>TestLogoMain(unittest.TestCase)<block_start><def_stmt>test_imports self<block_start><try_stmt><block_start><import_from_stmt>dreamcoder.domains.logo.main animateSolutions dreamFromGrammar list_options outputDreams enumerateDreams visualizePrimitives Flatten LogoFeatureCNN main <block_end><except_stmt>Exception<block_start>self.fail('Unable to import logo module')<block_end><block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>unittest.main()<block_end> |
# Copyright (c) OpenMMLab. All rights reserved.
<import_stmt>os.path<as>osp<import_stmt>pickle<import_stmt>warnings<import_from_stmt>typing Dict List Optional Sequence Union<import_stmt>mmcv<import_stmt>numpy<as>np<import_from_stmt>mmcls.datasets.builder DATASETS<import_from_stmt>typing_extensions Literal<import_from_stmt>.base BaseFewShotDataset<line_sep>TRAIN_CLASSES=[('Yorkshire terrier' 'terrier') ('space shuttle' 'craft') ('drake' 'aquatic bird') ("plane, carpenter's plane, woodworking plane" 'tool') ('mosquito net' 'protective covering, protective cover, protect') ('sax, saxophone' 'musical instrument, instrument') ('container ship, containership, container vessel' 'craft') ('patas, hussar monkey, Erythrocebus patas' 'primate') ('cheetah, chetah, Acinonyx jubatus' 'feline, felid') ('submarine, pigboat, sub, U-boat' 'craft') ('prison, prison house' 'establishment') ('can opener, tin opener' 'tool') ('syringe' 'instrument') ('odometer, hodometer, mileometer, milometer' 'instrument') ('bassoon' 'musical instrument, instrument') ('Kerry blue terrier' 'terrier') ('scale, weighing machine' 'instrument') ('baseball' 'game equipment') ('cassette player' 'electronic equipment') ('shield, buckler' 'protective covering, protective cover, protect') ('goldfinch, Carduelis carduelis' 'passerine, passeriform bird') ('cornet, horn, trumpet, trump' 'musical instrument, instrument') ('flute, transverse flute' 'musical instrument, instrument') ('stopwatch, stop watch' 'instrument') ('basketball' 'game equipment') ('brassiere, bra, bandeau' 'garment') ('bulbul' 'passerine, passeriform bird') ('steel drum' 'musical instrument, instrument') ('bolo tie, bolo, bola tie, bola' 'garment') ('planetarium' 'building, edifice') ('stethoscope' 'instrument') ('proboscis monkey, Nasalis larvatus' 'primate') ('guillotine' 'instrument') ('Scottish deerhound, deerhound' 'hound, hound dog') ('ocarina, sweet potato' 'musical instrument, instrument') ('Border terrier' 'terrier') ('capuchin, ringtail, Cebus capucinus' 'primate') ('magnetic 
compass' 'instrument') ('alligator lizard' 'saurian') ('baboon' 'primate') ('sundial' 'instrument') ('gibbon, Hylobates lar' 'primate') ('grand piano, grand' 'musical instrument, instrument') ('Arabian camel, dromedary, Camelus dromedarius' 'ungulate, hoofed mammal') ('basset, basset hound' 'hound, hound dog') ('corkscrew, bottle screw' 'tool') ('miniskirt, mini' 'garment') ('missile' 'instrument') ('hatchet' 'tool') ('acoustic guitar' 'musical instrument, instrument') ('impala, Aepyceros melampus' 'ungulate, hoofed mammal') ('parking meter' 'instrument') ('greenhouse, nursery, glasshouse' 'building, edifice') ('home theater, home theatre' 'building, edifice') ('hartebeest' 'ungulate, hoofed mammal') ('hippopotamus, hippo, river horse, Hippopotamus amphibius' 'ungulate, hoofed mammal') ('warplane, military plane' 'craft') ('albatross, mollymawk' 'aquatic bird') ('umbrella' 'protective covering, protective cover, protect') ('shoe shop, shoe-shop, shoe store' 'establishment') ('suit, suit of clothes' 'garment') ('pickelhaube' 'protective covering, protective cover, protect') ('soccer ball' 'game equipment') ('yawl' 'craft') ('screwdriver' 'tool') ('Madagascar cat, ring-tailed lemur, Lemur catta' 'primate') ('garter snake, grass snake' 'snake, serpent, ophidian') ('bustard' 'aquatic bird') ('tabby, tabby cat' 'feline, felid') ('airliner' 'craft') ('tobacco shop, tobacconist shop, tobacconist' 'establishment') ('Italian greyhound' 'hound, hound dog') ('projector' 'instrument') ('bittern' 'aquatic bird') ('rifle' 'instrument') ('pay-phone, pay-station' 'electronic equipment') ('house finch, linnet, Carpodacus mexicanus' 'passerine, passeriform bird') ('monastery' 'building, edifice') ('lens cap, lens cover' 'protective covering, protective cover, protect') ('maillot, tank suit' 'garment') ('canoe' 'craft') ('letter opener, paper knife, paperknife' 'tool') ('nail' 'restraint, constraint') ('guenon, guenon monkey' 'primate') ('CD player' 'electronic equipment') ('safety 
pin' 'restraint, constraint') ('harp' 'musical instrument, instrument') ('disk brake, disc brake' 'restraint, constraint') ('otterhound, otter hound' 'hound, hound dog') ('green mamba' 'snake, serpent, ophidian') ('violin, fiddle' 'musical instrument, instrument') ('American coot, marsh hen, mud hen, water hen, Fulica americana' 'aquatic bird') ('ram, tup' 'ungulate, hoofed mammal') ('jay' 'passerine, passeriform bird') ('trench coat' 'garment') ('Indian cobra, Naja naja' 'snake, serpent, ophidian') ('projectile, missile' 'instrument') ('schooner' 'craft') ('magpie' 'passerine, passeriform bird') ('Norwich terrier' 'terrier') ('cairn, cairn terrier' 'terrier') ('crossword puzzle, crossword' 'game equipment') ('snow leopard, ounce, Panthera uncia' 'feline, felid') ('gong, tam-tam' 'musical instrument, instrument') ('library' 'building, edifice') ('swimming trunks, bathing trunks' 'garment') ('Staffordshire bullterrier, Staffordshire bull terrier' 'terrier') ('Lakeland terrier' 'terrier') ('black stork, Ciconia nigra' 'aquatic bird') ('king penguin, Aptenodytes patagonica' 'aquatic bird') ('water ouzel, dipper' 'passerine, passeriform bird') ('macaque' 'primate') ('lynx, catamount' 'feline, felid') ('ping-pong ball' 'game equipment') ('standard schnauzer' 'terrier') ('Australian terrier' 'terrier') ('stupa, tope' 'building, edifice') ('white stork, Ciconia ciconia' 'aquatic bird') ('king snake, kingsnake' 'snake, serpent, ophidian') ('Airedale, Airedale terrier' 'terrier') ('banjo' 'musical instrument, instrument') ('Windsor tie' 'garment') ('abaya' 'garment') ('stole' 'garment') ('vine snake' 'snake, serpent, ophidian') ('Bedlington terrier' 'terrier') ('langur' 'primate') ('catamaran' 'craft') ('sarong' 'garment') ('spoonbill' 'aquatic bird') ('boa constrictor, Constrictor constrictor' 'snake, serpent, ophidian') ('ruddy turnstone, Arenaria interpres' 'aquatic bird') ('hognose snake, puff adder, sand viper' 'snake, serpent, ophidian') ('American chameleon, anole, 
Anolis carolinensis' 'saurian') ('rugby ball' 'game equipment') ('black swan, Cygnus atratus' 'aquatic bird') ('frilled lizard, Chlamydosaurus kingi' 'saurian') ('oscilloscope, scope, cathode-ray oscilloscope, CRO' 'electronic equipment') ('ski mask' 'protective covering, protective cover, protect') ('marmoset' 'primate') ('Komodo dragon, Komodo lizard, dragon lizard, giant lizard, '<concat>'Varanus komodoensis' 'saurian') ('accordion, piano accordion, squeeze box' 'musical instrument, instrument') ('horned viper, cerastes, sand viper, horned asp, Cerastes cornutus' 'snake, serpent, ophidian') ('bookshop, bookstore, bookstall' 'establishment') ('Boston bull, Boston terrier' 'terrier') ('crane' 'aquatic bird') ('junco, snowbird' 'passerine, passeriform bird') ('silky terrier, Sydney silky' 'terrier') ('Egyptian cat' 'feline, felid') ('Irish terrier' 'terrier') ('leopard, Panthera pardus' 'feline, felid') ('sea snake' 'snake, serpent, ophidian') ('hog, pig, grunter, squealer, Sus scrofa' 'ungulate, hoofed mammal') ('colobus, colobus monkey' 'primate') ('chickadee' 'passerine, passeriform bird') ('Scotch terrier, Scottish terrier, Scottie' 'terrier') ('digital watch' 'instrument') ('analog clock' 'instrument') ('zebra' 'ungulate, hoofed mammal') ('American Staffordshire terrier, Staffordshire terrier, '<concat>'American pit bull terrier, pit bull terrier' 'terrier') ('European gallinule, Porphyrio porphyrio' 'aquatic bird') ('lampshade, lamp shade' 'protective covering, protective cover, protect') ('holster' 'protective covering, protective cover, protect') ('jaguar, panther, Panthera onca, Felis onca' 'feline, felid') ('cleaver, meat cleaver, chopper' 'tool') ('brambling, Fringilla montifringilla' 'passerine, passeriform bird') ('orangutan, orang, orangutang, Pongo pygmaeus' 'primate') ('combination lock' 'restraint, constraint') ('tile roof' 'protective covering, protective cover, protect') ('borzoi, Russian wolfhound' 'hound, hound dog') ('water snake' 'snake, 
serpent, ophidian') ('knot' 'restraint, constraint') ('window shade' 'protective covering, protective cover, protect') ('mosque' 'building, edifice') ('Walker hound, Walker foxhound' 'hound, hound dog') ('cardigan' 'garment') ('warthog' 'ungulate, hoofed mammal') ('whiptail, whiptail lizard' 'saurian') ('plow, plough' 'tool') ('bluetick' 'hound, hound dog') ('poncho' 'garment') ('shovel' 'tool') ('sidewinder, horned rattlesnake, Crotalus cerastes' 'snake, serpent, ophidian') ('croquet ball' 'game equipment') ('sorrel' 'ungulate, hoofed mammal') ('airship, dirigible' 'craft') ('goose' 'aquatic bird') ('church, church building' 'building, edifice') ('titi, titi monkey' 'primate') ('butcher shop, meat market' 'establishment') ('diamondback, diamondback rattlesnake, Crotalus adamanteus' 'snake, serpent, ophidian') ('common iguana, iguana, Iguana iguana' 'saurian') ('Saluki, gazelle hound' 'hound, hound dog') ('monitor' 'electronic equipment') ('sunglasses, dark glasses, shades' 'instrument') ('flamingo' 'aquatic bird') ('seat belt, seatbelt' 'restraint, constraint') ('Persian cat' 'feline, felid') ('gorilla, Gorilla gorilla' 'primate') ('banded gecko' 'saurian') ('thatch, thatched roof' 'protective covering, protective cover, protect') ('beagle' 'hound, hound dog') ('limpkin, Aramus pictus' 'aquatic bird') ('jigsaw puzzle' 'game equipment') ('rule, ruler' 'instrument') ('hammer' 'tool') ('cello, violoncello' 'musical instrument, instrument') ('lab coat, laboratory coat' 'garment') ('indri, indris, Indri indri, Indri brevicaudatus' 'primate') ('vault' 'protective covering, protective cover, protect') ('cellular telephone, cellular phone, cellphone, cell, mobile phone' 'electronic equipment') ('whippet' 'hound, hound dog') ('siamang, Hylobates syndactylus, Symphalangus syndactylus' 'primate') ("loupe, jeweler's loupe" 'instrument') ('modem' 'electronic equipment') ('lifeboat' 'craft') ('dial telephone, dial phone' 'electronic equipment') ('cougar, puma, catamount, 
mountain lion, painter, panther, '<concat>'Felis concolor' 'feline, felid') ('thimble' 'protective covering, protective cover, protect') ('ibex, Capra ibex' 'ungulate, hoofed mammal') ('lawn mower, mower' 'tool') ('bell cote, bell cot' 'protective covering, protective cover, protect') ('chain mail, ring mail, mail, chain armor, chain armour, ring armor, '<concat>'ring armour' 'protective covering, protective cover, protect') ('hair slide' 'restraint, constraint') ('apiary, bee house' 'building, edifice') ('harmonica, mouth organ, harp, mouth harp' 'musical instrument, instrument') ('green snake, grass snake' 'snake, serpent, ophidian') ('howler monkey, howler' 'primate') ('digital clock' 'instrument') ('restaurant, eating house, eating place, eatery' 'building, edifice') ('miniature schnauzer' 'terrier') ('panpipe, pandean pipe, syrinx' 'musical instrument, instrument') ('pirate, pirate ship' 'craft') ('window screen' 'protective covering, protective cover, protect') ('binoculars, field glasses, opera glasses' 'instrument') ('Afghan hound, Afghan' 'hound, hound dog') ('cinema, movie theater, movie theatre, movie house, picture palace' 'building, edifice') ('liner, ocean liner' 'craft') ('ringneck snake, ring-necked snake, ring snake' 'snake, serpent, ophidian') ('redshank, Tringa totanus' 'aquatic bird') ('Siamese cat, Siamese' 'feline, felid') ('thunder snake, worm snake, Carphophis amoenus' 'snake, serpent, ophidian') ('boathouse' 'building, edifice') ('jersey, T-shirt, tee shirt' 'garment') ('soft-coated wheaten terrier' 'terrier') ('scabbard' 'protective covering, protective cover, protect') ('muzzle' 'restraint, constraint') ('Ibizan hound, Ibizan Podenco' 'hound, hound dog') ('tennis ball' 'game equipment') ('padlock' 'restraint, constraint') ('kimono' 'garment') ('redbone' 'hound, hound dog') ('wild boar, boar, Sus scrofa' 'ungulate, hoofed mammal') ('dowitcher' 'aquatic bird') ('oboe, hautboy, hautbois' 'musical instrument, instrument') ('electric guitar' 
'musical instrument, instrument') ('trimaran' 'craft') ('barometer' 'instrument') ('llama' 'ungulate, hoofed mammal') ('robin, American robin, Turdus migratorius' 'passerine, passeriform bird') ('maraca' 'musical instrument, instrument') ('feather boa, boa' 'garment') ('<NAME>, <NAME> terrier' 'terrier') ('Lhasa, Lhasa apso' 'terrier') ('bow' 'instrument') ('punching bag, punch bag, punching ball, punchball' 'game equipment') ('volleyball' 'game equipment') ('Norfolk terrier' 'terrier') ('Gila monster, Heloderma suspectum' 'saurian') ('fire screen, fireguard' 'protective covering, protective cover, protect') ('hourglass' 'instrument') ('chimpanzee, chimp, Pan troglodytes' 'primate') ('birdhouse' 'protective covering, protective cover, protect') ('Sealyham terrier, Sealyham' 'terrier') ('Tibetan terrier, chrysanthemum dog' 'terrier') ('palace' 'building, edifice') ('wreck' 'craft') ('overskirt' 'garment') ('pelican' 'aquatic bird') ('French horn, horn' 'musical instrument, instrument') ('tiger cat' 'feline, felid') ('barbershop' 'establishment') ('revolver, six-gun, six-shooter' 'instrument') ('Irish wolfhound' 'hound, hound dog') ('lion, king of beasts, Panthera leo' 'feline, felid') ('fur coat' 'garment') ('ox' 'ungulate, hoofed mammal') ('cuirass' 'protective covering, protective cover, protect') ('grocery store, grocery, food market, market' 'establishment') ('hoopskirt, crinoline' 'garment') ('spider monkey, Ateles geoffroyi' 'primate') ('tiger, Panthera tigris' 'feline, felid') ('bloodhound, sleuthhound' 'hound, hound dog') ('red-backed sandpiper, dunlin, Erolia alpina' 'aquatic bird') ('drum, membranophone, tympan' 'musical instrument, instrument') ('radio telescope, radio reflector' 'instrument') ('West Highland white terrier' 'terrier') ('bow tie, bow-tie, bowtie' 'garment') ('golf ball' 'game equipment') ('barn' 'building, edifice') ('binder, ring-binder' 'protective covering, protective cover, protect') ('English foxhound' 'hound, hound dog') ('bison' 
'ungulate, hoofed mammal') ('screw' 'restraint, constraint') ('assault rifle, assault gun' 'instrument') ('diaper, nappy, napkin' 'garment') ('bighorn, bighorn sheep, cimarron, Rocky Mountain bighorn, '<concat>'Rocky Mountain sheep, Ovis canadensis' 'ungulate, hoofed mammal') ('Weimaraner' 'hound, hound dog') ('computer keyboard, keypad' 'electronic equipment') ('black-and-tan coonhound' 'hound, hound dog') ('little blue heron, Egretta caerulea' 'aquatic bird') ('breastplate, aegis, egis' 'protective covering, protective cover, protect') ('gasmask, respirator, gas helmet' 'protective covering, protective cover, protect') ('aircraft carrier, carrier, flattop, attack aircraft carrier' 'craft') ('iPod' 'electronic equipment') ('organ, pipe organ' 'musical instrument, instrument') ('wall clock' 'instrument') ('rock python, rock snake, Python sebae' 'snake, serpent, ophidian') ('squirrel monkey, Saimiri sciureus' 'primate') ('bikini, two-piece' 'garment') ('water buffalo, water ox, Asiatic buffalo, Bubalus bubalis' 'ungulate, hoofed mammal') ('upright, upright piano' 'musical instrument, instrument') ('chime, bell, gong' 'musical instrument, instrument') ('confectionery, confectionary, candy store' 'establishment') ('indigo bunting, indigo finch, indigo bird, Passerina cyanea' 'passerine, passeriform bird') ('green lizard, Lacerta viridis' 'saurian') ('Norwegian elkhound, elkhound' 'hound, hound dog') ('dome' 'protective covering, protective cover, protect') ('buckle' 'restraint, constraint') ('giant schnauzer' 'terrier') ('jean, blue jean, denim' 'garment') ('wire-haired fox terrier' 'terrier') ('African chameleon, Chamaeleo chamaeleon' 'saurian') ('trombone' 'musical instrument, instrument') ('oystercatcher, oyster catcher' 'aquatic bird') ('sweatshirt' 'garment') ('American egret, great white heron, Egretta albus' 'aquatic bird') ('marimba, xylophone' 'musical instrument, instrument') ('gazelle' 'ungulate, hoofed mammal') ('red-breasted merganser, Mergus serrator' 
'aquatic bird') ('tape player' 'electronic equipment') ('speedboat' 'craft') ('gondola' 'craft') ('night snake, Hypsiglena torquata' 'snake, serpent, ophidian') ('cannon' 'instrument') ("plunger, plumber's helper" 'tool') ('balloon' 'craft') ('toyshop' 'establishment') ('agama' 'saurian') ('fireboat' 'craft') ('bakery, bakeshop, bakehouse' 'establishment')]<line_sep>VAL_CLASSES=[('cab, hack, taxi, taxicab' 'motor vehicle, automotive vehicle') ('jeep, landrover' 'motor vehicle, automotive vehicle') ('English setter' 'sporting dog, gun dog') ('flat-coated retriever' 'sporting dog, gun dog') ('bassinet' 'furnishing') ('sports car, sport car' 'motor vehicle, automotive vehicle') ('golfcart, golf cart' 'motor vehicle, automotive vehicle') ('clumber, clumber spaniel' 'sporting dog, gun dog') ('puck, hockey puck' 'mechanism') ('reel' 'mechanism') ('Welsh springer spaniel' 'sporting dog, gun dog') ('car wheel' 'mechanism') ('wardrobe, closet, press' 'furnishing') ('go-kart' 'motor vehicle, automotive vehicle') ('switch, electric switch, electrical switch' 'mechanism') ('crib, cot' 'furnishing') ('laptop, laptop computer' 'machine') ('thresher, thrasher, threshing machine' 'machine') ('web site, website, internet site, site' 'machine') ('English springer, English springer spaniel' 'sporting dog, gun dog') ('iron, smoothing iron' 'durables, durable goods, consumer durables') ('<NAME>' 'sporting dog, gun dog') ('Labrador retriever' 'sporting dog, gun dog') ('<NAME>' 'sporting dog, gun dog') ('amphibian, amphibious vehicle' 'motor vehicle, automotive vehicle') ('file, file cabinet, filing cabinet' 'furnishing') ('harvester, reaper' 'machine') ('convertible' 'motor vehicle, automotive vehicle') ('paddlewheel, paddle wheel' 'mechanism') ('microwave, microwave oven' 'durables, durable goods, consumer durables') ('swing' 'mechanism') ('chiffonier, commode' 'furnishing') ('desktop computer' 'machine') ('gas pump, gasoline pump, petrol pump, island dispenser' 'mechanism') ('beach 
wagon, station wagon, wagon, estate car, beach waggon, station '<concat>'waggon, waggon' 'motor vehicle, automotive vehicle') ('carousel, carrousel, merry-go-round, roundabout, whirligig' 'mechanism') ("potter's wheel" 'mechanism') ('folding chair' 'furnishing') ('fire engine, fire truck' 'motor vehicle, automotive vehicle') ('slide rule, slipstick' 'machine') ('vizsla, Hungarian pointer' 'sporting dog, gun dog') ('waffle iron' 'durables, durable goods, consumer durables') ('trailer truck, tractor trailer, trucking rig, rig, articulated lorry, '<concat>'semi' 'motor vehicle, automotive vehicle') ('toilet seat' 'furnishing') ('medicine chest, medicine cabinet' 'furnishing') ('<NAME>' 'sporting dog, gun dog') ('Chesapeake Bay retriever' 'sporting dog, gun dog') ('cash machine, cash dispenser, automated teller machine, automatic '<concat>'teller machine, automated teller, automatic teller, ATM' 'machine') ('moped' 'motor vehicle, automotive vehicle') ('Model T' 'motor vehicle, automotive vehicle') ('bookcase' 'furnishing') ('ambulance' 'motor vehicle, automotive vehicle') ('German short-haired pointer' 'sporting dog, gun dog') ('dining table, board' 'furnishing') ('minivan' 'motor vehicle, automotive vehicle') ('police van, police wagon, paddy wagon, patrol wagon, wagon, '<concat>'black Maria' 'motor vehicle, automotive vehicle') ('entertainment center' 'furnishing') ('throne' 'furnishing') ('desk' 'furnishing') ('notebook, notebook computer' 'machine') ('snowplow, snowplough' 'motor vehicle, automotive vehicle') ('cradle' 'furnishing') ('abacus' 'machine') ('hand-held computer, hand-held microcomputer' 'machine') ('Dutch oven' 'durables, durable goods, consumer durables') ('toaster' 'durables, durable goods, consumer durables') ('barber chair' 'furnishing') ('vending machine' 'machine') ('four-poster' 'furnishing') ('rotisserie' 'durables, durable goods, consumer durables') ('hook, claw' 'mechanism') ('vacuum, vacuum cleaner' 'durables, durable goods, consumer 
durables') ('pickup, pickup truck' 'motor vehicle, automotive vehicle') ('table lamp' 'furnishing') ('rocking chair, rocker' 'furnishing') ('prayer rug, prayer mat' 'furnishing') ('moving van' 'motor vehicle, automotive vehicle') ('studio couch, day bed' 'furnishing') ('racer, race car, racing car' 'motor vehicle, automotive vehicle') ('park bench' 'furnishing') ('Irish setter, red setter' 'sporting dog, gun dog') ('refrigerator, icebox' 'durables, durable goods, consumer durables') ('china cabinet, china closet' 'furnishing') ('cocker spaniel, English cocker spaniel, cocker' 'sporting dog, gun dog') ('radiator' 'mechanism') ('Sussex spaniel' 'sporting dog, gun dog') ('hand blower, blow dryer, blow drier, hair dryer, hair drier' 'durables, durable goods, consumer durables') ('slot, one-armed bandit' 'machine') ('golden retriever' 'sporting dog, gun dog') ('curly-coated retriever' 'sporting dog, gun dog') ('limousine, limo' 'motor vehicle, automotive vehicle') ('washer, automatic washer, washing machine' 'durables, durable goods, consumer durables') ('garbage truck, dustcart' 'motor vehicle, automotive vehicle') ('dishwasher, dish washer, dishwashing machine' 'durables, durable goods, consumer durables') ('pinwheel' 'mechanism') ('espresso maker' 'durables, durable goods, consumer durables') ('tow truck, tow car, wrecker' 'motor vehicle, automotive vehicle')]<line_sep>TEST_CLASSES=[('Siberian husky' 'working dog') ('dung beetle' 'insect') ('jackfruit, jak, jack' 'solid') ('miniature pinscher' 'working dog') ('tiger shark, Galeocerdo cuvieri' 'aquatic vertebrate') ('weevil' 'insect') ('goldfish, Carassius auratus' 'aquatic vertebrate') ('schipperke' 'working dog') ('Tibetan mastiff' 'working dog') ('orange' 'solid') ('whiskey jug' 'vessel') ('hammerhead, hammerhead shark' 'aquatic vertebrate') ('bull mastiff' 'working dog') ('eggnog' 'substance') ('bee' 'insect') ('tench, Tinca tinca' 'aquatic vertebrate') ('chocolate sauce, chocolate syrup' 'substance') ("dragonfly, 
darning needle, devil's darning needle, sewing needle, "<concat>'snake feeder, snake doctor, mosquito hawk, skeeter hawk' 'insect') ('zucchini, courgette' 'solid') ('kelpie' 'working dog') ('stone wall' 'obstruction, obstructor, obstructer, impedimen') ('butternut squash' 'solid') ('mushroom' 'solid') ('Old English sheepdog, bobtail' 'working dog') ('dam, dike, dyke' 'obstruction, obstructor, obstructer, impedimen') ('picket fence, paling' 'obstruction, obstructor, obstructer, impedimen') ('espresso' 'substance') ('beer bottle' 'vessel') ('plate' 'substance') ('dough' 'substance') ('sandbar, sand bar' 'geological formation, formation') ('boxer' 'working dog') ('bathtub, bathing tub, bath, tub' 'vessel') ('beaker' 'vessel') ('bucket, pail' 'vessel') ('Border collie' 'working dog') ('sturgeon' 'aquatic vertebrate') ('worm fence, snake fence, snake-rail fence, Virginia fence' 'obstruction, obstructor, obstructer, impedimen') ('seashore, coast, seacoast, sea-coast' 'geological formation, formation') ('long-horned beetle, longicorn, longicorn beetle' 'insect') ('turnstile' 'obstruction, obstructor, obstructer, impedimen') ('groenendael' 'working dog') ('vase' 'vessel') ('teapot' 'vessel') ('water tower' 'vessel') ('strawberry' 'solid') ('burrito' 'substance') ('cauliflower' 'solid') ('volcano' 'geological formation, formation') ('valley, vale' 'geological formation, formation') ('head cabbage' 'solid') ('tub, vat' 'vessel') ('lacewing, lacewing fly' 'insect') ('coral reef' 'geological formation, formation') ('hot pot, hotpot' 'substance') ('custard apple' 'solid') ('monarch, monarch butterfly, milkweed butterfly, Danaus plexippus' 'insect') ('cricket' 'insect') ('pill bottle' 'vessel') ('walking stick, walkingstick, stick insect' 'insect') ('promontory, headland, head, foreland' 'geological formation, formation') ('malinois' 'working dog') ('pizza, pizza pie' 'substance') ('malamute, malemute, Alaskan malamute' 'working dog') ('kuvasz' 'working dog') ('trifle' 
'substance') ('fig' 'solid') ('komondor' 'working dog') ('ant, emmet, pismire' 'insect') ('electric ray, crampfish, numbfish, torpedo' 'aquatic vertebrate') ('<NAME>' 'solid') ('cockroach, roach' 'insect') ('stingray' 'aquatic vertebrate') ('red wine' 'substance') ('<NAME>, <NAME>' 'working dog') ('ice lolly, lolly, lollipop, popsicle' 'substance') ('bell pepper' 'solid') ('cup' 'substance') ('pomegranate' 'solid') ('Appenzeller' 'working dog') ('hay' 'substance') ('EntleBucher' 'working dog') ('sulphur butterfly, sulfur butterfly' 'insect') ('mantis, mantid' 'insect') ('Bernese mountain dog' 'working dog') ('banana' 'solid') ('water jug' 'vessel') ('cicada, cicala' 'insect') ('barracouta, snoek' 'aquatic vertebrate') ('washbasin, handbasin, washbowl, lavabo, wash-hand basin' 'vessel') ('wine bottle' 'vessel') ('Rottweiler' 'working dog') ('briard' 'working dog') ('puffer, pufferfish, blowfish, globefish' 'aquatic vertebrate') ('ground beetle, carabid beetle' 'insect') ('Bouvier des Flandres, Bouviers des Flandres' 'working dog') ('chainlink fence' 'obstruction, obstructor, obstructer, impedimen') ('damselfly' 'insect') ('grasshopper, hopper' 'insect') ('carbonara' 'substance') ('German shepherd, German shepherd dog, German police dog, alsatian' 'working dog') ('guacamole' 'substance') ('leaf beetle, chrysomelid' 'insect') ('caldron, cauldron' 'vessel') ('fly' 'insect') ('bannister, banister, balustrade, balusters, handrail' 'obstruction, obstructor, obstructer, impedimen') ('spaghetti squash' 'solid') ('coffee mug' 'vessel') ('gar, garfish, garpike, billfish, Lepisosteus osseus' 'aquatic vertebrate') ('barrel, cask' 'vessel') ('eel' 'aquatic vertebrate') ('rain barrel' 'vessel') ('coho, cohoe, coho salmon, blue jack, silver salmon, '<concat>'Oncorhynchus kisutch' 'aquatic vertebrate') ('water bottle' 'vessel') ('menu' 'substance') ('tiger beetle' 'insect') ('Great Dane' 'working dog') ('rock beauty, Holocanthus tricolor' 'aquatic vertebrate') ('anemone fish' 
'aquatic vertebrate') ('mortar' 'vessel') ('Eskimo dog, husky' 'working dog') ('affenpinscher, monkey pinscher, monkey dog' 'working dog') ('breakwater, groin, groyne, mole, bulwark, seawall, jetty' 'obstruction, obstructor, obstructer, impedimen') ('artichoke, globe artichoke' 'solid') ('broccoli' 'solid') ('French bulldog' 'working dog') ('coffeepot' 'vessel') ('cliff, drop, drop-off' 'geological formation, formation') ('ladle' 'vessel') ('sliding door' 'obstruction, obstructor, obstructer, impedimen') ('leafhopper' 'insect') ('collie' 'working dog') ('Doberman, <NAME>' 'working dog') ('pitcher, ewer' 'vessel') ('admiral' 'insect') ('cabbage butterfly' 'insect') ('geyser' 'geological formation, formation') ('cheeseburger' 'substance') ('grille, radiator grille' 'obstruction, obstructor, obstructer, impedimen') ('ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle' 'insect') ('great white shark, white shark, man-eater, man-eating shark, '<concat>'Carcharodon carcharias' 'aquatic vertebrate') ('pineapple, ananas' 'solid') ('cardoon' 'solid') ('pop bottle, soda bottle' 'vessel') ('lionfish' 'aquatic vertebrate') ('cucumber, cuke' 'solid') ('face powder' 'substance') ('Shetland sheepdog, Shetland sheep dog, Shetland' 'working dog') ('ringlet, ringlet butterfly' 'insect') ('Greater Swiss Mountain dog' 'working dog') ('alp' 'geological formation, formation') ('consomme' 'substance') ('potpie' 'substance') ('acorn squash' 'solid') ('ice cream, icecream' 'substance') ('lakeside, lakeshore' 'geological formation, formation') ('hotdog, hot dog, red hot' 'substance') ('rhinoceros beetle' 'insect') ('lycaenid, lycaenid butterfly' 'insect') ('lemon' 'solid')]<line_sep>@DATASETS.register_module()<class_stmt>TieredImageNetDataset(BaseFewShotDataset)<block_start>"""TieredImageNet dataset for few shot classification.
Args:
subset (str| list[str]): The classes of whole dataset are split into
three disjoint subset: train, val and test. If subset is a string,
only one subset data will be loaded. If subset is a list of
string, then all data of subset in list will be loaded.
Options: ['train', 'val', 'test']. Default: 'train'.
"""<line_sep>resource='https://github.com/renmengye/few-shot-ssl-public'<line_sep>TRAIN_CLASSES=TRAIN_CLASSES<line_sep>VAL_CLASSES=VAL_CLASSES<line_sep>TEST_CLASSES=TEST_CLASSES<def_stmt>__init__ self subset:Literal['train' 'test' 'val']='train' *args **kwargs<block_start><if_stmt>isinstance(subset str)<block_start>subset=[subset]<block_end><for_stmt>subset_ subset<block_start><assert_stmt>subset_<in>['train' 'test' 'val']<block_end>self.subset=subset<line_sep>self.GENERAL_CLASSES=self.get_general_classes()<line_sep>super().__init__(*args **kwargs)<block_end><def_stmt>get_classes self classes:Optional[Union[Sequence[str] str]]=<none><arrow>Sequence[str]<block_start>"""Get class names of current dataset.
Args:
classes (Sequence[str] | str | None): Three types of input
will correspond to different processing logics:
- If `classes` is a tuple or list, it will override the
CLASSES predefined in the dataset.
- If `classes` is None, we directly use pre-defined CLASSES
will be used by the dataset.
- If `classes` is a string, it is the path of a classes file
that contains the name of all classes. Each line of the file
contains a single class name.
Returns:
tuple[str] or list[str]: Names of categories of the dataset.
"""<if_stmt>classes<is><none><block_start>class_names=[]<for_stmt>subset_ self.subset<block_start><if_stmt>subset_<eq>'train'<block_start>class_names<augadd>[i[0]<for>i self.TRAIN_CLASSES]<block_end><elif_stmt>subset_<eq>'val'<block_start>class_names<augadd>[i[0]<for>i self.VAL_CLASSES]<block_end><elif_stmt>subset_<eq>'test'<block_start>class_names<augadd>[i[0]<for>i self.TEST_CLASSES]<block_end><else_stmt><block_start><raise>ValueError(f'invalid subset {subset_} only '<concat>f'support train, val or test.')<block_end><block_end><block_end><elif_stmt>isinstance(classes str)# take it as a file path
<block_start>class_names=mmcv.list_from_file(classes)<block_end><elif_stmt>isinstance(classes (tuple list))<block_start>class_names=classes<block_end><else_stmt><block_start><raise>ValueError(f'Unsupported type {type(classes)} of classes.')<block_end><return>class_names<block_end><def_stmt>get_general_classes self<arrow>List[str]<block_start>"""Get general classes of each classes."""<line_sep>general_classes=[]<for_stmt>subset_ self.subset<block_start><if_stmt>subset_<eq>'train'<block_start>general_classes<augadd>[i[1]<for>i self.TRAIN_CLASSES]<block_end><elif_stmt>subset_<eq>'val'<block_start>general_classes<augadd>[i[1]<for>i self.VAL_CLASSES]<block_end><elif_stmt>subset_<eq>'test'<block_start>general_classes<augadd>[i[1]<for>i self.TEST_CLASSES]<block_end><else_stmt><block_start><raise>ValueError(f'invalid subset {subset_} only '<concat>f'support train, val or test.')<block_end><block_end><return>general_classes<block_end><def_stmt>load_annotations self<arrow>List[Dict]<block_start>"""Load annotation according to the classes subset."""<line_sep>data_infos=[]<for_stmt>subset_ self.subset<block_start>labels_file=osp.join(self.data_prefix f'{subset_}_labels.pkl')<line_sep>img_bytes_file=osp.join(self.data_prefix f'{subset_}_images_png.pkl')<assert_stmt>osp.exists(img_bytes_file)<and>osp.exists(labels_file) f'Please download ann_file through {self.resource}.'<line_sep>data_infos=[]<with_stmt>open(labels_file 'rb')<as>labels open(img_bytes_file 'rb')<as>img_bytes<block_start>labels=pickle.load(labels)<line_sep>img_bytes=pickle.load(img_bytes)<line_sep>label_specific=labels['label_specific']<line_sep>label_general=labels['label_general']<line_sep>class_specific=labels['label_specific_str']<line_sep>class_general=labels['label_general_str']<line_sep>unzip_file_path=osp.join(self.data_prefix subset_)<line_sep>is_unzip_file=osp.exists(unzip_file_path)<if_stmt><not>is_unzip_file<block_start>msg=('Please use the provided script 
'<concat>'tools/classification/data/unzip_tiered_imagenet.py'<concat>'to unzip pickle file. Otherwise the whole pickle '<concat>'file may cost heavy memory usage when the model '<concat>'is trained with distributed parallel.')<line_sep>warnings.warn(msg)<block_end><for_stmt>i range(len(img_bytes))<block_start>class_specific_name=class_specific[label_specific[i]]<line_sep>class_general_name=class_general[label_general[i]]<line_sep>gt_label=self.class_to_idx[class_specific_name]<assert_stmt>class_general_name<eq>self.GENERAL_CLASSES[gt_label]<line_sep>filename=osp.join(subset_ f'{subset_}_image_{i}.byte')<line_sep>info={'img_prefix':self.data_prefix 'img_info':{'filename':filename} 'gt_label':np.array(gt_label dtype=np.int64) }<line_sep># if the whole pickle file isn't unzipped,
# image bytes of will be put into data_info
<if_stmt><not>is_unzip_file<block_start>info['img_bytes']=img_bytes[i]<block_end>data_infos.append(info)<block_end><block_end><block_end><return>data_infos<block_end><block_end> |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import os

import numpy as np
import tensorflow as tf
import six

from tensorpack.utils import logger
from tensorpack.tfutils.common import (
    get_op_tensor_name,
    get_global_step_var,
)
from tensorpack.tfutils.varmanip import SessionUpdate
from tensorpack.tfutils.sessinit import (
    SessionInit,
    SaverRestore,
    CheckpointReaderAdapter,
)

__all__ = ['SaverRestoreSizeRelaxed', 'read_parameter_val']


class SaverRestoreSizeRelaxed(SaverRestore):
    """Same as :class:`SaverRestore`, but with more relaxed constraints.

    It allows loading variables of different sizes, as long as they have the
    same number of dimensions.  For each dimension, the smaller of the two
    sizes is used: the checkpoint value is cropped and/or zero-padded so that
    its leading chunk fits into the variable.
    """

    def _run_init(self, sess):
        logger.info(
            "Restoring checkpoint with size relaxation from {} ...".format(self.path))

        def f(reader, name, v):
            """Load checkpoint tensor *name* into variable *v*, cropping or
            zero-padding every dimension so the shapes match."""
            val = reader.get_tensor(name)
            val_shape = list(val.shape)
            var_shape = v.get_shape().as_list()
            if var_shape != val_shape:
                n_dims = len(val_shape)
                assert len(var_shape) == n_dims, \
                    "Size Relaxation requires the variable match in number of dimensions"
                slices = []
                pad_params = []
                logger.info(
                    "Loading variable {} with var_shape {} and val_shape {}".format(
                        name, var_shape, val_shape))
                for var_s, val_s in zip(var_shape, val_shape):
                    if var_s > val_s:
                        # Variable is larger along this dim: zero-pad the value
                        # at the end.
                        pad_params.append([0, var_s - val_s])
                    else:
                        pad_params.append([0, 0])
                    # Keep only the leading chunk along this dim.
                    slices.append(slice(0, var_s))
                # BUG FIX: indexing an ndarray with a *list* of slices is
                # deprecated in NumPy (an error in recent versions); basic
                # multi-dimensional indexing requires a tuple.
                val = np.pad(val, pad_params, 'constant')[tuple(slices)]
            SessionUpdate.load_value_to_var(v, val)

        with sess.as_default():
            self._match_vars(f)


class AssignGlobalStep(SessionInit):
    """A :class:`SessionInit` that assigns a fixed value to the global step."""

    def __init__(self, global_step_val):
        # Target value; the assign op is created lazily in _setup_graph.
        self.global_step_val = global_step_val
        self.assign_op = None

    def _setup_graph(self):
        global_step = get_global_step_var()
        self.assign_op = global_step.assign(self.global_step_val)

    def _run_init(self, sess):
        sess.run(self.assign_op)


def read_parameter_val(model_dir, l_names):
    """Read the named parameters from the latest checkpoint in *model_dir*.

    Returns a list of numpy values, in the same order as ``l_names``.
    """
    model_path = tf.train.latest_checkpoint(model_dir)
    reader = tf.train.NewCheckpointReader(model_path)
    # use an adapter to standardize the name
    reader = CheckpointReaderAdapter(reader)
    return [reader.get_tensor(var_name) for var_name in l_names]
import ctypes
import os
import subprocess
import sys

if __name__ == "__main__":
    filename = os.path.abspath(sys.argv[1])
    funcnames = sys.argv[2:]
    if not funcnames:
        print("Usage: python test_optimize.py FILENAME FUNCNAME+")
        sys.exit(1)

    # Work from the repository root (this script lives in a subdirectory).
    os.chdir(os.path.join(os.path.dirname(__file__), ".."))

    # For C/C++ sources, first compile to LLVM IR (skipped when the .ll file
    # is already newer than the source).
    if filename.endswith(".c") or filename.endswith(".cpp"):
        ll_name = filename.rsplit(".c", 1)[0] + ".ll"
        ll_stale = (not os.path.exists(ll_name)
                    or os.stat(ll_name).st_mtime < os.stat(filename).st_mtime)
        if ll_stale:
            cmd = ["build/Release/llvm/bin/clang-10", "-g", "-O3",
                   "-Ibuild/cpython_bc_install/include/python3.8", "-DNDEBUG",
                   "-Wall", "-c", "-emit-llvm", "-S", filename]
            print(' '.join(cmd))
            subprocess.check_call(cmd)
        filename = ll_name

    # Load the interpreter library and declare the C entry points we need.
    nitrous_so = ctypes.PyDLL("libinterp.so")
    load_bitcode = nitrous_so.loadBitcode
    load_bitcode.argtypes = [ctypes.c_char_p]

    # Link the module with the ahead-of-time bitcode (cached by mtime).
    link_fn = filename + ".link.bc"
    link_stale = (not os.path.exists(link_fn)
                  or os.stat(link_fn).st_mtime < os.stat(filename).st_mtime)
    if link_stale:
        cmd = ["build/Release/llvm/bin/llvm-link", "aot/all.bc", filename, "-o", link_fn]
        print(" ".join(cmd))
        subprocess.check_call(cmd)
    load_bitcode(link_fn.encode("ascii"))

    init_jit = nitrous_so.initializeJIT
    init_jit.argtypes = [ctypes.c_long]
    init_jit(3)

    pystol_so = ctypes.PyDLL("libpystol.so")
    pystol_so.pystolGlobalPythonSetup()

    # Run the optimizer over every requested function.
    optimize = nitrous_so["optimizeBitcode"]
    optimize.argtypes = [ctypes.c_char_p]
    for funcname in funcnames:
        optimize(funcname.encode("ascii"))
import snap

# Build a complete directed multi-network on 10 nodes.
Graph = snap.GenFull(snap.PNEANet, 10)
Src = 1
Dst = 2

# Look up the (Src, Dst) edge and remember its id.
EI = Graph.GetEI(Src, Dst)
EId = EI.GetId()

print(EId, Graph.GetEI(Src, Dst).GetId())
print(Graph.GetEI(Src, Dst).GetSrcNId(), Graph.GetEI(Src, Dst).GetDstNId())
print(Graph.GetEI(EId).GetSrcNId(), Graph.GetEI(EId).GetDstNId())

# The id-based and endpoint-based lookups must agree on id and endpoints.
if EId != Graph.GetEI(Src, Dst).GetId():
    print("*** error1")
if Graph.GetEI(Src, Dst).GetSrcNId() != Graph.GetEI(EId).GetSrcNId():
    print("*** error2")
if Graph.GetEI(Src, Dst).GetDstNId() != Graph.GetEI(EId).GetDstNId():
    print("*** error3")
from abc import ABCMeta, abstractmethod
import json
import logging
import copy

import boto3
import botocore
from botocore.exceptions import ClientError

from endgame.shared.response_message import ResponseMessage
from endgame.shared.list_resources_response import ListResourcesResponse
from endgame.shared.response_message import ResponseGetRbp

logger = logging.getLogger(__name__)


class ResourceType(object):
    """Abstract base for one AWS resource that carries a resource-based
    policy (RBP).  Subclasses supply the fetch (``_get_rbp``) and write
    (``set_rbp``) operations; this class implements the shared
    add-principal / undo flows on top of them."""
    __meta_class__ = ABCMeta

    def __init__(self, name: str, resource_type: str, service: str, region: str,
                 client: boto3.Session.client, current_account_id: str,
                 override_action: str = None, include_resource_block: bool = True,
                 override_resource_block: str = None,
                 override_account_id_instead_of_principal: bool = False):
        self.name = name
        self.resource_type = resource_type
        self.client = client
        self.current_account_id = current_account_id
        self.service = service
        self.region = region
        # Service-specific overrides, set by the per-service subclasses:
        self.include_resource_block = include_resource_block  # Override for IAM
        self.override_action = override_action  # Override for IAM
        self.override_resource_block = override_resource_block  # Override for EFS
        self.override_account_id_instead_of_principal = override_account_id_instead_of_principal  # Override for logs, sns, sqs, and lambda
        self.policy_document = self._get_rbp().policy_document
        # Keep a deep, JSON-normalized copy of the policy as first seen, so it
        # can be reported alongside any later modification.
        self.original_policy = copy.deepcopy(
            json.loads(json.dumps(self.policy_document.original_policy)))

    def __str__(self):
        return '%s' % (json.dumps(json.loads(self.policy_document.__str__())))

    @abstractmethod
    def _get_rbp(self) -> ResponseGetRbp:
        raise NotImplementedError("Must override _get_rbp")

    @property
    @abstractmethod
    def arn(self) -> str:
        raise NotImplementedError("Must override arn")

    @abstractmethod
    def set_rbp(self, evil_policy: dict) -> ResponseMessage:
        raise NotImplementedError("Must override set_rbp")

    def add_myself(self, evil_principal: str, dry_run: bool = False) -> ResponseMessage:
        """Add your rogue principal to the AWS resource"""
        logger.debug(f"Adding {evil_principal} to {self.arn}")
        evil_policy = self.policy_document.policy_plus_evil_principal(
            victim_account_id=self.current_account_id,
            evil_principal=evil_principal,
            resource_arn=self.arn)
        if not dry_run:
            set_rbp_response = self.set_rbp(evil_policy=evil_policy)
            operation = "ADD_MYSELF"
            message = set_rbp_response.message
            success = set_rbp_response.success
        else:
            operation = "DRY_RUN_ADD_MYSELF"
            message = "DRY_RUN_ADD_MYSELF"
            try:
                # A dry run only verifies that the current policy is readable.
                success = self._get_rbp().success
            except botocore.exceptions.ClientError as error:
                message = str(error)
                success = False
        return ResponseMessage(
            message=message, operation=operation, success=success,
            evil_principal=evil_principal, victim_resource_arn=self.arn,
            original_policy=self.original_policy, updated_policy=evil_policy,
            resource_type=self.resource_type, resource_name=self.name,
            service=self.service)

    def undo(self, evil_principal: str, dry_run: bool = False) -> ResponseMessage:
        """Remove all traces"""
        logger.debug(f"Removing {evil_principal} from {self.arn}")
        policy_stripped = self.policy_document.policy_minus_evil_principal(
            victim_account_id=self.current_account_id,
            evil_principal=evil_principal,
            resource_arn=self.arn)
        if not dry_run:
            operation = "UNDO"
            set_rbp_response = self.set_rbp(evil_policy=policy_stripped)
            message = set_rbp_response.message
            success = set_rbp_response.success
        else:
            operation = "DRY_RUN_UNDO"
            message = "DRY_RUN_UNDO"
            success = True
        return ResponseMessage(
            message=message, operation=operation, success=success,
            evil_principal=evil_principal, victim_resource_arn=self.arn,
            original_policy=self.original_policy, updated_policy=policy_stripped,
            resource_type=self.resource_type, resource_name=self.name,
            service=self.service)


class ResourceTypes(object):
    """Abstract base for a listing of all resources of a given type in one
    account/region."""
    __meta_class__ = ABCMeta

    def __init__(self, client: boto3.Session.client, current_account_id: str, region: str):
        self.client = client
        self.current_account_id = current_account_id
        self.region = region

    def __str__(self):
        return '%s' % (json.dumps(self.resources.arn))

    @property
    @abstractmethod
    def resources(self) -> [ListResourcesResponse]:
        raise NotImplementedError("Must override property 'resources'")
# Registry class
# (adapted from the Registry utility used in Detectron2)
class Registry:
    """A name -> object mapping for registering classes/functions by name.

    Objects can be registered either directly (``registry.register(obj)``)
    or via the decorator form (``@registry.register()``), and retrieved
    later with :meth:`get` or ``registry[name]``.
    """

    def __init__(self, name):
        # Human-readable registry name, used in error messages.
        self._name = name
        # Mapping of registered name -> object.
        self._obj_map = {}

    def _do_register(self, name, obj):
        # Refuse duplicate registrations: silently overwriting would make
        # lookups depend on import order.
        assert (name not in self._obj_map), "The object named: {} was already registered in {} registry! ".format(
            name, self._name)
        self._obj_map[name] = obj

    def register(self, obj=None):
        """
        Register the given object under the name obj.__name__.
        Can be used as either a decorator or not.
        """
        if obj is None:  # used as a decorator
            def deco(func_or_class):
                name = func_or_class.__name__
                self._do_register(name, func_or_class)
                return func_or_class
            return deco
        name = obj.__name__
        self._do_register(name, obj)

    def get(self, name):
        """Return the object registered under *name*; raise KeyError if absent."""
        ret = self._obj_map.get(name)
        if ret is None:
            # BUG FIX: message previously read "No object names {} found".
            raise KeyError(
                "No object named {} found in {} registry!".format(name, self._name))
        return ret

    def __getitem__(self, name):
        return self.get(name)

    def keys(self):
        return self._obj_map.keys()
import torch.nn as nn
import torch.nn.functional as F


class FPA(nn.Module):
    """Feature Pyramid Attention.

    Attends over the coarsest backbone feature map with a small three-level
    pyramid (7x7 / 5x5 / 3x3 convolutions) plus a global-pooling branch, and
    uses the result to re-weight a 1x1 "master" projection of the input.
    """

    def __init__(self, channels=2048):
        """
        :param channels: number of input (and output) channels.
        """
        super(FPA, self).__init__()
        channels_mid = int(channels / 4)
        self.channels_cond = channels

        # Master branch: plain 1x1 projection of the input.
        self.conv_master = nn.Conv2d(self.channels_cond, channels, kernel_size=1, bias=False)
        self.bn_master = nn.BatchNorm2d(channels)

        # Global-pooling branch (no BatchNorm: spatial size is 1x1 after pooling).
        self.conv_gpb = nn.Conv2d(self.channels_cond, channels, kernel_size=1, bias=False)

        # Pyramid, downsampling path (stride 2 at every level).
        self.conv7x7_1 = nn.Conv2d(self.channels_cond, channels_mid, kernel_size=(7, 7), stride=2, padding=3, bias=False)
        self.bn1_1 = nn.BatchNorm2d(channels_mid)
        self.conv5x5_1 = nn.Conv2d(channels_mid, channels_mid, kernel_size=(5, 5), stride=2, padding=2, bias=False)
        self.bn2_1 = nn.BatchNorm2d(channels_mid)
        self.conv3x3_1 = nn.Conv2d(channels_mid, channels_mid, kernel_size=(3, 3), stride=2, padding=1, bias=False)
        self.bn3_1 = nn.BatchNorm2d(channels_mid)

        # Pyramid, refinement path (stride 1 at every level).
        self.conv7x7_2 = nn.Conv2d(channels_mid, channels_mid, kernel_size=(7, 7), stride=1, padding=3, bias=False)
        self.bn1_2 = nn.BatchNorm2d(channels_mid)
        self.conv5x5_2 = nn.Conv2d(channels_mid, channels_mid, kernel_size=(5, 5), stride=1, padding=2, bias=False)
        self.bn2_2 = nn.BatchNorm2d(channels_mid)
        self.conv3x3_2 = nn.Conv2d(channels_mid, channels_mid, kernel_size=(3, 3), stride=1, padding=1, bias=False)
        self.bn3_2 = nn.BatchNorm2d(channels_mid)

        # Projection of the merged pyramid back to `channels`.
        self.bn_upsample_1 = nn.BatchNorm2d(channels)
        self.conv1x1_up1 = nn.Conv2d(channels_mid, channels, kernel_size=(1, 1), stride=1, padding=0, bias=False)

        self.relu = nn.ReLU(inplace=True)

    def forward(self, x):
        """
        :param x: input feature maps, shape [b, channels, h, w].
        :return: attended feature maps, same shape as the input.
        """
        # Master branch.
        master = self.bn_master(self.conv_master(x))

        # Global-pooling branch -> [b, channels, 1, 1].
        pooled = nn.AvgPool2d(x.shape[2:])(x).view(x.shape[0], self.channels_cond, 1, 1)
        pooled = self.conv_gpb(pooled)

        # Pyramid level 1 (7x7).
        down1 = self.relu(self.bn1_1(self.conv7x7_1(x)))
        lvl1 = self.bn1_2(self.conv7x7_2(down1))

        # Pyramid level 2 (5x5).
        down2 = self.relu(self.bn2_1(self.conv5x5_1(down1)))
        lvl2 = self.bn2_2(self.conv5x5_2(down2))

        # Pyramid level 3 (3x3).
        down3 = self.relu(self.bn3_1(self.conv3x3_1(down2)))
        lvl3 = self.bn3_2(self.conv3x3_2(down3))

        # Merge the pyramid bottom-up with bilinear upsampling.
        merge2 = self.relu(lvl2 + F.interpolate(lvl3, size=lvl2.shape[-2:], mode='bilinear', align_corners=False))
        merge1 = self.relu(lvl1 + F.interpolate(merge2, size=lvl1.shape[-2:], mode='bilinear', align_corners=False))
        attention = F.interpolate(merge1, size=master.shape[-2:], mode='bilinear', align_corners=False)
        attention = self.relu(self.bn_upsample_1(self.conv1x1_up1(attention)))

        # Re-weight the master branch and add the global context.
        return self.relu(master * attention + pooled)


class GAU(nn.Module):
    """Global Attention Upsample block."""

    def __init__(self, channels_high, channels_low, upsample=True):
        super(GAU, self).__init__()
        self.upsample = upsample

        # Refinement of the low-level features.
        self.conv3x3 = nn.Conv2d(channels_low, channels_low, kernel_size=3, padding=1, bias=False)
        self.bn_low = nn.BatchNorm2d(channels_low)

        # Channel-attention projection of the pooled high-level features.
        self.conv1x1 = nn.Conv2d(channels_high, channels_low, kernel_size=1, padding=0, bias=False)

        if upsample:
            # Learned 2x upsampling of the high-level path.
            self.conv_upsample = nn.ConvTranspose2d(channels_high, channels_low, kernel_size=4, stride=2, padding=1, bias=False)
            self.bn_upsample = nn.BatchNorm2d(channels_low)
        else:
            # Same-resolution channel reduction of the high-level path.
            self.conv_reduction = nn.Conv2d(channels_high, channels_low, kernel_size=1, padding=0, bias=False)
            self.bn_reduction = nn.BatchNorm2d(channels_low)
        self.relu = nn.ReLU(inplace=True)

    def forward(self, fms_high, fms_low, fm_mask=None):
        """
        Use the high-level features (rich in category information) to weight
        the low-level features (rich in pixel localization information).

        :param fms_high: high-level feature maps.
        :param fms_low: low-level feature maps.
        :param fm_mask: unused; kept for interface compatibility.
        :return: fused (and, if configured, upsampled) feature maps.
        """
        b, c, h, w = fms_high.shape

        # Channel-attention vector from globally pooled high-level features.
        # NOTE: BatchNorm cannot be applied here - the spatial size is 1x1.
        gate = nn.AvgPool2d(fms_high.shape[2:])(fms_high).view(len(fms_high), c, 1, 1)
        gate = self.relu(self.conv1x1(gate))

        # Re-weight the 3x3-refined low-level features.
        attended = self.bn_low(self.conv3x3(fms_low)) * gate

        if self.upsample:
            high = self.bn_upsample(self.conv_upsample(fms_high))
        else:
            high = self.bn_reduction(self.conv_reduction(fms_high))
        return self.relu(high + attended)


class PAN(nn.Module):
    """Pyramid Attention Network decoder: FPA on the coarsest feature map,
    then a chain of GAU blocks that progressively upsample and fuse the
    finer backbone maps."""

    def __init__(self):
        super(PAN, self).__init__()
        # Channel counts of the backbone feature maps, coarsest first.
        channels_blocks = [2048, 1024, 512, 256]

        self.fpa = FPA(channels=channels_blocks[0])

        self.gau_block1 = GAU(channels_blocks[0], channels_blocks[1])
        self.gau_block2 = GAU(channels_blocks[1], channels_blocks[2])
        self.gau_block3 = GAU(channels_blocks[2], channels_blocks[3])
        self.gau = [self.gau_block1, self.gau_block2, self.gau_block3]

    def forward(self, fms):
        """
        :param fms: backbone feature maps, finest first (they are processed
            in reverse order), each of shape [b, c, h, w].
        :return: tuple of decoded feature maps, finest first.
        """
        feats = []
        fm_high = None
        for idx, fm_low in enumerate(fms[::-1]):
            if idx == 0:
                fm_high = self.fpa(fm_low)
            else:
                fm_high = self.gau[idx - 1](fm_high, fm_low)
            feats.append(fm_high)
        feats.reverse()
        return tuple(feats)
import subprocess


def grep(filename, arg):
    """Search *filename* for *arg* with ``grep -n``.

    Returns the ``(stdout, stderr)`` pair: stdout as bytes with line-number
    prefixes, stderr as ``None`` because it is not captured.
    """
    completed = subprocess.run(
        ['grep', '-n', arg, filename],
        stdout=subprocess.PIPE,
        check=False,
    )
    return completed.stdout, completed.stderr
"""
The tool to check the availability or syntax of domain, IP or URL.
::
βββββββ βββ ββββββββββββββ βββββββ βββ ββββββββββββββββββββββ βββ ββββββββ
ββββββββββββ βββββββββββββββ ββββββββ ββββββββββββββββββββββββββββββ ββββββββ
ββββββββ βββββββ ββββββ βββ βββββββββ ββββββ ββββββ βββββββββββ ββββββ
βββββββ βββββ ββββββ βββ ββββββββββββββββ ββββββ βββββββββββ ββββββ
βββ βββ βββ ββββββββββββ ββββββββββββββββββββββββββββββββββββββββββββββ
βββ βββ βββ βββββββ βββ βββββ ββββββββββββββββββββββ ββββββββββββββββ
Provides some facilities for the storage module.
Author:
<NAME>, @funilrys, contactTATAfunilrysTODTODcom
Special thanks:
https://pyfunceble.github.io/#/special-thanks
Contributors:
https://pyfunceble.github.io/#/contributors
Project link:
https://github.com/funilrys/PyFunceble
Project documentation:
https://pyfunceble.readthedocs.io/en/dev/
Project homepage:
https://pyfunceble.github.io/
License:
::
Copyright 2017, 2018, 2019, 2020, 2021 <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""<import_stmt>os<import_from_stmt>PyFunceble.helpers.directory DirectoryHelper<import_from_stmt>PyFunceble.helpers.environment_variable EnvironmentVariableHelper<import_from_stmt>PyFunceble.utils.platform PlatformUtility<import_from_stmt>PyFunceble.utils.version VersionUtility<def_stmt>get_config_directory * project_name:str project_version:str<arrow>str# pragma: no cover ## Not relevant
<block_start>"""
Provides the location of the configuration directory.
"""<line_sep># pylint: disable=too-many-branches
env_var_helper=EnvironmentVariableHelper()<line_sep>directory_helper=DirectoryHelper()<if_stmt>env_var_helper.set_name("PYFUNCEBLE_CONFIG_DIR").exists()<block_start>config_directory=env_var_helper.get_value()<block_end><elif_stmt>env_var_helper.set_name("PYFUNCEBLE_OUTPUT_DIR").exists()<block_start>config_directory=env_var_helper.get_value()<block_end><elif_stmt>(VersionUtility(project_version).is_cloned()<or>env_var_helper.set_name("TRAVIS_BUILD_DIR").exists()<or>env_var_helper.set_name("CI_PROJECT_DIR").exists()<and>env_var_helper.set_name("GITLAB_CI").exists())<block_start>config_directory=directory_helper.get_current(with_end_sep=<true>)<block_end><else_stmt><block_start><if_stmt>PlatformUtility.is_unix()<block_start>config_dir_path=os.path.expanduser(os.path.join("~" ".config"))<if_stmt>directory_helper.set_path(config_dir_path).exists()<block_start>config_directory=config_dir_path<block_end><elif_stmt>directory_helper.set_path(os.path.expanduser("~")).exists()<block_start>config_directory=directory_helper.join_path(".")<block_end><else_stmt><block_start>config_directory=directory_helper.get_current(with_end_sep=<true>)<block_end><block_end><elif_stmt>PlatformUtility.is_windows()<block_start><if_stmt>env_var_helper.set_name("APPDATA").exists()<block_start>config_directory=env_var_helper.get_value()<block_end><else_stmt><block_start>config_directory=directory_helper.get_current(with_end_sep=<true>)<block_end><block_end><else_stmt><block_start>config_directory=directory_helper.get_current(with_end_sep=<true>)<block_end><if_stmt><not>config_directory.endswith(os.sep)<block_start>config_directory<augadd>os.sep<block_end>config_directory<augadd>project_name+os.sep<if_stmt><not>directory_helper.set_path(config_directory).exists()<block_start>directory_helper.create()<block_end><block_end><if_stmt><not>config_directory.endswith(os.sep)<block_start>config_directory<augadd>os.sep<block_end><return>config_directory<block_end> |
# -*- coding: utf-8 -*-
#@+leo-ver=5-thin
#@+node:ekr.20201129023817.1: * @file leoTest2.py
#@@first
"""
Support for Leo's new unit tests, contained in leo/unittests/test_*.py.

Run these tests using unittest or pytest from the command line.
See g.run_unit_tests and g.run_coverage_tests.

This file also contains classes that convert @test nodes in unitTest.leo to
tests in leo/unittest. Eventually these classes will move to scripts.leo.
"""
import time
import unittest

from leo.core import leoGlobals as g
from leo.core import leoApp
#@+others
#@+node:ekr.20201130195111.1: ** function.create_app
def create_app(gui_name='null'):
    """
    Create the Leo application, g.app, the Gui, g.app.gui, and a commander.

    This method is expensive (0.5 sec) only the first time it is called.
    Thereafter, recreating g.app, g.app.gui, and new commands is fast.

    :param gui_name: 'null' (default) or 'qt'.
    :return: a new Commands instance.
    :raises ValueError: if the LeoID can not be set.
    :raises TypeError: for an unknown gui_name.
    """
    trace = False
    t1 = time.process_time()
    #
    # Set g.unitTesting *early*, for guards, to suppress the splash screen, etc.
    g.unitTesting = True
    # Create g.app now, to avoid circular dependencies.
    g.app = leoApp.LeoApp()
    # Late imports.
    from leo.core import leoConfig
    from leo.core import leoNodes
    from leo.core import leoCommands
    from leo.core.leoGui import NullGui
    if gui_name == 'qt':
        from leo.plugins.qt_gui import LeoQtGui
    t2 = time.process_time()
    g.app.recentFilesManager = leoApp.RecentFilesManager()
    g.app.loadManager = lm = leoApp.LoadManager()
    lm.computeStandardDirectories()
    if not g.app.setLeoID(useDialog=False, verbose=True):
        raise ValueError("unable to set LeoID.")
    g.app.nodeIndices = leoNodes.NodeIndices(g.app.leoID)
    g.app.config = leoConfig.GlobalConfigManager()
    g.app.db = g.NullObject('g.app.db')
    g.app.pluginsController = g.NullObject('g.app.pluginsController')
    g.app.commander_cacher = g.NullObject('g.app.commander_cacher')
    if gui_name == 'null':
        g.app.gui = NullGui()
    elif gui_name == 'qt':
        g.app.gui = LeoQtGui()
    else:
        raise TypeError(f"create_gui: unknown gui_name: {gui_name!r}")
    t3 = time.process_time()
    # Create a dummy commander, to do the imports in c.initObjects.
    # Always use a null gui to avoid screen flash.
    # setUp will create another commander.
    c = leoCommands.Commands(fileName=None, gui=g.app.gui)
    # Create minimal config dictionaries.
    settings_d, bindings_d = lm.createDefaultSettingsDicts()
    lm.globalSettingsDict = settings_d
    lm.globalBindingsDict = bindings_d
    c.config.settingsDict = settings_d
    c.config.bindingsDict = bindings_d
    assert g.unitTesting is True  # Defensive.
    t4 = time.process_time()
    # Trace times. This trace happens only once:
    #     imports: 0.016
    #         gui: 0.000
    #   commander: 0.469
    #       total: 0.484
    if trace and t4 - t3 > 0.1:
        print('create_app:\n'
              f"  imports: {(t2-t1):.3f}\n"
              f"      gui: {(t3-t2):.3f}\n"
              # BUG FIX: the commander phase runs from t3 to t4; the old code
              # printed t4-t2, which also included gui creation time.
              f"commander: {(t4-t3):.3f}\n"
              f"    total: {(t4-t1):.3f}\n")
    return c
#@+node:ekr.20210902014907.1: ** class LeoUnitTest(unittest.TestCase)
<class_stmt>LeoUnitTest(unittest.TestCase)<block_start>"""
The base class for all unit tests in Leo.
Contains setUp/tearDown methods and various utilites.
"""<line_sep>#@+others
#@+node:ekr.20210901140855.2: *3* LeoUnitTest.setUp, tearDown & setUpClass
@classmethod<def_stmt>setUpClass cls<block_start>create_app(gui_name='null')<block_end><def_stmt>setUp self<block_start>"""
Create a commander using a **null** gui, regardless of g.app.gui.
Create the nodes in the commander.
"""<line_sep># Do the import here to avoid circular dependencies.
<import_from_stmt>leo.core leoCommands<import_from_stmt>leo.core.leoGui NullGui<line_sep># Set g.unitTesting *early*, for guards.
g.unitTesting=<true><line_sep># Create a new commander for each test.
# This is fast, because setUpClass has done all the imports.
self.c=c=leoCommands.Commands(fileName=<none> gui=NullGui())<line_sep># Init the 'root' and '@settings' nodes.
self.root_p=c.rootPosition()<line_sep>self.root_p.h='root'<line_sep>self.settings_p=self.root_p.insertAfter()<line_sep>self.settings_p.h='@settings'<line_sep># Select the 'root' node.
c.selectPosition(self.root_p)<block_end><def_stmt>tearDown self<block_start>self.c=<none><block_end>#@+node:ekr.20210830151601.1: *3* LeoUnitTest.create_test_outline
<def_stmt>create_test_outline self<block_start>p=self.c.p<line_sep># Create the following outline:
#
# root
# child clone a
# node clone 1
# child b
# child clone a
# node clone 1
# child c
# node clone 1
# child clone a
# node clone 1
# child b
# child clone a
# node clone 1
<assert_stmt>p<eq>self.root_p<assert_stmt>p.h<eq>'root'<line_sep># Child a
child_clone_a=p.insertAsLastChild()<line_sep>child_clone_a.h='child clone a'<line_sep>node_clone_1=child_clone_a.insertAsLastChild()<line_sep>node_clone_1.h='node clone 1'<line_sep># Child b
child_b=p.insertAsLastChild()<line_sep>child_b.h='child b'<line_sep># Clone 'child clone a'
clone=child_clone_a.clone()<line_sep>clone.moveToLastChildOf(child_b)<line_sep># Child c
child_c=p.insertAsLastChild()<line_sep>child_c.h='child c'<line_sep># Clone 'node clone 1'
clone=node_clone_1.clone()<line_sep>clone.moveToLastChildOf(child_c)<line_sep># Clone 'child clone a'
clone=child_clone_a.clone()<line_sep>clone.moveToLastChildOf(p)<line_sep># Clone 'child b'
clone=child_b.clone()<line_sep>clone.moveToLastChildOf(p)<block_end>#@+node:ekr.20210831101111.1: *3* LeoUnitTest.dump_tree
<def_stmt>dump_tree self tag=''<block_start>c=self.c<line_sep>print('')<line_sep>g.trace(tag)<for_stmt>p c.all_positions()<block_start>print(f"clone? {int(p.isCloned())} {' '<times>p.level()} {p.h}")<block_end><block_end>#@-others
<block_end>#@-others
#@-leo
|
<import_stmt>ctypes<import_stmt>netifaces<import_stmt>NetworkManager# pylint: disable=import-error
<import_from_stmt>xv_leak_tools.exception XVEx<import_from_stmt>xv_leak_tools.log L<import_from_stmt>xv_leak_tools.process check_subprocess<class_stmt>_NetworkObject<block_start><def_stmt>__init__ self conn<block_start>self._settings=conn.GetSettings()<line_sep>self._id=self._settings['connection']['id']<line_sep>self._uuid=self._settings['connection']['uuid']<block_end><def_stmt>__str__ self<block_start><return>"{} ({})".format(self.id() self.uuid())<block_end><def_stmt>__repr__ self<block_start><return>str(self)<block_end><def_stmt>__eq__ self other<block_start><return>self.uuid()<eq>other.uuid()<block_end><def_stmt>uuid self<block_start><return>self._uuid<block_end><def_stmt>id self<block_start><return>self._id<block_end><def_stmt>name self# TODO: Decide on this API.
<block_start><return>self._id<block_end><block_end><class_stmt>NetworkService(_NetworkObject)<block_start><def_stmt>active self<block_start>active_conns=NetworkManager.NetworkManager.ActiveConnections<line_sep>active_conns=[NetworkService(conn.Connection)<for>conn active_conns]<if_stmt>self<in>active_conns<block_start><return><true><block_end><return><false><block_end><def_stmt>enable self<block_start>L.debug("Enabling connection {}".format(self.name()))<line_sep>check_subprocess(['nmcli' 'connection' 'up' self.name()])<block_end><def_stmt>disable self<block_start>L.debug("Disabling connection {}".format(self.name()))<line_sep>check_subprocess(['nmcli' 'connection' 'down' self.name()])<block_end><def_stmt>interface self# TODO: Reject this idea? Maybe interfaces should be chosen without
# regard to connection status, if NM can't be trusted.
# In which case, tests that get a list of interfaces should just use
# netifaces directly.
<block_start><try_stmt><block_start><return>self._settings['connection']['interface-name']<block_end><except_stmt>KeyError<block_start>connection_type=self._settings['connection']['type']<line_sep># TODO: Test this on different types.
mac_address=self._settings[connection_type]['mac-address']<for_stmt>iface netifaces.interfaces()<block_start>iface_mac=netifaces.ifaddresses(iface)[netifaces.AF_LINK][0]['addr'].lower()<if_stmt>mac_address.lower()<eq>iface_mac<block_start><return>iface<block_end><block_end><block_end><raise>XVEx("Couldn't find any connection interfaces")<block_end><def_stmt>enable_interface self<block_start>L.debug("Enabling interface {}".format(self.interface()))<line_sep># TODO: Move to unix tools or use "ip link set dev iface up"?
check_subprocess(['ifconfig' self.interface() 'up'])<block_end><def_stmt>disable_interface self<block_start>L.debug("Disabling interface {}".format(self.interface()))<line_sep># TODO: Move to unix tools or use "ip link set dev iface up"?
check_subprocess(['ifconfig' self.interface() 'down'])<block_end><block_end><class_stmt>LinuxNetwork<block_start>@staticmethod<def_stmt>network_services_in_priority_order <block_start>conns=NetworkManager.Settings.ListConnections()<line_sep>conns=list(filter(<lambda>x:'autoconnect-priority'<in>x.GetSettings()['connection'] conns))<line_sep># NetworkManager uses int32s so we need to "cast" the autoconnect-priority value.
<def_stmt>uint32 signed_integer<block_start><return>int(ctypes.c_uint32(signed_integer).value)<block_end>conns.sort(key=<lambda>x:uint32(x.GetSettings()['connection']['autoconnect-priority']) reverse=<true>)<line_sep><return>[NetworkService(conn)<for>conn conns]<block_end><block_end> |
# pylint: disable=missing-function-docstring, missing-module-docstring/
a=[i<times>j<for>i range(1 3)<for>j range(1 4)<for>k range(i j)]<line_sep>n=5<line_sep>a=[i<times>j<for>i range(1 n)<for>j range(1 4)<for>k range(i j)]<line_sep> |
<import_stmt>factory<import_stmt>factory.django<import_from_stmt>pycon.schedule.models Session SessionRole<import_from_stmt>symposion.schedule.tests.factories DayFactory<class_stmt>SessionFactory(factory.django.DjangoModelFactory)<block_start><class_stmt>Meta<block_start>model=Session<block_end>day=factory.SubFactory(DayFactory)<block_end><class_stmt>SessionRoleFactory(factory.django.DjangoModelFactory)<block_start><class_stmt>Meta<block_start>model=SessionRole<block_end><block_end> |
<import_from_stmt>arm.logicnode.arm_nodes *<class_stmt>GetTilesheetStateNode(ArmLogicTreeNode)<block_start>"""Returns the information about the current tilesheet of the given object."""<line_sep>bl_idname='LNGetTilesheetStateNode'<line_sep>bl_label='Get Tilesheet State'<line_sep>arm_version=1<line_sep>arm_section='tilesheet'<def_stmt>arm_init self context<block_start>self.add_input('ArmNodeSocketObject' 'Object')<line_sep>self.add_output('ArmStringSocket' 'Name')<line_sep>self.add_output('ArmIntSocket' 'Frame')<line_sep>self.add_output('ArmBoolSocket' 'Is Paused')<block_end><block_end> |
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""test cases for Poisson distribution"""<import_stmt>numpy<as>np<import_from_stmt>scipy stats<import_stmt>mindspore.context<as>context<import_stmt>mindspore.nn<as>nn<import_stmt>mindspore.nn.probability.distribution<as>msd<import_from_stmt>mindspore Tensor<import_from_stmt>mindspore dtype<line_sep>context.set_context(mode=context.GRAPH_MODE device_target="Ascend")<class_stmt>Prob(nn.Cell)<block_start>"""
Test class: probability of Poisson distribution.
"""<def_stmt>__init__ self<block_start>super(Prob self).__init__()<line_sep>self.p=msd.Poisson([0.5] dtype=dtype.float32)<block_end><def_stmt>construct self x_<block_start><return>self.p.prob(x_)<block_end><block_end><def_stmt>test_pdf <block_start>"""
Test pdf.
"""<line_sep>poisson_benchmark=stats.poisson(mu=0.5)<line_sep>expect_pdf=poisson_benchmark.pmf([-1.0 0.0 1.0]).astype(np.float32)<line_sep>pdf=Prob()<line_sep>x_=Tensor(np.array([-1.0 0.0 1.0]).astype(np.float32) dtype=dtype.float32)<line_sep>output=pdf(x_)<line_sep>tol=1e-6<assert_stmt>(np.abs(output.asnumpy()-expect_pdf)<l>tol).all()<block_end><class_stmt>LogProb(nn.Cell)<block_start>"""
Test class: log probability of Poisson distribution.
"""<def_stmt>__init__ self<block_start>super(LogProb self).__init__()<line_sep>self.p=msd.Poisson([0.5] dtype=dtype.float32)<block_end><def_stmt>construct self x_<block_start><return>self.p.log_prob(x_)<block_end><block_end><def_stmt>test_log_likelihood <block_start>"""
Test log_pdf.
"""<line_sep>poisson_benchmark=stats.poisson(mu=0.5)<line_sep>expect_logpdf=poisson_benchmark.logpmf([1.0 2.0]).astype(np.float32)<line_sep>logprob=LogProb()<line_sep>x_=Tensor(np.array([1.0 2.0]).astype(np.float32) dtype=dtype.float32)<line_sep>output=logprob(x_)<line_sep>tol=1e-6<assert_stmt>(np.abs(output.asnumpy()-expect_logpdf)<l>tol).all()<block_end><class_stmt>Basics(nn.Cell)<block_start>"""
Test class: mean/sd/mode of Poisson distribution.
"""<def_stmt>__init__ self<block_start>super(Basics self).__init__()<line_sep>self.p=msd.Poisson([1.44] dtype=dtype.float32)<block_end><def_stmt>construct self<block_start><return>self.p.mean() self.p.sd() self.p.mode()<block_end><block_end><def_stmt>test_basics <block_start>"""
Test mean/standard/mode deviation.
"""<line_sep>basics=Basics()<line_sep>mean,sd,mode=basics()<line_sep>expect_mean=1.44<line_sep>expect_sd=1.2<line_sep>expect_mode=1<line_sep>tol=1e-6<assert_stmt>(np.abs(mean.asnumpy()-expect_mean)<l>tol).all()<assert_stmt>(np.abs(sd.asnumpy()-expect_sd)<l>tol).all()<assert_stmt>(np.abs(mode.asnumpy()-expect_mode)<l>tol).all()<block_end><class_stmt>Sampling(nn.Cell)<block_start>"""
Test class: sample of Poisson distribution.
"""<def_stmt>__init__ self shape seed=0<block_start>super(Sampling self).__init__()<line_sep>self.p=msd.Poisson([[1.0] [0.5]] seed=seed dtype=dtype.float32)<line_sep>self.shape=shape<block_end><def_stmt>construct self rate=<none><block_start><return>self.p.sample(self.shape rate)<block_end><block_end><def_stmt>test_sample <block_start>"""
Test sample.
"""<line_sep>shape=(2 3)<line_sep>seed=10<line_sep>rate=Tensor([1.0 2.0 3.0] dtype=dtype.float32)<line_sep>sample=Sampling(shape seed=seed)<line_sep>output=sample(rate)<assert_stmt>output.shape<eq>(2 3 3)<block_end><class_stmt>CDF(nn.Cell)<block_start>"""
Test class: cdf of Poisson distribution.
"""<def_stmt>__init__ self<block_start>super(CDF self).__init__()<line_sep>self.p=msd.Poisson([0.5] dtype=dtype.float32)<block_end><def_stmt>construct self x_<block_start><return>self.p.cdf(x_)<block_end><block_end><def_stmt>test_cdf <block_start>"""
Test cdf.
"""<line_sep>poisson_benchmark=stats.poisson(mu=0.5)<line_sep>expect_cdf=poisson_benchmark.cdf([-1.0 0.0 1.0]).astype(np.float32)<line_sep>cdf=CDF()<line_sep>x_=Tensor(np.array([-1.0 0.0 1.0]).astype(np.float32) dtype=dtype.float32)<line_sep>output=cdf(x_)<line_sep>tol=1e-6<assert_stmt>(np.abs(output.asnumpy()-expect_cdf)<l>tol).all()<block_end><class_stmt>LogCDF(nn.Cell)<block_start>"""
Test class: log_cdf of Poisson distribution.
"""<def_stmt>__init__ self<block_start>super(LogCDF self).__init__()<line_sep>self.p=msd.Poisson([0.5] dtype=dtype.float32)<block_end><def_stmt>construct self x_<block_start><return>self.p.log_cdf(x_)<block_end><block_end><def_stmt>test_log_cdf <block_start>"""
Test log_cdf.
"""<line_sep>poisson_benchmark=stats.poisson(mu=0.5)<line_sep>expect_logcdf=poisson_benchmark.logcdf([0.5 1.0 2.5]).astype(np.float32)<line_sep>logcdf=LogCDF()<line_sep>x_=Tensor(np.array([0.5 1.0 2.5]).astype(np.float32) dtype=dtype.float32)<line_sep>output=logcdf(x_)<line_sep>tol=1e-6<assert_stmt>(np.abs(output.asnumpy()-expect_logcdf)<l>tol).all()<block_end><class_stmt>SF(nn.Cell)<block_start>"""
Test class: survival function of Poisson distribution.
"""<def_stmt>__init__ self<block_start>super(SF self).__init__()<line_sep>self.p=msd.Poisson([0.5] dtype=dtype.float32)<block_end><def_stmt>construct self x_<block_start><return>self.p.survival_function(x_)<block_end><block_end><def_stmt>test_survival <block_start>"""
Test survival function.
"""<line_sep>poisson_benchmark=stats.poisson(mu=0.5)<line_sep>expect_survival=poisson_benchmark.sf([-1.0 0.0 1.0]).astype(np.float32)<line_sep>survival=SF()<line_sep>x_=Tensor(np.array([-1.0 0.0 1.0]).astype(np.float32) dtype=dtype.float32)<line_sep>output=survival(x_)<line_sep>tol=1e-6<assert_stmt>(np.abs(output.asnumpy()-expect_survival)<l>tol).all()<block_end><class_stmt>LogSF(nn.Cell)<block_start>"""
Test class: log survival function of Poisson distribution.
"""<def_stmt>__init__ self<block_start>super(LogSF self).__init__()<line_sep>self.p=msd.Poisson([0.5] dtype=dtype.float32)<block_end><def_stmt>construct self x_<block_start><return>self.p.log_survival(x_)<block_end><block_end><def_stmt>test_log_survival <block_start>"""
Test log survival function.
"""<line_sep>poisson_benchmark=stats.poisson(mu=0.5)<line_sep>expect_logsurvival=poisson_benchmark.logsf([-1.0 0.0 1.0]).astype(np.float32)<line_sep>logsurvival=LogSF()<line_sep>x_=Tensor(np.array([-1.0 0.0 1.0]).astype(np.float32) dtype=dtype.float32)<line_sep>output=logsurvival(x_)<line_sep>tol=1e-6<assert_stmt>(np.abs(output.asnumpy()-expect_logsurvival)<l>tol).all()<block_end> |
# Generated by Django 2.2.11 on 2020-04-23 17:14
<import_from_stmt>django.db migrations models<import_stmt>django.db.models.deletion<class_stmt>Migration(migrations.Migration)<block_start>dependencies=[('capdb' '0100_auto_20200410_1755') ]<line_sep>operations=[migrations.AddField(model_name='historicalvolumemetadata' name='second_part_of' field=models.ForeignKey(blank=<true> db_constraint=<false> null=<true> on_delete=django.db.models.deletion.DO_NOTHING related_name='+' to='capdb.VolumeMetadata') ) migrations.AddField(model_name='volumemetadata' name='second_part_of' field=models.ForeignKey(null=<true> on_delete=django.db.models.deletion.DO_NOTHING related_name='second_part' to='capdb.VolumeMetadata') ) ]<block_end> |
<import_stmt>os<import_from_stmt>tensorboardX SummaryWriter<import_from_stmt>claf nsml<class_stmt>TensorBoard<block_start>""" TensorBoard Wrapper for Pytorch """<def_stmt>__init__ self log_dir<block_start><if_stmt><not>os.path.exists(log_dir)<block_start>os.makedirs(log_dir)<block_end>self.writer=SummaryWriter(log_dir=log_dir)<block_end><def_stmt>scalar_summaries self step summary<block_start><if_stmt>nsml.IS_ON_NSML<block_start><if_stmt>type(summary)<ne>dict<block_start><raise>ValueError(f"summary type is dict. not {type(summary)}")<block_end>kwargs={"summary":<true> "scope":locals() "step":step}<line_sep>kwargs.update(summary)<line_sep>nsml.report(**kwargs)<block_end><else_stmt><block_start><for_stmt>tag,value summary.items()<block_start>self.scalar_summary(step tag value)<block_end><block_end><block_end><def_stmt>scalar_summary self step tag value<block_start>"""Log a scalar variable."""<if_stmt>nsml.IS_ON_NSML<block_start>nsml.report(**{"summary":<true> "scope":locals() "step":step tag:value})<block_end><else_stmt><block_start>self.writer.add_scalar(tag value step)<block_end><block_end><def_stmt>image_summary self tag images step<block_start>"""Log a list of images."""<line_sep><raise>NotImplementedError()<block_end><def_stmt>embedding_summary self features metadata=<none> label_img=<none><block_start><raise>NotImplementedError()<block_end><def_stmt>histogram_summary self tag values step bins=1000<block_start>"""Log a histogram of the tensor of values."""<line_sep><raise>NotImplementedError()<block_end><def_stmt>graph_summary self model input_to_model=<none><block_start><raise>NotImplementedError()<block_end><block_end> |
<import_from_future_stmt> print_function<import_from_stmt>src cli<import_from_stmt>os environ<as>ENV<line_sep>PROFILE=<false><if_stmt>PROFILE<block_start>print("PROFILING")<import_stmt>cProfile<line_sep>cProfile.run("cli.main()" "restats")<import_stmt>pstats<line_sep>p=pstats.Stats('restats')<line_sep>p.strip_dirs().sort_stats('cumulative').print_stats(50)<block_end><else_stmt><block_start>cli.main()<block_end> |
"""Test-Suite for the configuration system."""<line_sep> |
<import_from_stmt>pytest mark<import_from_stmt>tests CUSTOM_TAGS<line_sep>POTATO_TAGS=["Potato"]<line_sep>PATHS=["/potato" "/carrot"]<line_sep>PATH_TAGS={"/potato":POTATO_TAGS "/potato/{item_id}":POTATO_TAGS "/carrot":CUSTOM_TAGS "/carrot/{item_id}":CUSTOM_TAGS }<class_stmt>TestOpenAPISpec<block_start><def_stmt>test_schema_exists self client<block_start>res=client.get("/openapi.json")<assert_stmt>res.status_code<eq>200<line_sep><return>res<block_end><def_stmt>test_schema_tags self client<block_start>schema=self.test_schema_exists(client).json()<line_sep>paths=schema["paths"]<assert_stmt>len(paths)<eq>len(PATH_TAGS)<for_stmt>path,method paths.items()<block_start><assert_stmt>len(method)<eq>3<for_stmt>m method<block_start><assert_stmt>method[m]["tags"]<eq>PATH_TAGS[path]<block_end><block_end><block_end>@mark.parametrize("path" PATHS)<def_stmt>test_response_types self client path<block_start>schema=self.test_schema_exists(client).json()<line_sep>paths=schema["paths"]<for_stmt>method ["get" "post" "delete"]<block_start><assert_stmt>"200"<in>paths[path][method]["responses"]<block_end><assert_stmt>"422"<in>paths[path]["post"]["responses"]<line_sep>item_path=path+"/{item_id}"<for_stmt>method ["get" "put" "delete"]<block_start><assert_stmt>"200"<in>paths[item_path][method]["responses"]<assert_stmt>"404"<in>paths[item_path][method]["responses"]<assert_stmt>"422"<in>paths[item_path][method]["responses"]<block_end><block_end><block_end> |
# Time: O(m + n)
# Space: O(1)
<class_stmt>ListNode(object)<block_start><def_stmt>__init__ self x<block_start>self.val=x<line_sep>self.next=<none><block_end><block_end><class_stmt>Solution(object)# @param two ListNodes
# @return the intersected ListNode
<block_start><def_stmt>getIntersectionNode self headA headB<block_start>curA,curB=headA headB<while_stmt>curA<ne>curB<block_start>curA=curA.next<if>curA<else>headB<line_sep>curB=curB.next<if>curB<else>headA<block_end><return>curA<block_end><block_end> |
<import_stmt>bleach<import_stmt>bleach_whitelist<import_from_stmt>django.conf settings<import_from_stmt>rest_framework.pagination PageNumberPagination<def_stmt>sanitize string# bleach doesn't handle None so let's not pass it
<block_start><if_stmt>string<and>getattr(settings "RESPONSE_SANITIZE_USER_INPUT" <true>)<block_start><return>bleach.clean(string tags=bleach_whitelist.markdown_tags attributes=bleach_whitelist.markdown_attrs styles=bleach_whitelist.all_styles )<block_end><return>string<block_end><class_stmt>LargeResultsSetPagination(PageNumberPagination)<block_start>page_size=500<line_sep>max_page_size=1000<line_sep>page_size_query_param="page_size"<block_end> |
<import_from_future_stmt> absolute_import<import_from_future_stmt> division<import_from_future_stmt> print_function<import_stmt>shutil<import_stmt>sys<import_stmt>tempfile<import_from_stmt>observations.r.lost_letter lost_letter<def_stmt>test_lost_letter <block_start>"""Test module lost_letter.py by downloading
lost_letter.csv and testing shape of
extracted data has 140 rows and 8 columns
"""<line_sep>test_path=tempfile.mkdtemp()<line_sep>x_train,metadata=lost_letter(test_path)<try_stmt><block_start><assert_stmt>x_train.shape<eq>(140 8)<block_end><except_stmt><block_start>shutil.rmtree(test_path)<line_sep><raise>()<block_end><block_end> |
#
# Unicode escape format setting dialog for the following plugins:
# Unicode escape
# Unicode unescape
#
# Copyright (c) 2020, <NAME>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
<import_stmt>sys<import_stmt>tkinter<import_stmt>tkinter.ttk<line_sep># Print setting to stdout
<def_stmt>print_setting r cf ce<block_start>escape_format={"\\uXXXX (Java, JavaScript)":"\\u" "\\uXXXX and \\UXXXXXXXX (C, Python)":"\\U" "\\u{XXXX} (JavaScript ES6+, PHP 7+)":"\\u{" "`u{XXXX} (PowerShell 6+)":"`u" "%uXXXX (Legacy JavaScript)":"%u" "U+XXXX (Unicode code point)":"U+"}<line_sep>print("%s\t%s"%(escape_format[cf.get()] ce.get()))<line_sep>root.quit()<block_end># Create input dialog
root=tkinter.Tk()<line_sep>root.title("Unicode escape/unescape format setting")<line_sep>root.protocol("WM_DELETE_WINDOW" (<lambda>r=root:r.quit()))<line_sep>label_format=tkinter.Label(root text="Unicode escape format:")<line_sep>label_format.grid(row=0 column=0 padx=5 pady=5 sticky="w")<line_sep>combo_format=tkinter.ttk.Combobox(root width=40 state="readonly")<line_sep>combo_format["values"]=("\\uXXXX (Java, JavaScript)" "\\uXXXX and \\UXXXXXXXX (C, Python)" "\\u{XXXX} (JavaScript ES6+, PHP 7+)" "`u{XXXX} (PowerShell 6+)" "%uXXXX (Legacy JavaScript)" "U+XXXX (Unicode code point)")<line_sep>combo_format.current(0)<line_sep>combo_format.grid(row=0 column=1 padx=5 pady=5 sticky="w")<if_stmt>len(sys.argv)<g>1<and>sys.argv[1]<eq>"-e"<block_start>label_encoding=tkinter.Label(root text="Input encoding:")<block_end><elif_stmt>len(sys.argv)<g>1<and>sys.argv[1]<eq>"-u"<block_start>label_encoding=tkinter.Label(root text="Output encoding:")<block_end><else_stmt><block_start>label_encoding=tkinter.Label(root text="Encoding:")<block_end>label_encoding.grid(row=1 column=0 padx=5 pady=5 sticky="w")<line_sep>combo_encoding=tkinter.ttk.Combobox(root width=10 state="readonly")<line_sep>combo_encoding["values"]=("UTF-8" "UTF-16LE" "UTF-16BE")<line_sep>combo_encoding.current(0)<line_sep>combo_encoding.grid(row=1 column=1 padx=5 pady=5 sticky="w")<line_sep>button=tkinter.Button(root text='OK' command=(<lambda>r=root cf=combo_format ce=combo_encoding:print_setting(r cf ce)))<line_sep>button.grid(row=2 column=0 padx=5 pady=5 columnspan=3)<line_sep>button.focus()# Focus to this widget
# Set callback functions
<for_stmt>x (combo_format combo_encoding button)<block_start>x.bind("<Return>" <lambda>event r=root cf=combo_format ce=combo_encoding:print_setting(r cf ce))<block_end># Adjust window position
sw=root.winfo_screenwidth()<line_sep>sh=root.winfo_screenheight()<line_sep>root.update_idletasks()# Necessary to get width and height of the window
ww=root.winfo_width()<line_sep>wh=root.winfo_height()<line_sep>root.geometry('+%d+%d'%((sw/2)-(ww/2) (sh/2)-(wh/2)))<line_sep>root.mainloop()<line_sep> |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.